mirror of
https://github.com/home-assistant/frontend.git
synced 2025-08-08 00:47:47 +00:00
Compare commits
No commits in common. "dev" and "20200406.0" have entirely different histories.
dev
...
20200406.0
@ -1,36 +0,0 @@
|
||||
[modern]
|
||||
# Modern builds target recent browsers supporting the latest features to minimize transpilation, polyfills, etc.
|
||||
# It is served to browsers meeting the following requirements:
|
||||
# - released in the last year + current alpha/beta versions
|
||||
# - Firefox extended support release (ESR)
|
||||
# - with global utilization at or above 0.5%
|
||||
# - exclude dead browsers (no security maintenance for 2+ years)
|
||||
# - exclude KaiOS, QQ, and UC browsers due to lack of sufficient feature support data
|
||||
unreleased versions
|
||||
last 1 year
|
||||
Firefox ESR
|
||||
>= 0.5%
|
||||
not dead
|
||||
not KaiOS > 0
|
||||
not QQAndroid > 0
|
||||
not UCAndroid > 0
|
||||
|
||||
[legacy]
|
||||
# Legacy builds are served when modern requirements are not met and support browsers:
|
||||
# - released in the last 7 years + current alpha/beta versionss
|
||||
# - with global utilization at or above 0.05%
|
||||
# - exclude dead browsers (no security maintenance for 2+ years)
|
||||
# - exclude Opera Mini which does not support web sockets
|
||||
unreleased versions
|
||||
last 7 years
|
||||
>= 0.05%
|
||||
not dead
|
||||
not op_mini all
|
||||
|
||||
[legacy-sw]
|
||||
# Same as legacy plus supports service workers
|
||||
unreleased versions
|
||||
last 7 years
|
||||
>= 0.05% and supports serviceworkers
|
||||
not dead
|
||||
not op_mini all
|
@ -1,6 +0,0 @@
|
||||
FROM mcr.microsoft.com/devcontainers/python:1-3.13
|
||||
|
||||
ENV \
|
||||
DEBIAN_FRONTEND=noninteractive \
|
||||
DEVCONTAINER=true \
|
||||
PATH=$PATH:./node_modules/.bin
|
@ -1,49 +0,0 @@
|
||||
{
|
||||
"name": "Home Assistant Frontend",
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
"context": ".."
|
||||
},
|
||||
"appPort": "8124:8123",
|
||||
"postCreateCommand": "./.devcontainer/post_create.sh",
|
||||
"postStartCommand": "script/bootstrap",
|
||||
"containerEnv": {
|
||||
"DEV_CONTAINER": "1",
|
||||
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
|
||||
},
|
||||
"remoteEnv": {
|
||||
"NODE_OPTIONS": "--max_old_space_size=8192"
|
||||
},
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"dbaeumer.vscode-eslint",
|
||||
"esbenp.prettier-vscode",
|
||||
"runem.lit-plugin",
|
||||
"github.vscode-pull-request-github",
|
||||
"eamodio.gitlens",
|
||||
"yeion7.styled-global-variables-autocomplete"
|
||||
],
|
||||
"settings": {
|
||||
"files.eol": "\n",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnPaste": false,
|
||||
"editor.formatOnSave": true,
|
||||
"editor.formatOnType": true,
|
||||
"editor.renderWhitespace": "boundary",
|
||||
"editor.rulers": [80],
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"files.trimTrailingWhitespace": true,
|
||||
"terminal.integrated.shell.linux": "/usr/bin/zsh",
|
||||
"gitlens.showWelcomeOnInstall": false,
|
||||
"gitlens.showWhatsNewAfterUpgrades": false,
|
||||
"workbench.startupEditor": "none"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,22 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# This script will run after the container is created
|
||||
|
||||
# add github cli
|
||||
(type -p wget >/dev/null || (sudo apt update && sudo apt-get install wget -y)) \
|
||||
&& sudo mkdir -p -m 755 /etc/apt/keyrings \
|
||||
&& out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \
|
||||
&& cat $out | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
|
||||
&& sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
|
||||
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
|
||||
|
||||
# Update package lists
|
||||
sudo apt-get update
|
||||
|
||||
sudo apt upgrade -y
|
||||
|
||||
# Install necessary packages
|
||||
sudo apt-get install -y libpcap-dev gh
|
||||
|
||||
# Display a message
|
||||
echo "Post-create script has been executed successfully."
|
4
.dockerignore
Normal file
4
.dockerignore
Normal file
@ -0,0 +1,4 @@
|
||||
node_modules
|
||||
hass_frontend
|
||||
hass_frontend_es5
|
||||
.git
|
80
.eslintrc-hound.json
Normal file
80
.eslintrc-hound.json
Normal file
@ -0,0 +1,80 @@
|
||||
{
|
||||
"extends": ["airbnb-base", "prettier"],
|
||||
"parserOptions": {
|
||||
"ecmaVersion": "2020",
|
||||
"ecmaFeatures": {
|
||||
"jsx": true,
|
||||
"modules": true
|
||||
}
|
||||
},
|
||||
"settings": {
|
||||
"react": {
|
||||
"pragma": "h",
|
||||
"version": "15.0"
|
||||
},
|
||||
"import/resolver": {
|
||||
"webpack": {
|
||||
"config": "webpack.config.js"
|
||||
}
|
||||
}
|
||||
},
|
||||
"globals": {
|
||||
"__DEV__": false,
|
||||
"__DEMO__": false,
|
||||
"__BUILD__": false,
|
||||
"__VERSION__": false,
|
||||
"__STATIC_PATH__": false,
|
||||
"Polymer": true,
|
||||
"webkitSpeechRecognition": false,
|
||||
"ResizeObserver": false
|
||||
},
|
||||
"env": {
|
||||
"browser": true,
|
||||
"mocha": true
|
||||
},
|
||||
"rules": {
|
||||
"class-methods-use-this": 0,
|
||||
"new-cap": 0,
|
||||
"prefer-template": 0,
|
||||
"object-shorthand": 0,
|
||||
"func-names": 0,
|
||||
"prefer-arrow-callback": 0,
|
||||
"no-underscore-dangle": 0,
|
||||
"no-var": 0,
|
||||
"strict": 0,
|
||||
"prefer-spread": 0,
|
||||
"no-plusplus": 0,
|
||||
"no-bitwise": 0,
|
||||
"comma-dangle": 0,
|
||||
"vars-on-top": 0,
|
||||
"no-continue": 0,
|
||||
"no-param-reassign": 0,
|
||||
"no-multi-assign": 0,
|
||||
"radix": 0,
|
||||
"no-alert": 0,
|
||||
"no-return-await": 0,
|
||||
"prefer-destructuring": 0,
|
||||
"no-restricted-globals": [2, "event"],
|
||||
"prefer-promise-reject-errors": 0,
|
||||
"import/prefer-default-export": 0,
|
||||
"import/no-unresolved": 0,
|
||||
"import/extensions": [2, "ignorePackages"],
|
||||
"object-curly-newline": 0,
|
||||
"default-case": 0,
|
||||
"react/jsx-no-bind": [2, { "ignoreRefs": true }],
|
||||
"react/jsx-no-duplicate-props": 2,
|
||||
"react/self-closing-comp": 2,
|
||||
"react/prefer-es6-class": 2,
|
||||
"react/no-string-refs": 2,
|
||||
"react/require-render-return": 2,
|
||||
"react/no-find-dom-node": 2,
|
||||
"react/no-is-mounted": 2,
|
||||
"react/jsx-no-comment-textnodes": 2,
|
||||
"react/jsx-no-undef": 2,
|
||||
"react/jsx-uses-react": 2,
|
||||
"react/jsx-uses-vars": 2,
|
||||
"no-restricted-syntax": [0, "ForOfStatement"],
|
||||
"prettier/prettier": "error"
|
||||
},
|
||||
"plugins": ["react", "prettier"]
|
||||
}
|
12
.eslintrc.json
Normal file
12
.eslintrc.json
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"extends": "./.eslintrc-hound.json",
|
||||
"plugins": ["react"],
|
||||
"env": {
|
||||
"browser": true
|
||||
},
|
||||
"rules": {
|
||||
"import/no-unresolved": 2,
|
||||
"linebreak-style": 0,
|
||||
"implicit-arrow-linebreak": 0
|
||||
}
|
||||
}
|
@ -51,7 +51,7 @@ DO NOT DELETE ANY TEXT from this template! Otherwise, your issue may be closed w
|
||||
<!--
|
||||
Provide details about the versions you are using, which helps us reproducing
|
||||
and finding the issue quicker. Version information is found in the
|
||||
Home Assistant frontend: Settings -> About.
|
||||
Home Assistant frontend: Developer tools -> Info.
|
||||
|
||||
Browser version and operating system is important! Please try to replicate
|
||||
your issue in a different browser and be sure to include your findings.
|
||||
@ -62,24 +62,12 @@ DO NOT DELETE ANY TEXT from this template! Otherwise, your issue may be closed w
|
||||
- Browser and browser version:
|
||||
- Operating system:
|
||||
|
||||
## State of relevant entities
|
||||
## Problem-relevant configuration
|
||||
|
||||
<!--
|
||||
If your issue is about how an entity is shown in the UI, please add the state
|
||||
and attributes for all situations with a screenshot of the UI.
|
||||
You can find this information at `/developer-tools/state`
|
||||
-->
|
||||
|
||||
```yaml
|
||||
|
||||
```
|
||||
|
||||
## Problem-relevant frontend configuration
|
||||
|
||||
<!--
|
||||
An example configuration that caused the problem for you, e.g. the YAML configuration
|
||||
of the used cards. Fill this out even if it seems unimportant to you. Please be sure
|
||||
to remove personal information like passwords, private URLs and other credentials.
|
||||
An example configuration that caused the problem for you. Fill this out even
|
||||
if it seems unimportant to you. Please be sure to remove personal information
|
||||
like passwords, private URLs and other credentials.
|
||||
-->
|
||||
|
||||
```yaml
|
||||
@ -89,7 +77,7 @@ DO NOT DELETE ANY TEXT from this template! Otherwise, your issue may be closed w
|
||||
## Javascript errors shown in your browser console/inspector
|
||||
|
||||
<!--
|
||||
If you come across any Javascript or other error logs, e.g. in your browser
|
||||
If you come across any javascript or other error logs, e.g., in your browser
|
||||
console/inspector please provide them.
|
||||
-->
|
||||
|
25
.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
vendored
Normal file
25
.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
---
|
||||
name: Request a feature for the UI, Frontend or Lovelace
|
||||
about: Request an new feature for the Home Assistant frontend.
|
||||
labels: feature request
|
||||
---
|
||||
<!--
|
||||
DO NOT DELETE ANY TEXT from this template!
|
||||
Otherwise, your request may be closed without comment.
|
||||
-->
|
||||
## The request
|
||||
<!--
|
||||
Describe to our maintainers, the feature you would like to be added.
|
||||
Please be clear and concise and, if possible, provide a screenshot or mockup.
|
||||
-->
|
||||
|
||||
|
||||
## The alternatives
|
||||
<!--
|
||||
Are you currently using, or have you considered alternatives?
|
||||
If so, could you please describe those?
|
||||
-->
|
||||
|
||||
|
||||
## Additional information
|
||||
|
122
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
122
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@ -1,122 +0,0 @@
|
||||
name: Report a bug with the UI / Dashboards
|
||||
description: Report an issue related to the Home Assistant frontend.
|
||||
labels: bug
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Make sure you are running the [latest version of Home Assistant][releases] before reporting an issue.
|
||||
|
||||
If you have a feature or enhancement request for the frontend, please [start a discussion][fr] instead of creating an issue.
|
||||
|
||||
**Please do not report issues for custom cards.**
|
||||
|
||||
[fr]: https://github.com/orgs/home-assistant/discussions
|
||||
[releases]: https://github.com/home-assistant/home-assistant/releases
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Checklist
|
||||
description: Please verify that you've followed these steps
|
||||
options:
|
||||
- label: I have updated to the latest available Home Assistant version.
|
||||
required: true
|
||||
- label: I have cleared the cache of my browser.
|
||||
required: true
|
||||
- label: I have tried a different browser to see if it is related to my browser.
|
||||
required: true
|
||||
- label: I have tried reproducing the issue in [safe mode](https://www.home-assistant.io/blog/2023/11/01/release-202311/#restarting-into-safe-mode) to rule out problems with unsupported custom resources.
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
## The problem
|
||||
- type: textarea
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: Describe the issue you are experiencing
|
||||
description: Provide a clear and concise description of what the bug is.
|
||||
- type: textarea
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: Describe the behavior you expected
|
||||
description: Describe what you expected to happen or it should look/behave.
|
||||
- type: textarea
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: Steps to reproduce the issue
|
||||
description: |
|
||||
Please tell us exactly how to reproduce your issue.
|
||||
Provide clear and concise step by step instructions and add code snippets if needed.
|
||||
value: |
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
...
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
## Environment
|
||||
- type: input
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: What version of Home Assistant Core has the issue?
|
||||
placeholder: core-
|
||||
description: >
|
||||
Can be found in: [Settings -> About](https://my.home-assistant.io/redirect/info/).
|
||||
- type: input
|
||||
attributes:
|
||||
label: What was the last working version of Home Assistant Core?
|
||||
placeholder: core-
|
||||
description: >
|
||||
If known, otherwise leave blank.
|
||||
- type: input
|
||||
attributes:
|
||||
label: In which browser are you experiencing the issue?
|
||||
placeholder: Google Chrome 88.0.4324.150
|
||||
description: >
|
||||
Provide the full name and don't forget to add the version!
|
||||
- type: input
|
||||
attributes:
|
||||
label: Which operating system are you using to run this browser?
|
||||
placeholder: macOS Big Sur (1.11)
|
||||
description: >
|
||||
Don't forget to add the version!
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
# Details
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: State of relevant entities
|
||||
description: >
|
||||
If your issue is about how an entity is shown in the UI, please add the
|
||||
state and attributes for all situations. You can find this information
|
||||
at Developer Tools -> States.
|
||||
render: txt
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Problem-relevant frontend configuration
|
||||
description: >
|
||||
An example configuration that caused the problem for you, e.g., the YAML
|
||||
configuration of the used cards. Fill this out even if it seems
|
||||
unimportant to you. Please be sure to remove personal information like
|
||||
passwords, private URLs and other credentials.
|
||||
render: yaml
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: JavaScript errors shown in your browser console/inspector
|
||||
description: >
|
||||
If you come across any JavaScript or other error logs, e.g., in your
|
||||
browser console/inspector please provide them.
|
||||
render: txt
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional information
|
||||
description: >
|
||||
If you have any additional information for us, use the field below.
|
||||
Please note, you can attach screenshots or screen recordings here, by
|
||||
dragging and dropping files in the field below.
|
9
.github/ISSUE_TEMPLATE/config.yml
vendored
9
.github/ISSUE_TEMPLATE/config.yml
vendored
@ -1,17 +1,14 @@
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: Request a feature for the UI / Dashboards
|
||||
url: https://github.com/orgs/home-assistant/discussions
|
||||
about: Request a new feature for the Home Assistant frontend.
|
||||
- name: Report a bug that is NOT related to the UI / Dashboards
|
||||
- name: Report a bug that is NOT related to the UI, Frontend or Lovelace
|
||||
url: https://github.com/home-assistant/core/issues
|
||||
about: This is the issue tracker for our frontend. Please report other issues in the backend ("core") repository.
|
||||
about: This is the issue tracker for our frontend. Please report other issues with the backend repository.
|
||||
- name: Report incorrect or missing information on our website
|
||||
url: https://github.com/home-assistant/home-assistant.io/issues
|
||||
about: Our documentation has its own issue tracker. Please report issues with the website there.
|
||||
- name: I have a question or need support
|
||||
url: https://www.home-assistant.io/help
|
||||
about: We use GitHub for tracking bugs. Check our website for resources on getting help.
|
||||
about: We use GitHub for tracking bugs, check our website for resources on getting help.
|
||||
- name: I'm unsure where to go
|
||||
url: https://www.home-assistant.io/join-chat
|
||||
about: If you are unsure where to go, then joining our chat is recommended; Just ask!
|
||||
|
53
.github/ISSUE_TEMPLATE/task.yml
vendored
53
.github/ISSUE_TEMPLATE/task.yml
vendored
@ -1,53 +0,0 @@
|
||||
name: Task
|
||||
description: For staff only - Create a task
|
||||
type: Task
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
## ⚠️ RESTRICTED ACCESS
|
||||
|
||||
**This form is restricted to Open Home Foundation staff and authorized contributors only.**
|
||||
|
||||
If you are a community member wanting to contribute, please:
|
||||
- For bug reports: Use the [bug report form](https://github.com/home-assistant/frontend/issues/new?template=bug_report.yml)
|
||||
- For feature requests: Submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)
|
||||
|
||||
---
|
||||
|
||||
### For authorized contributors
|
||||
|
||||
Use this form to create tasks for development work, improvements, or other actionable items that need to be tracked.
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Description
|
||||
description: |
|
||||
Provide a clear and detailed description of the task that needs to be accomplished.
|
||||
|
||||
Be specific about what needs to be done, why it's important, and any constraints or requirements.
|
||||
placeholder: |
|
||||
Describe the task, including:
|
||||
- What needs to be done
|
||||
- Why this task is needed
|
||||
- Expected outcome
|
||||
- Any constraints or requirements
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: additional_context
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: |
|
||||
Any additional information, links, research, or context that would be helpful.
|
||||
|
||||
Include links to related issues, research, prototypes, roadmap opportunities etc.
|
||||
placeholder: |
|
||||
- Roadmap opportunity: [link]
|
||||
- Epic: [link]
|
||||
- Feature request: [link]
|
||||
- Technical design documents: [link]
|
||||
- Prototype/mockup: [link]
|
||||
- Dependencies: [links]
|
||||
validations:
|
||||
required: false
|
13
.github/PULL_REQUEST_TEMPLATE.md
vendored
13
.github/PULL_REQUEST_TEMPLATE.md
vendored
@ -13,18 +13,18 @@
|
||||
|
||||
|
||||
## Proposed change
|
||||
<!--
|
||||
<!--
|
||||
Describe the big picture of your changes here to communicate to the
|
||||
maintainers why we should accept this pull request. If it fixes a bug
|
||||
or resolves a feature request, be sure to link to that issue or discussion
|
||||
in the additional information section.
|
||||
or resolves a feature request, be sure to link to that issue in the
|
||||
additional information section.
|
||||
-->
|
||||
|
||||
|
||||
## Type of change
|
||||
<!--
|
||||
What type of change does your PR introduce to the Home Assistant frontend?
|
||||
NOTE: Please, check only 1! box!
|
||||
NOTE: Please, check only 1! box!
|
||||
If your PR requires multiple boxes to be checked, you'll most likely need to
|
||||
split it into multiple PRs. This makes things easier and faster to code review.
|
||||
-->
|
||||
@ -52,8 +52,8 @@
|
||||
-->
|
||||
|
||||
- This PR fixes or closes issue: fixes #
|
||||
- This PR is related to issue or discussion:
|
||||
- Link to documentation pull request:
|
||||
- This PR is related to issue:
|
||||
- Link to documentation pull request:
|
||||
|
||||
## Checklist
|
||||
<!--
|
||||
@ -74,5 +74,4 @@ If user exposed functionality or configuration variables are added/changed:
|
||||
<!--
|
||||
Thank you for contributing <3
|
||||
-->
|
||||
|
||||
[docs-repository]: https://github.com/home-assistant/home-assistant.io
|
||||
|
596
.github/copilot-instructions.md
vendored
596
.github/copilot-instructions.md
vendored
@ -1,596 +0,0 @@
|
||||
# GitHub Copilot & Claude Code Instructions
|
||||
|
||||
You are an assistant helping with development of the Home Assistant frontend. The frontend is built using Lit-based Web Components and TypeScript, providing a responsive and performant interface for home automation control.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Quick Reference](#quick-reference)
|
||||
- [Core Architecture](#core-architecture)
|
||||
- [Development Standards](#development-standards)
|
||||
- [Component Library](#component-library)
|
||||
- [Common Patterns](#common-patterns)
|
||||
- [Text and Copy Guidelines](#text-and-copy-guidelines)
|
||||
- [Development Workflow](#development-workflow)
|
||||
- [Review Guidelines](#review-guidelines)
|
||||
|
||||
## Quick Reference
|
||||
|
||||
### Essential Commands
|
||||
|
||||
```bash
|
||||
yarn lint # ESLint + Prettier + TypeScript + Lit
|
||||
yarn format # Auto-fix ESLint + Prettier
|
||||
yarn lint:types # TypeScript compiler
|
||||
yarn test # Vitest
|
||||
script/develop # Development server
|
||||
```
|
||||
|
||||
### Component Prefixes
|
||||
|
||||
- `ha-` - Home Assistant components
|
||||
- `hui-` - Lovelace UI components
|
||||
- `dialog-` - Dialog components
|
||||
|
||||
### Import Patterns
|
||||
|
||||
```typescript
|
||||
import type { HomeAssistant } from "../types";
|
||||
import { fireEvent } from "../common/dom/fire_event";
|
||||
import { showAlertDialog } from "../dialogs/generic/show-alert-dialog";
|
||||
```
|
||||
|
||||
## Core Architecture
|
||||
|
||||
The Home Assistant frontend is a modern web application that:
|
||||
|
||||
- Uses Web Components (custom elements) built with Lit framework
|
||||
- Is written entirely in TypeScript with strict type checking
|
||||
- Communicates with the backend via WebSocket API
|
||||
- Provides comprehensive theming and internationalization
|
||||
|
||||
## Development Standards
|
||||
|
||||
### Code Quality Requirements
|
||||
|
||||
**Linting and Formatting (Enforced by Tools)**
|
||||
|
||||
- ESLint config extends Airbnb, TypeScript strict, Lit, Web Components, Accessibility
|
||||
- Prettier with ES5 trailing commas enforced
|
||||
- No console statements (`no-console: "error"`) - use proper logging
|
||||
- Import organization: No unused imports, consistent type imports
|
||||
|
||||
**Naming Conventions**
|
||||
|
||||
- PascalCase for types and classes
|
||||
- camelCase for variables, methods
|
||||
- Private methods require leading underscore
|
||||
- Public methods forbid leading underscore
|
||||
|
||||
### TypeScript Usage
|
||||
|
||||
- **Always use strict TypeScript**: Enable all strict flags, avoid `any` types
|
||||
- **Proper type imports**: Use `import type` for type-only imports
|
||||
- **Define interfaces**: Create proper interfaces for data structures
|
||||
- **Type component properties**: All Lit properties must be properly typed
|
||||
- **No unused variables**: Prefix with `_` if intentionally unused
|
||||
- **Consistent imports**: Use `@typescript-eslint/consistent-type-imports`
|
||||
|
||||
```typescript
|
||||
// Good
|
||||
import type { HomeAssistant } from "../types";
|
||||
|
||||
interface EntityConfig {
|
||||
entity: string;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
@property({ type: Object })
|
||||
hass!: HomeAssistant;
|
||||
|
||||
// Bad
|
||||
@property()
|
||||
hass: any;
|
||||
```
|
||||
|
||||
### Web Components with Lit
|
||||
|
||||
- **Use Lit 3.x patterns**: Follow modern Lit practices
|
||||
- **Extend appropriate base classes**: Use `LitElement`, `SubscribeMixin`, or other mixins as needed
|
||||
- **Define custom element names**: Use `ha-` prefix for components
|
||||
|
||||
```typescript
|
||||
@customElement("ha-my-component")
|
||||
export class HaMyComponent extends LitElement {
|
||||
@property({ attribute: false })
|
||||
hass!: HomeAssistant;
|
||||
|
||||
@state()
|
||||
private _config?: MyComponentConfig;
|
||||
|
||||
static get styles() {
|
||||
return css`
|
||||
:host {
|
||||
display: block;
|
||||
}
|
||||
`;
|
||||
}
|
||||
|
||||
render() {
|
||||
return html`<div>Content</div>`;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Component Guidelines
|
||||
|
||||
- **Use composition**: Prefer composition over inheritance
|
||||
- **Lazy load panels**: Heavy panels should be dynamically imported
|
||||
- **Optimize renders**: Use `@state()` for internal state, `@property()` for public API
|
||||
- **Handle loading states**: Always show appropriate loading indicators
|
||||
- **Support themes**: Use CSS custom properties from theme
|
||||
|
||||
### Data Management
|
||||
|
||||
- **Use WebSocket API**: All backend communication via home-assistant-js-websocket
|
||||
- **Cache appropriately**: Use collections and caching for frequently accessed data
|
||||
- **Handle errors gracefully**: All API calls should have error handling
|
||||
- **Update real-time**: Subscribe to state changes for live updates
|
||||
|
||||
```typescript
|
||||
// Good
|
||||
try {
|
||||
const result = await fetchEntityRegistry(this.hass.connection);
|
||||
this._processResult(result);
|
||||
} catch (err) {
|
||||
showAlertDialog(this, {
|
||||
text: `Failed to load: ${err.message}`,
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Styling Guidelines
|
||||
|
||||
- **Use CSS custom properties**: Leverage the theme system
|
||||
- **Mobile-first responsive**: Design for mobile, enhance for desktop
|
||||
- **Follow Material Design**: Use Material Web Components where appropriate
|
||||
- **Support RTL**: Ensure all layouts work in RTL languages
|
||||
|
||||
```typescript
|
||||
static get styles() {
|
||||
return css`
|
||||
:host {
|
||||
--spacing: 16px;
|
||||
padding: var(--spacing);
|
||||
color: var(--primary-text-color);
|
||||
background-color: var(--card-background-color);
|
||||
}
|
||||
|
||||
@media (max-width: 600px) {
|
||||
:host {
|
||||
--spacing: 8px;
|
||||
}
|
||||
}
|
||||
`;
|
||||
}
|
||||
```
|
||||
|
||||
### Performance Best Practices
|
||||
|
||||
- **Code split**: Split code at the panel/dialog level
|
||||
- **Lazy load**: Use dynamic imports for heavy components
|
||||
- **Optimize bundle**: Keep initial bundle size minimal
|
||||
- **Use virtual scrolling**: For long lists, implement virtual scrolling
|
||||
- **Memoize computations**: Cache expensive calculations
|
||||
|
||||
### Testing Requirements
|
||||
|
||||
- **Write tests**: Add tests for data processing and utilities
|
||||
- **Test with Vitest**: Use the established test framework
|
||||
- **Mock appropriately**: Mock WebSocket connections and API calls
|
||||
- **Test accessibility**: Ensure components are accessible
|
||||
|
||||
## Component Library
|
||||
|
||||
### Dialog Components
|
||||
|
||||
**Available Dialog Types:**
|
||||
|
||||
- `ha-md-dialog` - Preferred for new code (Material Design 3)
|
||||
- `ha-dialog` - Legacy component still widely used
|
||||
|
||||
**Opening Dialogs (Fire Event Pattern - Recommended):**
|
||||
|
||||
```typescript
|
||||
fireEvent(this, "show-dialog", {
|
||||
dialogTag: "dialog-example",
|
||||
dialogImport: () => import("./dialog-example"),
|
||||
dialogParams: { title: "Example", data: someData },
|
||||
});
|
||||
```
|
||||
|
||||
**Dialog Implementation Requirements:**
|
||||
|
||||
- Implement `HassDialog<T>` interface
|
||||
- Use `createCloseHeading()` for standard headers
|
||||
- Import `haStyleDialog` for consistent styling
|
||||
- Return `nothing` when no params (loading state)
|
||||
- Fire `dialog-closed` event when closing
|
||||
- Add `dialogInitialFocus` for accessibility
|
||||
|
||||
````
|
||||
|
||||
### Form Component (ha-form)
|
||||
- Schema-driven using `HaFormSchema[]`
|
||||
- Supports entity, device, area, target, number, boolean, time, action, text, object, select, icon, media, location selectors
|
||||
- Built-in validation with error display
|
||||
- Use `dialogInitialFocus` in dialogs
|
||||
- Use `computeLabel`, `computeError`, `computeHelper` for translations
|
||||
|
||||
```typescript
|
||||
<ha-form
|
||||
.hass=${this.hass}
|
||||
.data=${this._data}
|
||||
.schema=${this._schema}
|
||||
.error=${this._errors}
|
||||
.computeLabel=${(schema) => this.hass.localize(`ui.panel.${schema.name}`)}
|
||||
@value-changed=${this._valueChanged}
|
||||
></ha-form>
|
||||
````
|
||||
|
||||
### Alert Component (ha-alert)
|
||||
|
||||
- Types: `error`, `warning`, `info`, `success`
|
||||
- Properties: `title`, `alert-type`, `dismissable`, `icon`, `action`, `rtl`
|
||||
- Content announced by screen readers when dynamically displayed
|
||||
|
||||
```html
|
||||
<ha-alert alert-type="error">Error message</ha-alert>
|
||||
<ha-alert alert-type="warning" title="Warning">Description</ha-alert>
|
||||
<ha-alert alert-type="success" dismissable>Success message</ha-alert>
|
||||
```
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Creating a Panel
|
||||
|
||||
```typescript
|
||||
@customElement("ha-panel-myfeature")
|
||||
export class HaPanelMyFeature extends SubscribeMixin(LitElement) {
|
||||
@property({ attribute: false })
|
||||
hass!: HomeAssistant;
|
||||
|
||||
@property({ type: Boolean, reflect: true })
|
||||
narrow!: boolean;
|
||||
|
||||
@property()
|
||||
route!: Route;
|
||||
|
||||
hassSubscribe() {
|
||||
return [
|
||||
subscribeEntityRegistry(this.hass.connection, (entities) => {
|
||||
this._entities = entities;
|
||||
}),
|
||||
];
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Creating a Dialog
|
||||
|
||||
```typescript
|
||||
@customElement("dialog-my-feature")
|
||||
export class DialogMyFeature
|
||||
extends LitElement
|
||||
implements HassDialog<MyDialogParams>
|
||||
{
|
||||
@property({ attribute: false })
|
||||
hass!: HomeAssistant;
|
||||
|
||||
@state()
|
||||
private _params?: MyDialogParams;
|
||||
|
||||
public async showDialog(params: MyDialogParams): Promise<void> {
|
||||
this._params = params;
|
||||
}
|
||||
|
||||
public closeDialog(): void {
|
||||
this._params = undefined;
|
||||
fireEvent(this, "dialog-closed", { dialog: this.localName });
|
||||
}
|
||||
|
||||
protected render() {
|
||||
if (!this._params) {
|
||||
return nothing;
|
||||
}
|
||||
|
||||
return html`
|
||||
<ha-dialog
|
||||
open
|
||||
@closed=${this.closeDialog}
|
||||
.heading=${createCloseHeading(this.hass, this._params.title)}
|
||||
>
|
||||
<!-- Dialog content -->
|
||||
<ha-button
|
||||
appearance="plain"
|
||||
@click=${this.closeDialog}
|
||||
slot="secondaryAction"
|
||||
>
|
||||
${this.hass.localize("ui.common.cancel")}
|
||||
</ha-button>
|
||||
<ha-button @click=${this._submit} slot="primaryAction">
|
||||
${this.hass.localize("ui.common.save")}
|
||||
</ha-button>
|
||||
</ha-dialog>
|
||||
`;
|
||||
}
|
||||
|
||||
static styles = [haStyleDialog, css``];
|
||||
}
|
||||
```
|
||||
|
||||
### Dialog Design Guidelines
|
||||
|
||||
- Max width: 560px (Alert/confirmation: 320px fixed width)
|
||||
- Close X-icon on top left (all screen sizes)
|
||||
- Submit button grouped with cancel at bottom right
|
||||
- Keep button labels short: "Save", "Delete", "Enable"
|
||||
- Destructive actions use red warning button
|
||||
- Always use a title (best practice)
|
||||
- Strive for minimalism
|
||||
|
||||
#### Creating a Lovelace Card
|
||||
|
||||
**Purpose**: Cards allow users to tell different stories about their house (based on gallery)
|
||||
|
||||
```typescript
|
||||
@customElement("hui-my-card")
|
||||
export class HuiMyCard extends LitElement implements LovelaceCard {
|
||||
@property({ attribute: false })
|
||||
hass!: HomeAssistant;
|
||||
|
||||
@state()
|
||||
private _config?: MyCardConfig;
|
||||
|
||||
public setConfig(config: MyCardConfig): void {
|
||||
if (!config.entity) {
|
||||
throw new Error("Entity required");
|
||||
}
|
||||
this._config = config;
|
||||
}
|
||||
|
||||
public getCardSize(): number {
|
||||
return 3; // Height in grid units
|
||||
}
|
||||
|
||||
// Optional: Editor for card configuration
|
||||
public static getConfigElement(): LovelaceCardEditor {
|
||||
return document.createElement("hui-my-card-editor");
|
||||
}
|
||||
|
||||
// Optional: Stub config for card picker
|
||||
public static getStubConfig(): object {
|
||||
return { entity: "" };
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Card Guidelines:**
|
||||
|
||||
- Cards are highly customizable for different households
|
||||
- Implement `LovelaceCard` interface with `setConfig()` and `getCardSize()`
|
||||
- Use proper error handling in `setConfig()`
|
||||
- Consider all possible states (loading, error, unavailable)
|
||||
- Support different entity types and states
|
||||
- Follow responsive design principles
|
||||
- Add configuration editor when needed
|
||||
|
||||
### Internationalization
|
||||
|
||||
- **Use localize**: Always use the localization system
|
||||
- **Add translation keys**: Add keys to src/translations/en.json
|
||||
- **Support placeholders**: Use proper placeholder syntax
|
||||
|
||||
```typescript
|
||||
this.hass.localize("ui.panel.config.updates.update_available", {
|
||||
count: 5,
|
||||
});
|
||||
```
|
||||
|
||||
### Accessibility
|
||||
|
||||
- **ARIA labels**: Add appropriate ARIA labels
|
||||
- **Keyboard navigation**: Ensure all interactions work with keyboard
|
||||
- **Screen reader support**: Test with screen readers
|
||||
- **Color contrast**: Meet WCAG AA standards
|
||||
|
||||
## Development Workflow
|
||||
|
||||
### Setup and Commands
|
||||
|
||||
1. **Setup**: `script/setup` - Install dependencies
|
||||
2. **Develop**: `script/develop` - Development server
|
||||
3. **Lint**: `yarn lint` - Run all linting before committing
|
||||
4. **Test**: `yarn test` - Add and run tests
|
||||
5. **Build**: `script/build_frontend` - Test production build
|
||||
|
||||
### Common Pitfalls to Avoid
|
||||
|
||||
- Don't use `querySelector` - Use refs or component properties
|
||||
- Don't manipulate DOM directly - Let Lit handle rendering
|
||||
- Don't use global styles - Scope styles to components
|
||||
- Don't block the main thread - Use web workers for heavy computation
|
||||
- Don't ignore TypeScript errors - Fix all type issues
|
||||
|
||||
### Security Best Practices
|
||||
|
||||
- Sanitize HTML - Never use `unsafeHTML` with user content
|
||||
- Validate inputs - Always validate user inputs
|
||||
- Use HTTPS - All external resources must use HTTPS
|
||||
- CSP compliance - Ensure code works with Content Security Policy
|
||||
|
||||
### Text and Copy Guidelines
|
||||
|
||||
#### Terminology Standards
|
||||
|
||||
**Delete vs Remove** (Based on gallery/src/pages/Text/remove-delete-add-create.markdown)
|
||||
|
||||
- **Use "Remove"** for actions that can be restored or reapplied:
|
||||
- Removing a user's permission
|
||||
- Removing a user from a group
|
||||
- Removing links between items
|
||||
- Removing a widget from dashboard
|
||||
- Removing an item from a cart
|
||||
- **Use "Delete"** for permanent, non-recoverable actions:
|
||||
- Deleting a field
|
||||
- Deleting a value in a field
|
||||
- Deleting a task
|
||||
- Deleting a group
|
||||
- Deleting a permission
|
||||
- Deleting a calendar event
|
||||
|
||||
**Create vs Add** (Create pairs with Delete, Add pairs with Remove)
|
||||
|
||||
- **Use "Add"** for already-existing items:
|
||||
- Adding a permission to a user
|
||||
- Adding a user to a group
|
||||
- Adding links between items
|
||||
- Adding a widget to dashboard
|
||||
- Adding an item to a cart
|
||||
- **Use "Create"** for something made from scratch:
|
||||
- Creating a new field
|
||||
- Creating a new task
|
||||
- Creating a new group
|
||||
- Creating a new permission
|
||||
- Creating a new calendar event
|
||||
|
||||
#### Writing Style (Consistent with Home Assistant Documentation)
|
||||
|
||||
- **Use American English**: Standard spelling and terminology
|
||||
- **Friendly, informational tone**: Be inspiring, personal, comforting, engaging
|
||||
- **Address users directly**: Use "you" and "your"
|
||||
- **Be inclusive**: Objective, non-discriminatory language
|
||||
- **Be concise**: Use clear, direct language
|
||||
- **Be consistent**: Follow established terminology patterns
|
||||
- **Use active voice**: "Delete the automation" not "The automation should be deleted"
|
||||
- **Avoid jargon**: Use terms familiar to home automation users
|
||||
|
||||
#### Language Standards
|
||||
|
||||
- **Always use "Home Assistant"** in full, never "HA" or "HASS"
|
||||
- **Avoid abbreviations**: Spell out terms when possible
|
||||
- **Use sentence case everywhere**: Titles, headings, buttons, labels, UI elements
|
||||
- ✅ "Create new automation"
|
||||
- ❌ "Create New Automation"
|
||||
- ✅ "Device settings"
|
||||
- ❌ "Device Settings"
|
||||
- **Oxford comma**: Use in lists (item 1, item 2, and item 3)
|
||||
- **Replace Latin terms**: Use "like" instead of "e.g.", "for example" instead of "i.e."
|
||||
- **Avoid CAPS for emphasis**: Use bold or italics instead
|
||||
- **Write for all skill levels**: Both technical and non-technical users
|
||||
|
||||
#### Key Terminology
|
||||
|
||||
- **"add-on"** (hyphenated, not "addon")
|
||||
- **"integration"** (preferred over "component")
|
||||
- **Technical terms**: Use lowercase (automation, entity, device, service)
|
||||
|
||||
#### Translation Considerations
|
||||
|
||||
- **Add translation keys**: All user-facing text must be translatable
|
||||
- **Use placeholders**: Support dynamic content in translations
|
||||
- **Keep context**: Provide enough context for translators
|
||||
|
||||
```typescript
|
||||
// Good
|
||||
this.hass.localize("ui.panel.config.automation.delete_confirm", {
|
||||
name: automation.alias,
|
||||
});
|
||||
|
||||
// Bad - hardcoded text
|
||||
("Are you sure you want to delete this automation?");
|
||||
```
|
||||
|
||||
### Common Review Issues (From PR Analysis)
|
||||
|
||||
#### User Experience and Accessibility
|
||||
|
||||
- **Form validation**: Always provide proper field labels and validation feedback
|
||||
- **Form accessibility**: Prevent password managers from incorrectly identifying fields
|
||||
- **Loading states**: Show clear progress indicators during async operations
|
||||
- **Error handling**: Display meaningful error messages when operations fail
|
||||
- **Mobile responsiveness**: Ensure components work well on small screens
|
||||
- **Hit targets**: Make clickable areas large enough for touch interaction
|
||||
- **Visual feedback**: Provide clear indication of interactive states
|
||||
|
||||
#### Dialog and Modal Patterns
|
||||
|
||||
- **Dialog width constraints**: Respect minimum and maximum width requirements
|
||||
- **Interview progress**: Show clear progress for multi-step operations
|
||||
- **State persistence**: Handle dialog state properly during background operations
|
||||
- **Cancel behavior**: Ensure cancel/close buttons work consistently
|
||||
- **Form prefilling**: Use smart defaults but allow user override
|
||||
|
||||
#### Component Design Patterns
|
||||
|
||||
- **Terminology consistency**: Use "Join"/"Apply" instead of "Group" when appropriate
|
||||
- **Visual hierarchy**: Ensure proper font sizes and spacing ratios
|
||||
- **Grid alignment**: Components should align to the design grid system
|
||||
- **Badge placement**: Position badges and indicators consistently
|
||||
- **Color theming**: Respect theme variables and design system colors
|
||||
|
||||
#### Code Quality Issues
|
||||
|
||||
- **Null checking**: Always check if entities exist before accessing properties
|
||||
- **TypeScript safety**: Handle potentially undefined array/object access
|
||||
- **Import organization**: Remove unused imports and use proper type imports
|
||||
- **Event handling**: Properly subscribe and unsubscribe from events
|
||||
- **Memory leaks**: Clean up subscriptions and event listeners
|
||||
|
||||
#### Configuration and Props
|
||||
|
||||
- **Optional parameters**: Make configuration fields optional when sensible
|
||||
- **Smart defaults**: Provide reasonable default values
|
||||
- **Future extensibility**: Design APIs that can be extended later
|
||||
- **Validation**: Validate configuration before applying changes
|
||||
|
||||
## Review Guidelines
|
||||
|
||||
### Core Requirements Checklist
|
||||
|
||||
- [ ] TypeScript strict mode passes (`yarn lint:types`)
|
||||
- [ ] No ESLint errors or warnings (`yarn lint:eslint`)
|
||||
- [ ] Prettier formatting applied (`yarn lint:prettier`)
|
||||
- [ ] Lit analyzer passes (`yarn lint:lit`)
|
||||
- [ ] Component follows Lit best practices
|
||||
- [ ] Proper error handling implemented
|
||||
- [ ] Loading states handled
|
||||
- [ ] Mobile responsive
|
||||
- [ ] Theme variables used
|
||||
- [ ] Translations added
|
||||
- [ ] Accessible to screen readers
|
||||
- [ ] Tests added (where applicable)
|
||||
- [ ] No console statements (use proper logging)
|
||||
- [ ] Unused imports removed
|
||||
- [ ] Proper naming conventions
|
||||
|
||||
### Text and Copy Checklist
|
||||
|
||||
- [ ] Follows terminology guidelines (Delete vs Remove, Create vs Add)
|
||||
- [ ] Localization keys added for all user-facing text
|
||||
- [ ] Uses "Home Assistant" (never "HA" or "HASS")
|
||||
- [ ] Sentence case for ALL text (titles, headings, buttons, labels)
|
||||
- [ ] American English spelling
|
||||
- [ ] Friendly, informational tone
|
||||
- [ ] Avoids abbreviations and jargon
|
||||
- [ ] Correct terminology (add-on not addon, integration not component)
|
||||
|
||||
### Component-Specific Checks
|
||||
|
||||
- [ ] Dialogs implement HassDialog interface
|
||||
- [ ] Dialog styling uses haStyleDialog
|
||||
- [ ] Dialog accessibility includes dialogInitialFocus
|
||||
- [ ] ha-alert used correctly for messages
|
||||
- [ ] ha-form uses proper schema structure
|
||||
- [ ] Components handle all states (loading, error, unavailable)
|
||||
- [ ] Entity existence checked before property access
|
||||
- [ ] Event subscriptions properly cleaned up
|
11
.github/dependabot.yml
vendored
11
.github/dependabot.yml
vendored
@ -1,11 +0,0 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: weekly
|
||||
time: "06:00"
|
||||
open-pull-requests-limit: 10
|
||||
labels:
|
||||
- Dependencies
|
||||
- GitHub Actions
|
51
.github/labeler.yml
vendored
51
.github/labeler.yml
vendored
@ -1,51 +0,0 @@
|
||||
Build:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- build-scripts/**
|
||||
- .browserslistrc
|
||||
- gulpfile.js
|
||||
|
||||
Cast:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- cast/src/**
|
||||
- src/cast/**
|
||||
|
||||
Demo:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- demo/src/**
|
||||
- src/fake_data/**
|
||||
|
||||
Design:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- gallery/src/**
|
||||
- src/fake_data/**
|
||||
|
||||
Dependencies:
|
||||
- any:
|
||||
- changed-files:
|
||||
# Match when only these files are changed (i.e. don't match PRs that happen to add or remove packages)
|
||||
- any-glob-to-all-files:
|
||||
- package.json
|
||||
- renovate.json
|
||||
- yarn.lock
|
||||
- .yarn/**
|
||||
- .yarnrc.yml
|
||||
- .nvmrc
|
||||
# Dependabot and Renovate branches always match (i.e. compatibility tweaks by members considered minor)
|
||||
- head-branch:
|
||||
- "^renovate/"
|
||||
- "^dependabot/"
|
||||
|
||||
GitHub Actions:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- .github/workflows/**
|
||||
- .github/*.yml
|
||||
|
||||
Supervisor:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- hassio/src/**
|
27
.github/lock.yml
vendored
Normal file
27
.github/lock.yml
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
# Configuration for Lock Threads - https://github.com/dessant/lock-threads
|
||||
|
||||
# Number of days of inactivity before a closed issue or pull request is locked
|
||||
daysUntilLock: 1
|
||||
|
||||
# Skip issues and pull requests created before a given timestamp. Timestamp must
|
||||
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
|
||||
skipCreatedBefore: 2020-01-01
|
||||
|
||||
# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
|
||||
exemptLabels: []
|
||||
|
||||
# Label to add before locking, such as `outdated`. Set to `false` to disable
|
||||
lockLabel: false
|
||||
|
||||
# Comment to post before locking. Set to `false` to disable
|
||||
lockComment: false
|
||||
|
||||
# Assign `resolved` as the reason for locking. Set to `false` to disable
|
||||
setLockReason: false
|
||||
|
||||
# Limit to only `issues` or `pulls`
|
||||
only: pulls
|
||||
|
||||
# Optionally, specify configuration settings just for `issues` or `pulls`
|
||||
issues:
|
||||
daysUntilLock: 30
|
5
.github/release-drafter.yml
vendored
5
.github/release-drafter.yml
vendored
@ -1,8 +1,3 @@
|
||||
categories:
|
||||
- title: "Dependency updates"
|
||||
collapse-after: 3
|
||||
labels:
|
||||
- "Dependencies"
|
||||
template: |
|
||||
## What's Changed
|
||||
|
||||
|
56
.github/stale.yml
vendored
Normal file
56
.github/stale.yml
vendored
Normal file
@ -0,0 +1,56 @@
|
||||
# Configuration for probot-stale - https://github.com/probot/stale
|
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request becomes stale
|
||||
daysUntilStale: 90
|
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
|
||||
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
|
||||
daysUntilClose: 7
|
||||
|
||||
# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled)
|
||||
onlyLabels: []
|
||||
|
||||
# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
|
||||
exemptLabels:
|
||||
- feature request
|
||||
- Help wanted
|
||||
- to do
|
||||
|
||||
# Set to true to ignore issues in a project (defaults to false)
|
||||
exemptProjects: true
|
||||
|
||||
# Set to true to ignore issues in a milestone (defaults to false)
|
||||
exemptMilestones: true
|
||||
|
||||
# Set to true to ignore issues with an assignee (defaults to false)
|
||||
exemptAssignees: false
|
||||
|
||||
# Label to use when marking as stale
|
||||
staleLabel: stale
|
||||
|
||||
# Comment to post when marking as stale. Set to `false` to disable
|
||||
markComment: >
|
||||
There hasn't been any activity on this issue recently. Due to the high number
|
||||
of incoming GitHub notifications, we have to clean some of the old issues,
|
||||
as many of them have already been resolved with the latest updates.
|
||||
|
||||
Please make sure to update to the latest Home Assistant version and check
|
||||
if that solves the issue. Let us know if that works for you by adding a
|
||||
comment 👍
|
||||
|
||||
This issue now has been marked as stale and will be closed if no further
|
||||
activity occurs. Thank you for your contributions.
|
||||
|
||||
# Comment to post when removing the stale label.
|
||||
# unmarkComment: >
|
||||
# Your comment here.
|
||||
|
||||
# Comment to post when closing a stale Issue or Pull Request.
|
||||
# closeComment: >
|
||||
# Your comment here.
|
||||
|
||||
# Limit the number of actions per hour, from 1-30. Default is 30
|
||||
limitPerRun: 30
|
||||
|
||||
# Limit to only `issues` or `pulls`
|
||||
only: issues
|
83
.github/workflows/cast_deployment.yaml
vendored
83
.github/workflows/cast_deployment.yaml
vendored
@ -1,83 +0,0 @@
|
||||
name: Cast deployment
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 0 * * *"
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --max_old_space_size=6144
|
||||
|
||||
jobs:
|
||||
deploy_dev:
|
||||
runs-on: ubuntu-latest
|
||||
name: Deploy Development
|
||||
if: github.event_name != 'push'
|
||||
environment:
|
||||
name: Cast Development
|
||||
url: ${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}
|
||||
steps:
|
||||
- name: Check out files from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
with:
|
||||
ref: dev
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
|
||||
- name: Install dependencies
|
||||
run: yarn install --immutable
|
||||
|
||||
- name: Build Cast
|
||||
run: ./node_modules/.bin/gulp build-cast
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Deploy to Netlify
|
||||
id: deploy
|
||||
run: |
|
||||
npx -y netlify-cli deploy --dir=cast/dist --alias dev
|
||||
env:
|
||||
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
||||
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_CAST_SITE_ID }}
|
||||
|
||||
deploy_master:
|
||||
runs-on: ubuntu-latest
|
||||
name: Deploy Production
|
||||
if: github.event_name == 'push'
|
||||
environment:
|
||||
name: Cast Production
|
||||
url: ${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}
|
||||
steps:
|
||||
- name: Check out files from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
with:
|
||||
ref: master
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
|
||||
- name: Install dependencies
|
||||
run: yarn install --immutable
|
||||
|
||||
- name: Build Cast
|
||||
run: ./node_modules/.bin/gulp build-cast
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Deploy to Netlify
|
||||
id: deploy
|
||||
run: |
|
||||
npx -y netlify-cli deploy --dir=cast/dist --prod
|
||||
env:
|
||||
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
||||
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_CAST_SITE_ID }}
|
157
.github/workflows/ci.yaml
vendored
157
.github/workflows/ci.yaml
vendored
@ -10,111 +10,118 @@ on:
|
||||
- dev
|
||||
- master
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --max_old_space_size=6144
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
name: Lint and check format
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out files from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
uses: actions/checkout@v2
|
||||
- name: Setting up Node.js
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
node-version: 12.x
|
||||
- name: Get yarn cache path
|
||||
id: yarn-cache-dir-path
|
||||
run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||
- name: Fetching Yarn cache
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
|
||||
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-yarn-
|
||||
- name: Install dependencies
|
||||
run: yarn install --immutable
|
||||
- name: Check for duplicate dependencies
|
||||
run: yarn dedupe --check
|
||||
- name: Build resources
|
||||
run: ./node_modules/.bin/gulp gen-icons-json build-translations build-locale-data gather-gallery-pages
|
||||
- name: Setup lint cache
|
||||
uses: actions/cache@v4.2.3
|
||||
with:
|
||||
path: |
|
||||
node_modules/.cache/prettier
|
||||
node_modules/.cache/eslint
|
||||
node_modules/.cache/typescript
|
||||
key: lint-${{ github.sha }}
|
||||
restore-keys: lint-
|
||||
run: yarn install
|
||||
env:
|
||||
CI: true
|
||||
- name: Build icons
|
||||
run: ./node_modules/.bin/gulp gen-icons-hassio gen-icons-mdi gen-icons-app
|
||||
- name: Build translations
|
||||
run: ./node_modules/.bin/gulp build-translations
|
||||
- name: Run eslint
|
||||
run: yarn run lint:eslint --quiet
|
||||
run: ./node_modules/.bin/eslint src hassio/src gallery/src
|
||||
- name: Run tslint
|
||||
run: ./node_modules/.bin/tslint 'src/**/*.ts' 'hassio/src/**/*.ts' 'gallery/src/**/*.ts' 'cast/src/**/*.ts' 'test-mocha/**/*.ts'
|
||||
- name: Run tsc
|
||||
run: yarn run lint:types
|
||||
- name: Run lit-analyzer
|
||||
run: yarn run lint:lit --quiet
|
||||
- name: Run prettier
|
||||
run: yarn run lint:prettier
|
||||
run: ./node_modules/.bin/tsc
|
||||
test:
|
||||
name: Run tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out files from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
uses: actions/checkout@v2
|
||||
- name: Setting up Node.js
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
node-version: 12.x
|
||||
- name: Get yarn cache path
|
||||
id: yarn-cache-dir-path
|
||||
run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||
- name: Fetching Yarn cache
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
|
||||
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-yarn-
|
||||
- name: Install dependencies
|
||||
run: yarn install --immutable
|
||||
- name: Build resources
|
||||
run: ./node_modules/.bin/gulp gen-icons-json build-translations build-locale-data
|
||||
- name: Run Tests
|
||||
run: yarn run test
|
||||
run: yarn install
|
||||
env:
|
||||
CI: true
|
||||
- name: Run Mocha
|
||||
run: npm run mocha
|
||||
build:
|
||||
name: Build frontend
|
||||
needs: [lint, test]
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint, test]
|
||||
steps:
|
||||
- name: Check out files from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
uses: actions/checkout@v2
|
||||
- name: Setting up Node.js
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
node-version: 12.x
|
||||
- name: Get yarn cache path
|
||||
id: yarn-cache-dir-path
|
||||
run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||
- name: Fetching Yarn cache
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
|
||||
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-yarn-
|
||||
- name: Install dependencies
|
||||
run: yarn install --immutable
|
||||
run: yarn install
|
||||
env:
|
||||
CI: true
|
||||
- name: Build Application
|
||||
run: ./node_modules/.bin/gulp build-app
|
||||
env:
|
||||
IS_TEST: "true"
|
||||
- name: Upload bundle stats
|
||||
uses: actions/upload-artifact@v4.6.2
|
||||
with:
|
||||
name: frontend-bundle-stats
|
||||
path: build/stats/*.json
|
||||
if-no-files-found: error
|
||||
TRAVIS: "true"
|
||||
supervisor:
|
||||
name: Build supervisor
|
||||
needs: [lint, test]
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint, test]
|
||||
steps:
|
||||
- name: Check out files from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
uses: actions/checkout@v2
|
||||
- name: Setting up Node.js
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
node-version: 12.x
|
||||
- name: Get yarn cache path
|
||||
id: yarn-cache-dir-path
|
||||
run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||
- name: Fetching Yarn cache
|
||||
uses: actions/cache@v1
|
||||
with:
|
||||
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
|
||||
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-yarn-
|
||||
- name: Install dependencies
|
||||
run: yarn install --immutable
|
||||
run: yarn install
|
||||
env:
|
||||
CI: true
|
||||
- name: Build Application
|
||||
run: ./node_modules/.bin/gulp build-hassio
|
||||
env:
|
||||
IS_TEST: "true"
|
||||
- name: Upload bundle stats
|
||||
uses: actions/upload-artifact@v4.6.2
|
||||
with:
|
||||
name: supervisor-bundle-stats
|
||||
path: build/stats/*.json
|
||||
if-no-files-found: error
|
||||
TRAVIS: "true"
|
||||
|
60
.github/workflows/codeql-analysis.yml
vendored
60
.github/workflows/codeql-analysis.yml
vendored
@ -1,60 +0,0 @@
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [dev, master]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [dev]
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# Override automatic language detection by changing the below list
|
||||
# Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
|
||||
language: ["javascript"]
|
||||
# Learn more...
|
||||
# https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4.2.2
|
||||
with:
|
||||
# We must fetch at least the immediate parents so that if this is
|
||||
# a pull request then we can checkout the head.
|
||||
fetch-depth: 2
|
||||
|
||||
# If this run was triggered by a pull request event, then checkout
|
||||
# the head of the pull request instead of the merge commit.
|
||||
- run: git checkout HEAD^2
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
39  .github/workflows/demo.yaml  vendored  Normal file
@@ -0,0 +1,39 @@
name: Demo

on:
  push:
    branches:
      - dev
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Check out files from GitHub
        uses: actions/checkout@v2
      - name: Setting up Node.js
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - name: Get yarn cache path
        id: yarn-cache-dir-path
        run: echo "::set-output name=dir::$(yarn cache dir)"
      - name: Fetching Yarn cache
        uses: actions/cache@v1
        with:
          path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-yarn-
      - name: Install dependencies
        run: yarn install
        env:
          CI: true
      - name: Build Demo
        run: ./node_modules/.bin/gulp build-demo
      - name: Deploy to Netlify
        uses: netlify/actions/cli@master
        env:
          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
          NETLIFY_SITE_ID: ${{ secrets.NETLIFY_DEMO_DEV_SITE_ID }}
        with:
          args: deploy --dir=demo/dist --prod
84  .github/workflows/demo_deployment.yaml  vendored
@ -1,84 +0,0 @@
|
||||
name: Demo deployment
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 0 * * *"
|
||||
push:
|
||||
branches:
|
||||
- dev
|
||||
- master
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --max_old_space_size=6144
|
||||
|
||||
jobs:
|
||||
deploy_dev:
|
||||
runs-on: ubuntu-latest
|
||||
name: Demo Development
|
||||
if: github.event_name != 'push' || github.ref_name != 'master'
|
||||
environment:
|
||||
name: Demo Development
|
||||
url: ${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}
|
||||
steps:
|
||||
- name: Check out files from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
with:
|
||||
ref: dev
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
|
||||
- name: Install dependencies
|
||||
run: yarn install --immutable
|
||||
|
||||
- name: Build Demo
|
||||
run: ./node_modules/.bin/gulp build-demo
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Deploy to Netlify
|
||||
id: deploy
|
||||
run: |
|
||||
npx -y netlify-cli deploy --dir=demo/dist --prod
|
||||
env:
|
||||
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
||||
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_DEMO_DEV_SITE_ID }}
|
||||
|
||||
deploy_master:
|
||||
runs-on: ubuntu-latest
|
||||
name: Demo Production
|
||||
if: github.event_name == 'push' && github.ref_name == 'master'
|
||||
environment:
|
||||
name: Demo Production
|
||||
url: ${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}
|
||||
steps:
|
||||
- name: Check out files from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
with:
|
||||
ref: master
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
|
||||
- name: Install dependencies
|
||||
run: yarn install --immutable
|
||||
|
||||
- name: Build Demo
|
||||
run: ./node_modules/.bin/gulp build-demo
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Deploy to Netlify
|
||||
id: deploy
|
||||
run: |
|
||||
npx -y netlify-cli deploy --dir=demo/dist --prod
|
||||
env:
|
||||
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
||||
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_DEMO_SITE_ID }}
|
41  .github/workflows/design_deployment.yaml  vendored
@@ -1,41 +0,0 @@
name: Design deployment

on:
  workflow_dispatch:
  schedule:
    - cron: "0 0 * * *"

env:
  NODE_OPTIONS: --max_old_space_size=6144

jobs:
  deploy:
    runs-on: ubuntu-latest
    environment:
      name: Design
      url: ${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}
    steps:
      - name: Check out files from GitHub
        uses: actions/checkout@v4.2.2

      - name: Setup Node
        uses: actions/setup-node@v4.4.0
        with:
          node-version-file: ".nvmrc"
          cache: yarn

      - name: Install dependencies
        run: yarn install --immutable

      - name: Build Gallery
        run: ./node_modules/.bin/gulp build-gallery
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Deploy to Netlify
        id: deploy
        run: |
          npx -y netlify-cli deploy --dir=gallery/dist --prod
        env:
          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
          NETLIFY_SITE_ID: ${{ secrets.NETLIFY_GALLERY_SITE_ID }}
52  .github/workflows/design_preview.yaml  vendored
@ -1,52 +0,0 @@
|
||||
name: Design preview
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- labeled
|
||||
branches:
|
||||
- dev
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --max_old_space_size=6144
|
||||
|
||||
jobs:
|
||||
preview:
|
||||
runs-on: ubuntu-latest
|
||||
# Skip running on forks since it won't have access to secrets
|
||||
# Skip running PRs without 'needs design preview' label
|
||||
if: github.repository == 'home-assistant/frontend' && contains(github.event.pull_request.labels.*.name, 'needs design preview')
|
||||
steps:
|
||||
- name: Check out files from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
|
||||
- name: Install dependencies
|
||||
run: yarn install --immutable
|
||||
|
||||
- name: Build Gallery
|
||||
run: ./node_modules/.bin/gulp build-gallery
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Deploy preview to Netlify
|
||||
id: deploy
|
||||
run: |
|
||||
npx -y netlify-cli deploy --dir=gallery/dist --alias "deploy-preview-${{ github.event.number }}" \
|
||||
--json > deploy_output.json
|
||||
env:
|
||||
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
||||
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_GALLERY_SITE_ID }}
|
||||
|
||||
- name: Generate summary
|
||||
run: |
|
||||
NETLIFY_LIVE_URL=$(jq -r '.deploy_url' deploy_output.json)
|
||||
echo "$NETLIFY_LIVE_URL" >> "$GITHUB_STEP_SUMMARY"
|
15  .github/workflows/labeler.yaml  vendored
@@ -1,15 +0,0 @@
name: "Pull Request Labeler"

on: pull_request_target

jobs:
  triage:
    permissions:
      contents: read
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - name: Apply labels
        uses: actions/labeler@v5.0.0
        with:
          sync-labels: true
21  .github/workflows/lock.yml  vendored
@@ -1,21 +0,0 @@
name: Lock

# yamllint disable-line rule:truthy
on:
  schedule:
    - cron: "0 * * * *"

jobs:
  lock:
    runs-on: ubuntu-latest
    steps:
      - uses: dessant/lock-threads@v5.0.1
        with:
          github-token: ${{ github.token }}
          process-only: "issues, prs"
          issue-lock-inactive-days: "30"
          issue-exclude-created-before: "2020-10-01T00:00:00Z"
          issue-lock-reason: ""
          pr-lock-inactive-days: "1"
          pr-exclude-created-before: "2020-11-01T00:00:00Z"
          pr-lock-reason: ""
71  .github/workflows/nightly.yaml  vendored
@ -1,71 +0,0 @@
|
||||
name: Nightly
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 1 * * *"
|
||||
|
||||
env:
|
||||
PYTHON_VERSION: "3.13"
|
||||
NODE_OPTIONS: --max_old_space_size=6144
|
||||
|
||||
permissions:
|
||||
actions: none
|
||||
|
||||
jobs:
|
||||
nightly:
|
||||
name: Nightly
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Set up Python ${{ env.PYTHON_VERSION }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
|
||||
- name: Install dependencies
|
||||
run: yarn install
|
||||
|
||||
- name: Download translations
|
||||
run: ./script/translations_download
|
||||
env:
|
||||
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
|
||||
|
||||
- name: Bump version
|
||||
run: script/version_bump.js nightly
|
||||
|
||||
- name: Build nightly Python wheels
|
||||
run: |
|
||||
pip install build
|
||||
yarn install
|
||||
export SKIP_FETCH_NIGHTLY_TRANSLATIONS=1
|
||||
script/build_frontend
|
||||
rm -rf dist home_assistant_frontend.egg-info
|
||||
python3 -m build
|
||||
|
||||
- name: Archive translations
|
||||
run: tar -czvf translations.tar.gz translations
|
||||
|
||||
- name: Upload build artifacts
|
||||
uses: actions/upload-artifact@v4.6.2
|
||||
with:
|
||||
name: wheels
|
||||
path: dist/home_assistant_frontend*.whl
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Upload translations
|
||||
uses: actions/upload-artifact@v4.6.2
|
||||
with:
|
||||
name: translations
|
||||
path: translations.tar.gz
|
||||
if-no-files-found: error
|
25  .github/workflows/relative-ci.yaml  vendored
@@ -1,25 +0,0 @@
name: RelativeCI

on:
  workflow_run:
    workflows: [CI]
    types:
      - completed

jobs:
  upload:
    name: Upload stats
    if: ${{ github.event.workflow_run.conclusion == 'success' }}
    strategy:
      matrix:
        bundle: [frontend, supervisor]
        build: [modern, legacy]
    runs-on: ubuntu-latest
    steps:
      - name: Send bundle stats and build information to RelativeCI
        uses: relative-ci/agent-action@v3.0.1
        with:
          key: ${{ secrets[format('RELATIVE_CI_KEY_{0}_{1}', matrix.bundle, matrix.build)] }}
          token: ${{ github.token }}
          artifactName: ${{ format('{0}-bundle-stats', matrix.bundle) }}
          webpackStatsFile: ${{ format('{0}-{1}.json', matrix.bundle, matrix.build) }}
23  .github/workflows/release-drafter.yaml  vendored
@@ -1,23 +0,0 @@
name: Release Drafter

on:
  push:
    branches:
      - dev

permissions:
  contents: read

jobs:
  update_release_draft:
    permissions:
      # write permission for contents is required to create a github release
      contents: write
      # write permission for pull-requests is required for autolabeler
      # otherwise, read permission is required at least
      pull-requests: read
    runs-on: ubuntu-latest
    steps:
      - uses: release-drafter/release-drafter@v6.1.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
141  .github/workflows/release.yaml  vendored
@ -1,141 +0,0 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- published
|
||||
|
||||
env:
|
||||
PYTHON_VERSION: "3.13"
|
||||
NODE_OPTIONS: --max_old_space_size=6144
|
||||
|
||||
# Set default workflow permissions
|
||||
# All scopes not mentioned here are set to no access
|
||||
# https://docs.github.com/en/actions/security-guides/automatic-token-authentication#permissions-for-the-github_token
|
||||
permissions:
|
||||
actions: none
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: Release
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write # Required to upload release assets
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Set up Python ${{ env.PYTHON_VERSION }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: Verify version
|
||||
uses: home-assistant/actions/helpers/verify-version@master
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
|
||||
- name: Install dependencies
|
||||
run: yarn install
|
||||
|
||||
- name: Download Translations
|
||||
run: ./script/translations_download
|
||||
env:
|
||||
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
|
||||
- name: Build and release package
|
||||
run: |
|
||||
python3 -m pip install twine build
|
||||
export TWINE_USERNAME="__token__"
|
||||
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
|
||||
export SKIP_FETCH_NIGHTLY_TRANSLATIONS=1
|
||||
script/release
|
||||
|
||||
- name: Upload release assets
|
||||
uses: softprops/action-gh-release@v2.3.2
|
||||
with:
|
||||
files: |
|
||||
dist/*.whl
|
||||
dist/*.tar.gz
|
||||
|
||||
wheels-init:
|
||||
name: Init wheels build
|
||||
needs: release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Generate requirements.txt
|
||||
run: |
|
||||
# Sleep to give pypi time to populate the new version across mirrors
|
||||
sleep 240
|
||||
version=$(echo "${{ github.ref }}" | awk -F"/" '{print $NF}' )
|
||||
echo "home-assistant-frontend==$version" > ./requirements.txt
|
||||
|
||||
- name: Build wheels
|
||||
uses: home-assistant/wheels@2025.07.0
|
||||
with:
|
||||
abi: cp313
|
||||
tag: musllinux_1_2
|
||||
arch: amd64
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
requirements: "requirements.txt"
|
||||
|
||||
release-landing-page:
|
||||
name: Release landing-page frontend
|
||||
if: github.event.release.prerelease == false
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write # Required to upload release assets
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
- name: Install dependencies
|
||||
run: yarn install
|
||||
- name: Download Translations
|
||||
run: ./script/translations_download
|
||||
env:
|
||||
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
|
||||
- name: Build landing-page
|
||||
run: landing-page/script/build_landing_page
|
||||
- name: Tar folder
|
||||
run: tar -czf landing-page/home_assistant_frontend_landingpage-${{ github.event.release.tag_name }}.tar.gz -C landing-page/dist .
|
||||
- name: Upload release asset
|
||||
uses: softprops/action-gh-release@v2.3.2
|
||||
with:
|
||||
files: landing-page/home_assistant_frontend_landingpage-${{ github.event.release.tag_name }}.tar.gz
|
||||
|
||||
release-supervisor:
|
||||
name: Release supervisor frontend
|
||||
if: github.event.release.prerelease == false
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write # Required to upload release assets
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4.4.0
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
cache: yarn
|
||||
- name: Install dependencies
|
||||
run: yarn install
|
||||
- name: Download Translations
|
||||
run: ./script/translations_download
|
||||
env:
|
||||
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
|
||||
- name: Build supervisor
|
||||
run: hassio/script/build_hassio
|
||||
- name: Tar folder
|
||||
run: tar -czf hassio/home_assistant_frontend_supervisor-${{ github.event.release.tag_name }}.tar.gz -C hassio/build .
|
||||
- name: Upload release asset
|
||||
uses: softprops/action-gh-release@v2.3.2
|
||||
with:
|
||||
files: hassio/home_assistant_frontend_supervisor-${{ github.event.release.tag_name }}.tar.gz
|
58  .github/workflows/restrict-task-creation.yml  vendored
@ -1,58 +0,0 @@
|
||||
name: Restrict task creation
|
||||
|
||||
# yamllint disable-line rule:truthy
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
jobs:
|
||||
check-authorization:
|
||||
runs-on: ubuntu-latest
|
||||
# Only run if this is a Task issue type (from the issue form)
|
||||
if: github.event.issue.issue_type == 'Task'
|
||||
steps:
|
||||
- name: Check if user is authorized
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const issueAuthor = context.payload.issue.user.login;
|
||||
|
||||
// Check if user is an organization member
|
||||
try {
|
||||
await github.rest.orgs.checkMembershipForUser({
|
||||
org: 'home-assistant',
|
||||
username: issueAuthor
|
||||
});
|
||||
console.log(`✅ ${issueAuthor} is an organization member`);
|
||||
return; // Authorized
|
||||
} catch (error) {
|
||||
console.log(`❌ ${issueAuthor} is not authorized to create Task issues`);
|
||||
}
|
||||
|
||||
// Close the issue with a comment
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: `Hi @${issueAuthor}, thank you for your contribution!\n\n` +
|
||||
`Task issues are restricted to Open Home Foundation staff and authorized contributors.\n\n` +
|
||||
`If you would like to:\n` +
|
||||
`- Report a bug: Please use the [bug report form](https://github.com/home-assistant/frontend/issues/new?template=bug_report.yml)\n` +
|
||||
`- Request a feature: Please submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)\n\n` +
|
||||
`If you believe you should have access to create Task issues, please contact the maintainers.`
|
||||
});
|
||||
|
||||
await github.rest.issues.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
state: 'closed'
|
||||
});
|
||||
|
||||
// Add a label to indicate this was auto-closed
|
||||
await github.rest.issues.addLabels({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
labels: ['auto-closed']
|
||||
});
|
42  .github/workflows/stale.yml  vendored
@ -1,42 +0,0 @@
|
||||
name: Stale
|
||||
|
||||
# yamllint disable-line rule:truthy
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 * * * *"
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: 90 days stale policy
|
||||
uses: actions/stale@v9.1.0
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
days-before-stale: 90
|
||||
days-before-close: 7
|
||||
operations-per-run: 25
|
||||
remove-stale-when-updated: true
|
||||
stale-issue-label: "stale"
|
||||
exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,feature-request,feature%20request"
|
||||
stale-issue-message: >
|
||||
There hasn't been any activity on this issue recently. Due to the
|
||||
high number of incoming GitHub notifications, we have to clean some
|
||||
of the old issues, as many of them have already been resolved with
|
||||
the latest updates.
|
||||
|
||||
Please make sure to update to the latest Home Assistant version and
|
||||
check if that solves the issue. Let us know if that works for you by
|
||||
adding a comment 👍
|
||||
|
||||
This issue has now been marked as stale and will be closed if no
|
||||
further activity occurs. Thank you for your contributions.
|
||||
|
||||
stale-pr-label: "stale"
|
||||
exempt-pr-labels: "no-stale"
|
||||
stale-pr-message: >
|
||||
There hasn't been any activity on this pull request recently. This
|
||||
pull request has been automatically marked as stale because of that
|
||||
and will be closed if no further activity occurs within 7 days.
|
||||
|
||||
Thank you for your contributions.
|
22  .github/workflows/translations.yaml  vendored
@@ -1,22 +0,0 @@
name: Translations

on:
  workflow_dispatch:
  push:
    branches:
      - dev
    paths:
      - src/translations/en.json

jobs:
  upload:
    name: Upload
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v4.2.2

      - name: Upload Translations
        run: |
          export LOKALISE_TOKEN="${{ secrets.LOKALISE_TOKEN }}"
          ./script/translations_upload_base
51  .gitignore  vendored
@@ -1,23 +1,11 @@
.DS_Store
.reify-cache

# build
build/
dist/
/hass_frontend/
/translations/

# yarn
.yarn/*
!.yarn/patches
!.yarn/releases
!.yarn/plugins
!.yarn/sdks
!.yarn/versions
.pnp.*
/node_modules/
yarn-error.log
build
build-translations/*
node_modules/*
npm-debug.log
.DS_Store
hass_frontend/*
.reify-cache
demo/hademo-icons.html

# Python stuff
*.py[cod]
@@ -27,33 +15,22 @@ npm-debug.log
# venv stuff
pyvenv.cfg
pip-selfcheck.json
/venv/
venv
.venv
lib
bin
dist

# vscode
.vscode/*
!.vscode/extensions.json
!.vscode/launch.json
!.vscode/tasks.json

# Cast dev settings
# Cast dev settings
src/cast/dev_const.ts

# Secrets
.lokalise_token
yarn-error.log

# asdf
#asdf
.tool-versions

# Home Assistant config
/config/

# Jetbrains
/.idea/

# test coverage
test/coverage/

# AI tooling
.claude
6  .hound.yml  Normal file
@@ -0,0 +1,6 @@
jshint:
  enabled: false

eslint:
  enabled: true
  config_file: .eslintrc-hound.json
@@ -1 +0,0 @@
yarn run lint-staged --relative
@@ -1,4 +0,0 @@
CLA.md
CODE_OF_CONDUCT.md
LICENSE.md
PULL_REQUEST_TEMPLATE.md
8  .vscode/extensions.json  vendored
@@ -1,11 +1,9 @@
{
  "recommendations": [
    "dbaeumer.vscode-eslint",
    "ms-vscode.vscode-typescript-tslint-plugin",
    "esbenp.prettier-vscode",
    "runem.lit-plugin",
    "github.vscode-pull-request-github",
    "eamodio.gitlens",
    "vitest.explorer",
    "yeion7.styled-global-variables-autocomplete"
    "bierner.lit-html",
    "runem.lit-plugin"
  ]
}
42  .vscode/launch.json  vendored
@ -1,42 +0,0 @@
|
||||
{
|
||||
// https://github.com/microsoft/vscode-js-debug/blob/master/OPTIONS.md
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Debug Frontend",
|
||||
"request": "launch",
|
||||
"type": "pwa-chrome",
|
||||
"url": "http://localhost:8123/",
|
||||
"webRoot": "${workspaceFolder}/hass_frontend",
|
||||
"disableNetworkCache": true,
|
||||
"preLaunchTask": "Develop Frontend",
|
||||
"outFiles": ["${workspaceFolder}/hass_frontend/frontend_latest/*.js"]
|
||||
},
|
||||
{
|
||||
"name": "Debug Gallery",
|
||||
"request": "launch",
|
||||
"type": "pwa-chrome",
|
||||
"url": "http://localhost:8100/",
|
||||
"webRoot": "${workspaceFolder}/gallery/dist",
|
||||
"disableNetworkCache": true,
|
||||
"preLaunchTask": "Develop Gallery"
|
||||
},
|
||||
{
|
||||
"name": "Debug Demo",
|
||||
"request": "launch",
|
||||
"type": "pwa-chrome",
|
||||
"url": "http://localhost:8090/",
|
||||
"webRoot": "${workspaceFolder}/demo/dist",
|
||||
"disableNetworkCache": true,
|
||||
"preLaunchTask": "Develop Demo"
|
||||
},
|
||||
{
|
||||
"name": "Debug Cast",
|
||||
"request": "launch",
|
||||
"type": "pwa-chrome",
|
||||
"url": "http://localhost:8080/",
|
||||
"webRoot": "${workspaceFolder}/cast/dist",
|
||||
"disableNetworkCache": true,
|
||||
"preLaunchTask": "Develop Cast"
|
||||
}
|
||||
]
|
||||
}
|
288  .vscode/tasks.json  vendored
@ -1,288 +0,0 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Develop and serve Frontend",
|
||||
"type": "shell",
|
||||
"command": "script/develop_and_serve -c ${input:coreUrl}",
|
||||
// Sync changes here to other tasks until issue resolved
|
||||
// https://github.com/Microsoft/vscode/issues/61497
|
||||
"problemMatcher": {
|
||||
"owner": "ha-build",
|
||||
"source": "ha-build",
|
||||
"fileLocation": "absolute",
|
||||
"severity": "error",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
|
||||
"severity": 1,
|
||||
"file": 2,
|
||||
"message": 3,
|
||||
"line": 4,
|
||||
"column": 5
|
||||
}
|
||||
],
|
||||
"background": {
|
||||
"activeOnStart": true,
|
||||
"beginsPattern": "Changes detected. Starting compilation",
|
||||
"endsPattern": "Build done @"
|
||||
}
|
||||
},
|
||||
"isBackground": true,
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"runOptions": {
|
||||
"instanceLimit": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Develop Frontend",
|
||||
"type": "gulp",
|
||||
"task": "develop-app",
|
||||
// Sync changes here to other tasks until issue resolved
|
||||
// https://github.com/Microsoft/vscode/issues/61497
|
||||
"problemMatcher": {
|
||||
"owner": "ha-build",
|
||||
"source": "ha-build",
|
||||
"fileLocation": "absolute",
|
||||
"severity": "error",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
|
||||
"severity": 1,
|
||||
"file": 2,
|
||||
"message": 3,
|
||||
"line": 4,
|
||||
"column": 5
|
||||
}
|
||||
],
|
||||
"background": {
|
||||
"activeOnStart": true,
|
||||
"beginsPattern": "Changes detected. Starting compilation",
|
||||
"endsPattern": "Build done @"
|
||||
}
|
||||
},
|
||||
"isBackground": true,
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"runOptions": {
|
||||
"instanceLimit": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Develop Supervisor panel",
|
||||
"type": "gulp",
|
||||
"task": "develop-hassio",
|
||||
"problemMatcher": {
|
||||
"owner": "ha-build",
|
||||
"source": "ha-build",
|
||||
"fileLocation": "absolute",
|
||||
"severity": "error",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
|
||||
"severity": 1,
|
||||
"file": 2,
|
||||
"message": 3,
|
||||
"line": 4,
|
||||
"column": 5
|
||||
}
|
||||
],
|
||||
"background": {
|
||||
"activeOnStart": true,
|
||||
"beginsPattern": "Changes detected. Starting compilation",
|
||||
"endsPattern": "Build done @"
|
||||
}
|
||||
},
|
||||
"isBackground": true,
|
||||
"group": "build",
|
||||
"runOptions": {
|
||||
"instanceLimit": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Develop Gallery",
|
||||
"type": "gulp",
|
||||
"task": "develop-gallery",
|
||||
"problemMatcher": {
|
||||
"owner": "ha-build",
|
||||
"source": "ha-build",
|
||||
"fileLocation": "absolute",
|
||||
"severity": "error",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
|
||||
"severity": 1,
|
||||
"file": 2,
|
||||
"message": 3,
|
||||
"line": 4,
|
||||
"column": 5
|
||||
}
|
||||
],
|
||||
"background": {
|
||||
"activeOnStart": true,
|
||||
"beginsPattern": "Changes detected. Starting compilation",
|
||||
"endsPattern": "Build done @"
|
||||
}
|
||||
},
|
||||
|
||||
"isBackground": true,
|
||||
"group": "build",
|
||||
"runOptions": {
|
||||
"instanceLimit": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Develop Landing Page",
|
||||
"type": "gulp",
|
||||
"task": "develop-landing-page",
|
||||
"problemMatcher": {
|
||||
"owner": "ha-build",
|
||||
"source": "ha-build",
|
||||
"fileLocation": "absolute",
|
||||
"severity": "error",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
|
||||
"severity": 1,
|
||||
"file": 2,
|
||||
"message": 3,
|
||||
"line": 4,
|
||||
"column": 5
|
||||
}
|
||||
],
|
||||
"background": {
|
||||
"activeOnStart": true,
|
||||
"beginsPattern": "Changes detected. Starting compilation",
|
||||
"endsPattern": "Build done @"
|
||||
}
|
||||
},
|
||||
|
||||
"isBackground": true,
|
||||
"group": "build",
|
||||
"runOptions": {
|
||||
"instanceLimit": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Develop Demo",
|
||||
"type": "gulp",
|
||||
"task": "develop-demo",
|
||||
"problemMatcher": {
|
||||
"owner": "ha-build",
|
||||
"source": "ha-build",
|
||||
"fileLocation": "absolute",
|
||||
"severity": "error",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
|
||||
"severity": 1,
|
||||
"file": 2,
|
||||
"message": 3,
|
||||
"line": 4,
|
||||
"column": 5
|
||||
}
|
||||
],
|
||||
"background": {
|
||||
"activeOnStart": true,
|
||||
"beginsPattern": "Changes detected. Starting compilation",
|
||||
"endsPattern": "Build done @"
|
||||
}
|
||||
},
|
||||
|
||||
"isBackground": true,
|
||||
"group": "build",
|
||||
"runOptions": {
|
||||
"instanceLimit": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Develop Cast",
|
||||
"type": "gulp",
|
||||
"task": "develop-cast",
|
||||
"problemMatcher": {
|
||||
"owner": "ha-build",
|
||||
"source": "ha-build",
|
||||
"fileLocation": "absolute",
|
||||
"severity": "error",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
|
||||
"severity": 1,
|
||||
"file": 2,
|
||||
"message": 3,
|
||||
"line": 4,
|
||||
"column": 5
|
||||
}
|
||||
],
|
||||
"background": {
|
||||
"activeOnStart": true,
|
||||
"beginsPattern": "Changes detected. Starting compilation",
|
||||
"endsPattern": "Build done @"
|
||||
}
|
||||
},
|
||||
|
||||
"isBackground": true,
|
||||
"group": "build",
|
||||
"runOptions": {
|
||||
"instanceLimit": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Run HA Core in devcontainer",
|
||||
"type": "shell",
|
||||
"command": "script/core",
|
||||
"isBackground": true,
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"problemMatcher": [],
|
||||
"runOptions": {
|
||||
"instanceLimit": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Run HA Core for Supervisor in devcontainer",
|
||||
"type": "shell",
|
||||
"command": "SUPERVISOR=${input:supervisorHost} SUPERVISOR_TOKEN=${input:supervisorToken} script/core",
|
||||
"isBackground": true,
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"problemMatcher": [],
|
||||
"runOptions": {
|
||||
"instanceLimit": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Setup and fetch nightly translations",
|
||||
"type": "gulp",
|
||||
"task": "setup-and-fetch-nightly-translations",
|
||||
"problemMatcher": []
|
||||
}
|
||||
],
|
||||
"inputs": [
|
||||
{
|
||||
"id": "supervisorHost",
|
||||
"type": "promptString",
|
||||
"description": "The IP of the Supervisor host running the Remote API proxy add-on"
|
||||
},
|
||||
{
|
||||
"id": "supervisorToken",
|
||||
"type": "promptString",
|
||||
"description": "The token for the Remote API proxy add-on"
|
||||
},
|
||||
{
|
||||
"id": "coreUrl",
|
||||
"type": "promptString",
|
||||
"description": "The URL of the Home Assistant Core instance",
|
||||
"default": "http://127.0.0.1:8123"
|
||||
}
|
||||
]
|
||||
}
|
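The `ha-build` problem matcher that recurs in the tasks above relies on a single regular expression to turn a compiler failure into a severity/file/message/line/column problem entry. A minimal TypeScript sketch of how that pattern splits an error line; the sample error string and file path are invented for illustration:

```ts
// Illustrative only: the capture groups mirror the problemMatcher "pattern"
// entry above (severity 1, file 2, message 3, line 4, column 5).
const pattern = /(SyntaxError): (.+): (.+) \((\d+):(\d+)\)/;

// Hypothetical build output line of the shape the matcher expects.
const sample =
  "SyntaxError: /workspaces/frontend/src/panels/lovelace/ha-panel-lovelace.ts: Unexpected token (42:17)";

const match = sample.match(pattern);
if (match) {
  const [, severity, file, message, line, column] = match;
  // -> severity "SyntaxError", file ".../ha-panel-lovelace.ts",
  //    message "Unexpected token", line "42", column "17"
  console.log({ severity, file, message, line, column });
}
```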
@@ -1,22 +0,0 @@
diff --git a/mwc-formfield-base.js b/mwc-formfield-base.js
index 7b763326d7d51835ad52646bfbc80fe21989abd3..f2baa8224e6d03df1fdb0b9fd03f5c6d77fc8747 100644
--- a/mwc-formfield-base.js
+++ b/mwc-formfield-base.js
@@ -9,7 +9,7 @@ import { BaseElement } from '@material/mwc-base/base-element.js';
import { FormElement } from '@material/mwc-base/form-element.js';
import { observer } from '@material/mwc-base/observer.js';
import { html } from 'lit';
-import { property, query, queryAssignedNodes } from 'lit/decorators.js';
+import { property, query, queryAssignedElements } from 'lit/decorators.js';
import { classMap } from 'lit/directives/class-map.js';
export class FormfieldBase extends BaseElement {
    constructor() {
@@ -96,7 +96,7 @@ __decorate([
    query('.mdc-form-field')
], FormfieldBase.prototype, "mdcRoot", void 0);
__decorate([
-    queryAssignedNodes('', true, '*')
+    queryAssignedElements({ slot: "", flatten: true, selector: "*" })
], FormfieldBase.prototype, "slottedInputs", void 0);
__decorate([
    query('label')
@@ -1,26 +0,0 @@
diff --git a/mwc-list-base.js b/mwc-list-base.js
index 1ba95b6a01dcecea4d85b5cbbbcc3dfb04c40d5f..dced13fdb7929c490d6661b1bbe7e9f96dcd2285 100644
--- a/mwc-list-base.js
+++ b/mwc-list-base.js
@@ -11,7 +11,7 @@ import { BaseElement } from '@material/mwc-base/base-element.js';
import { observer } from '@material/mwc-base/observer.js';
import { deepActiveElementPath, doesElementContainFocus, isNodeElement } from '@material/mwc-base/utils.js';
import { html } from 'lit';
-import { property, query, queryAssignedNodes } from 'lit/decorators.js';
+import { property, query, queryAssignedElements } from 'lit/decorators.js';
import { ifDefined } from 'lit/directives/if-defined.js';
import MDCListFoundation, { isIndexSet } from './mwc-list-foundation.js';
export { createSetFromIndex, isEventMulti, isIndexSet } from './mwc-list-foundation.js';
@@ -425,10 +425,10 @@ __decorate([
query('.mdc-deprecated-list')
], ListBase.prototype, "mdcRoot", void 0);
__decorate([
-    queryAssignedNodes('', true, '*')
+    queryAssignedElements({ flatten: true, selector: "*" })
], ListBase.prototype, "assignedElements", void 0);
__decorate([
-    queryAssignedNodes('', true, '[tabindex="0"]')
+    queryAssignedElements({ flatten: true, selector: '[tabindex="0"]' })
], ListBase.prototype, "tabbableElements", void 0);
__decorate([
property({ type: Boolean }),
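Both patches above swap the removed `queryAssignedNodes(slot, flatten, selector)` decorator for Lit's newer `queryAssignedElements(options)` form. A minimal, illustrative TypeScript sketch of the same migration on a hypothetical stand-alone element (not part of this repository), assuming the usual Lit decorator compiler settings:

```ts
// Illustrative only: shows the decorator change the patches apply to the
// vendored MWC base classes. Element name and markup are hypothetical.
import { LitElement, html } from "lit";
import { customElement, queryAssignedElements } from "lit/decorators.js";

@customElement("demo-slot-host")
export class DemoSlotHost extends LitElement {
  // Old Lit API, removed upstream:
  //   @queryAssignedNodes("", true, "*") slottedInputs!: Node[];
  // New API, as used in the patches: flattened slotted *elements*
  // matching a selector, instead of raw nodes.
  @queryAssignedElements({ slot: "", flatten: true, selector: "*" })
  slottedInputs!: HTMLElement[];

  protected render() {
    return html`<slot @slotchange=${this._report}></slot>`;
  }

  private _report() {
    // The decorated getter re-queries the default slot on each access.
    console.log(`slotted elements: ${this.slottedInputs.length}`);
  }
}
```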
File diff suppressed because one or more lines are too long
@ -1,60 +0,0 @@
|
||||
diff --git a/modular/sortable.core.esm.js b/modular/sortable.core.esm.js
|
||||
index 8b5e49b011713c8859c669069fbe85ce53974e1d..6a0afc92787157b8a31c38cc5f67dfa526090a00 100644
|
||||
--- a/modular/sortable.core.esm.js
|
||||
+++ b/modular/sortable.core.esm.js
|
||||
@@ -1781,11 +1781,16 @@ Sortable.prototype = /** @lends Sortable.prototype */{
|
||||
}
|
||||
if (_onMove(rootEl, el, dragEl, dragRect, target, targetRect, evt, !!target) !== false) {
|
||||
capture();
|
||||
- if (elLastChild && elLastChild.nextSibling) {
|
||||
- // the last draggable element is not the last node
|
||||
- el.insertBefore(dragEl, elLastChild.nextSibling);
|
||||
- } else {
|
||||
- el.appendChild(dragEl);
|
||||
+ try {
|
||||
+ if (elLastChild && elLastChild.nextSibling) {
|
||||
+ // the last draggable element is not the last node
|
||||
+ el.insertBefore(dragEl, elLastChild.nextSibling);
|
||||
+ } else {
|
||||
+ el.appendChild(dragEl);
|
||||
+ }
|
||||
+ }
|
||||
+ catch(err) {
|
||||
+ return completed(false);
|
||||
}
|
||||
parentEl = el; // actualization
|
||||
|
||||
@@ -1802,7 +1807,12 @@ Sortable.prototype = /** @lends Sortable.prototype */{
|
||||
targetRect = getRect(target);
|
||||
if (_onMove(rootEl, el, dragEl, dragRect, target, targetRect, evt, false) !== false) {
|
||||
capture();
|
||||
- el.insertBefore(dragEl, firstChild);
|
||||
+ try {
|
||||
+ el.insertBefore(dragEl, firstChild);
|
||||
+ }
|
||||
+ catch(err) {
|
||||
+ return completed(false);
|
||||
+ }
|
||||
parentEl = el; // actualization
|
||||
|
||||
changed();
|
||||
@@ -1849,10 +1859,15 @@ Sortable.prototype = /** @lends Sortable.prototype */{
|
||||
_silent = true;
|
||||
setTimeout(_unsilent, 30);
|
||||
capture();
|
||||
- if (after && !nextSibling) {
|
||||
- el.appendChild(dragEl);
|
||||
- } else {
|
||||
- target.parentNode.insertBefore(dragEl, after ? nextSibling : target);
|
||||
+ try {
|
||||
+ if (after && !nextSibling) {
|
||||
+ el.appendChild(dragEl);
|
||||
+ } else {
|
||||
+ target.parentNode.insertBefore(dragEl, after ? nextSibling : target);
|
||||
+ }
|
||||
+ }
|
||||
+ catch(err) {
|
||||
+ return completed(false);
|
||||
}
|
||||
|
||||
// Undo chrome's scroll adjustment (has no effect on other browsers)
|
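The Sortable patch above wraps each DOM insertion performed during a drag in try/catch so that a failing `insertBefore`/`appendChild` aborts the move instead of throwing. A small TypeScript sketch of that defensive pattern, with a hypothetical `onFailure` callback standing in for Sortable's `completed(false)` bail-out:

```ts
// Illustrative only: defensive insertion in the spirit of the patch above.
function safeInsert(
  container: HTMLElement,
  dragEl: HTMLElement,
  reference: Node | null,
  onFailure: () => void
): void {
  try {
    if (reference) {
      // Mirrors the patched insertBefore branches.
      container.insertBefore(dragEl, reference);
    } else {
      container.appendChild(dragEl);
    }
  } catch (err) {
    // e.g. NotFoundError when the reference node was removed mid-drag.
    onFailure();
  }
}
```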
@ -1,55 +0,0 @@
|
||||
diff --git a/build/inject-manifest.js b/build/inject-manifest.js
|
||||
index 60e3d2bb51c11a19fbbedbad65e101082ec41c36..fed6026630f43f86e25446383982cf6fb694313b 100644
|
||||
--- a/build/inject-manifest.js
|
||||
+++ b/build/inject-manifest.js
|
||||
@@ -104,7 +104,7 @@ async function injectManifest(config) {
|
||||
replaceString: manifestString,
|
||||
searchString: options.injectionPoint,
|
||||
});
|
||||
- filesToWrite[options.swDest] = source;
|
||||
+ filesToWrite[options.swDest] = source.replace(url, encodeURI(upath_1.default.basename(destPath)));
|
||||
filesToWrite[destPath] = map;
|
||||
}
|
||||
else {
|
||||
diff --git a/build/lib/translate-url-to-sourcemap-paths.js b/build/lib/translate-url-to-sourcemap-paths.js
|
||||
index 3220c5474eeac6e8a56ca9b2ac2bd9be48529e43..5f003879a904d4840529a42dd056d288fd213771 100644
|
||||
--- a/build/lib/translate-url-to-sourcemap-paths.js
|
||||
+++ b/build/lib/translate-url-to-sourcemap-paths.js
|
||||
@@ -22,7 +22,7 @@ function translateURLToSourcemapPaths(url, swSrc, swDest) {
|
||||
const possibleSrcPath = upath_1.default.resolve(upath_1.default.dirname(swSrc), url);
|
||||
if (fs_extra_1.default.existsSync(possibleSrcPath)) {
|
||||
srcPath = possibleSrcPath;
|
||||
- destPath = upath_1.default.resolve(upath_1.default.dirname(swDest), url);
|
||||
+ destPath = `${swDest}.map`;
|
||||
}
|
||||
else {
|
||||
warning = `${errors_1.errors['cant-find-sourcemap']} ${possibleSrcPath}`;
|
||||
diff --git a/src/inject-manifest.ts b/src/inject-manifest.ts
|
||||
index 8795ddcaa77aea7b0356417e4bc4b19e2b3f860c..fcdc68342d9ac53936c9ed40a9ccfc2f5070cad3 100644
|
||||
--- a/src/inject-manifest.ts
|
||||
+++ b/src/inject-manifest.ts
|
||||
@@ -129,7 +129,10 @@ export async function injectManifest(
|
||||
searchString: options.injectionPoint!,
|
||||
});
|
||||
|
||||
- filesToWrite[options.swDest] = source;
|
||||
+ filesToWrite[options.swDest] = source.replace(
|
||||
+ url!,
|
||||
+ encodeURI(upath.basename(destPath)),
|
||||
+ );
|
||||
filesToWrite[destPath] = map;
|
||||
} else {
|
||||
// If there's no sourcemap associated with swSrc, a simple string
|
||||
diff --git a/src/lib/translate-url-to-sourcemap-paths.ts b/src/lib/translate-url-to-sourcemap-paths.ts
|
||||
index 072eac40d4ef5d095a01cb7f7e392a9e034853bd..f0bbe69e88ef3a415de18a7e9cb264daea273d71 100644
|
||||
--- a/src/lib/translate-url-to-sourcemap-paths.ts
|
||||
+++ b/src/lib/translate-url-to-sourcemap-paths.ts
|
||||
@@ -28,7 +28,7 @@ export function translateURLToSourcemapPaths(
|
||||
const possibleSrcPath = upath.resolve(upath.dirname(swSrc), url);
|
||||
if (fse.existsSync(possibleSrcPath)) {
|
||||
srcPath = possibleSrcPath;
|
||||
- destPath = upath.resolve(upath.dirname(swDest), url);
|
||||
+ destPath = `${swDest}.map`;
|
||||
} else {
|
||||
warning = `${errors['cant-find-sourcemap']} ${possibleSrcPath}`;
|
||||
}
|
942  .yarn/releases/yarn-4.9.2.cjs  vendored
File diff suppressed because one or more lines are too long
@@ -1,9 +0,0 @@
compressionLevel: mixed

defaultSemverRangePrefix: ""

enableGlobalCache: false

nodeLinker: node-modules

yarnPath: .yarn/releases/yarn-4.9.2.cjs
@@ -1,8 +0,0 @@
# People marked here will be automatically requested for a review
# when the code that they own is touched.
# https://github.com/blog/2392-introducing-code-owners
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners

# Part of the frontend that mobile developper should review
src/external_app/ @bgoncal @TimoPtr
test/external_app/ @bgoncal @TimoPtr
@ -2,139 +2,79 @@
|
||||
|
||||
## Our Pledge
|
||||
|
||||
We as members, contributors, and leaders pledge to make participation in our
|
||||
community a harassment-free experience for everyone, regardless of age, body
|
||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||
identity and expression, level of experience, education, socio-economic status,
|
||||
nationality, personal appearance, race, religion, or sexual identity
|
||||
and orientation.
|
||||
|
||||
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||
diverse, inclusive, and healthy community.
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to making participation in our project and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, gender identity and expression, level of experience,
|
||||
nationality, personal appearance, race, religion, or sexual identity and
|
||||
orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the
|
||||
overall community
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery, and sexual attention or
|
||||
advances of any kind
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email
|
||||
address, without their explicit permission
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement Responsibilities
|
||||
## Our Responsibilities
|
||||
|
||||
Community leaders are responsible for clarifying and enforcing our standards of
|
||||
acceptable behavior and will take appropriate and fair corrective action in
|
||||
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||
or harmful.
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Community leaders have the right and responsibility to remove, edit, or reject
|
||||
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||
decisions when appropriate.
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all community spaces, and also applies when
|
||||
an individual is officially representing the community in public spaces.
|
||||
Examples of representing our community include using an official e-mail address,
|
||||
posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event.
|
||||
This Code of Conduct applies both within project spaces and in public spaces
|
||||
when an individual is representing the project or its community. Examples of
|
||||
representing a project or community include using an official project e-mail
|
||||
address, posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event. Representation of a project may be
|
||||
further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at
|
||||
[safety@home-assistant.io][email] or by using the report/flag feature of
|
||||
the medium used. All complaints will be reviewed and investigated promptly and
|
||||
fairly.
|
||||
reported by contacting the project team at [safety@home-assistant.io][email]. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
reporter of any incident.
|
||||
|
||||
## Enforcement Guidelines
|
||||
|
||||
Community leaders will follow these Community Impact Guidelines in determining
|
||||
the consequences for any action they deem in violation of this Code of Conduct:
|
||||
|
||||
### 1. Correction
|
||||
|
||||
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||
unprofessional or unwelcome in the community.
|
||||
|
||||
**Consequence**: A private, written warning from community leaders, providing
|
||||
clarity around the nature of the violation and an explanation of why the
|
||||
behavior was inappropriate. A public apology may be requested.
|
||||
|
||||
### 2. Warning
|
||||
|
||||
**Community Impact**: A violation through a single incident or series
|
||||
of actions.
|
||||
|
||||
**Consequence**: A warning with consequences for continued behavior. No
|
||||
interaction with the people involved, including unsolicited interaction with
|
||||
those enforcing the Code of Conduct, for a specified period of time. This
|
||||
includes avoiding interactions in community spaces as well as external channels
|
||||
like social media. Violating these terms may lead to a temporary or
|
||||
permanent ban.
|
||||
|
||||
### 3. Temporary Ban
|
||||
|
||||
**Community Impact**: A serious violation of community standards, including
|
||||
sustained inappropriate behavior.
|
||||
|
||||
**Consequence**: A temporary ban from any sort of interaction or public
|
||||
communication with the community for a specified period of time. No public or
|
||||
private interaction with the people involved, including unsolicited interaction
|
||||
with those enforcing the Code of Conduct, is allowed during this period.
|
||||
Violating these terms may lead to a permanent ban.
|
||||
|
||||
### 4. Permanent Ban
|
||||
|
||||
**Community Impact**: Demonstrating a pattern of violation of community
|
||||
standards, including sustained inappropriate behavior, harassment of an
|
||||
individual, or aggression toward or disparagement of classes of individuals.
|
||||
|
||||
**Consequence**: A permanent ban from any sort of public interaction within
|
||||
the community.
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||
version 2.0, available [here][version].
|
||||
|
||||
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||
enforcement ladder][mozilla].
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available [here][version].
|
||||
|
||||
## Adoption
|
||||
|
||||
This Code of Conduct was first adopted January 21st, 2017 and announced in
|
||||
[this][coc-blog] blog post and has been updated on May 25th, 2020 to version
|
||||
2.0 of the [Contributor Covenant][homepage] as announced in [this][coc2-blog]
|
||||
blog post.
|
||||
This Code of Conduct was first adopted January 21st, 2017 and announced in [this][coc-blog] blog post.
|
||||
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
<https://www.contributor-covenant.org/faq>. Translations are available at
|
||||
<https://www.contributor-covenant.org/translations>.
|
||||
|
||||
[coc-blog]: /blog/2017/01/21/home-assistant-governance/
|
||||
[coc2-blog]: /blog/2020/05/25/code-of-conduct-updated/
|
||||
[email]: mailto:safety@home-assistant.io
|
||||
[homepage]: http://contributor-covenant.org
|
||||
[mozilla]: https://github.com/mozilla/diversity
|
||||
[version]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html
|
||||
[version]: http://contributor-covenant.org/version/1/4/
|
||||
[email]: mailto:safety@home-assistant.io
|
||||
[coc-blog]: https://home-assistant.io/blog/2017/01/21/home-assistant-governance/
|
||||
|
31  Dockerfile  Normal file
@@ -0,0 +1,31 @@
FROM node:8.11.1-alpine

# install yarn
ENV PATH /root/.yarn/bin:$PATH

## Install/force base tools
RUN apk update \
  && apk add make g++ curl bash binutils tar git python2 python3 \
  && rm -rf /var/cache/apk/* \
  && /bin/bash \
  && touch ~/.bashrc

## Install yarn
RUN curl -o- -L https://yarnpkg.com/install.sh | bash

## Setup the project
RUN mkdir -p /frontend

WORKDIR /frontend

COPY package.json yarn.lock ./

RUN yarn install --frozen-lockfile

COPY . .

COPY script/docker_entrypoint.sh /usr/bin/docker_entrypoint.sh

RUN chmod +x /usr/bin/docker_entrypoint.sh

CMD [ "docker_entrypoint.sh" ]
@@ -1,4 +1,5 @@
include README.md
include LICENSE.md
graft hass_frontend
graft hass_frontend_es5
recursive-exclude * *.py[co]
21  README.md
@@ -2,19 +2,19 @@

This is the repository for the official [Home Assistant](https://home-assistant.io) frontend.

[](https://demo.home-assistant.io/)
[](https://demo.home-assistant.io/)

- [View demo of Home Assistant](https://demo.home-assistant.io/)
- [More information about Home Assistant](https://home-assistant.io)
- [Frontend development instructions](https://developers.home-assistant.io/docs/frontend/development/)
- [Frontend development instructions](https://developers.home-assistant.io/docs/en/frontend_index.html)

## Development

- Initial setup: `script/setup`
- Development: [Instructions](https://developers.home-assistant.io/docs/frontend/development/)
- Development: [Instructions](https://developers.home-assistant.io/docs/en/frontend_development.html)
- Production build: `script/build_frontend`
- Gallery: `cd gallery && script/develop_gallery`
- Supervisor: [Instructions](https://developers.home-assistant.io/docs/supervisor/developing)
- Hass.io: [Instructions](https://developers.home-assistant.io/docs/en/hassio_hass.html)

## Frontend development

@@ -22,10 +22,17 @@ This is the repository for the official [Home Assistant](https://home-assistant.

A complete guide can be found at the following [link](https://www.home-assistant.io/developers/frontend/). It describes a short guide for the build of project.

### Docker environment

It is possible to compile the project and/or run commands in the development environment having only the [Docker](https://www.docker.com) pre-installed in the system. On the root of project you can do:

- `sh ./script/docker_run.sh build` Build all the project with one command
- `sh ./script/docker_run.sh bash` Open an interactive shell (the same environment generated by the _classic environment_) where you can run commands. This bash work on your project directory and any change on your file is automatically present within your build bash.

**Note**: if you have installed `npm` in addition to the `docker`, you can use the commands `npm run docker_build` and `npm run bash` to get a full build or bash as explained above

## License

Home Assistant is open-source and Apache 2 licensed. Feel free to browse the repository, learn and reuse parts in your own projects.

We use [BrowserStack](https://www.browserstack.com) to test Home Assistant on a large variety of devices.

[](https://www.openhomefoundation.org/)
We use [BrowserStack](https://www.browserstack.com) to test Home Assistant on a large variation of devices.
30  azure-pipelines-netlify.yml  Normal file
@@ -0,0 +1,30 @@
# https://dev.azure.com/home-assistant

trigger: none
pr: none
schedules:
  - cron: "0 0 * * *"
    displayName: "build preview"
    branches:
      include:
        - dev
    always: true
variables:
  - group: netlify

jobs:

  - job: 'Netlify_preview'
    pool:
      vmImage: 'ubuntu-latest'
    steps:
      - script: |
          # Cast
          curl -X POST -d {} https://api.netlify.com/build_hooks/${NETLIFY_CAST}

          # Demo
          curl -X POST -d {} https://api.netlify.com/build_hooks/${NETLIFY_DEMO}

          # Gallery
          curl -X POST -d {} https://api.netlify.com/build_hooks/${NETLIFY_GALLERY}
        displayName: 'Trigger netlify build preview'
59
azure-pipelines-release.yml
Normal file
@ -0,0 +1,59 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
tags:
|
||||
include:
|
||||
- "*"
|
||||
pr: none
|
||||
variables:
|
||||
- name: versionWheels
|
||||
value: '1.10.1-3.7-alpine3.11'
|
||||
- name: versionNode
|
||||
value: '12.1'
|
||||
- group: twine
|
||||
resources:
|
||||
repositories:
|
||||
- repository: azure
|
||||
type: github
|
||||
name: 'home-assistant/ci-azure'
|
||||
endpoint: 'home-assistant'
|
||||
|
||||
|
||||
stages:
|
||||
- stage: "Validate"
|
||||
jobs:
|
||||
- template: templates/azp-job-version.yaml@azure
|
||||
|
||||
- stage: "Build"
|
||||
jobs:
|
||||
- job: "ReleasePython"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
displayName: "Use Python 3.7"
|
||||
inputs:
|
||||
versionSpec: "3.7"
|
||||
- task: NodeTool@0
|
||||
displayName: "Use Node $(versionNode)"
|
||||
inputs:
|
||||
versionSpec: "$(versionNode)"
|
||||
- script: pip install twine wheel
|
||||
displayName: "Install tools"
|
||||
- script: |
|
||||
export TWINE_USERNAME="$(twineUser)"
|
||||
export TWINE_PASSWORD="$(twinePassword)"
|
||||
|
||||
script/release
|
||||
displayName: "Build and release package"
|
||||
- stage: "Wheels"
|
||||
jobs:
|
||||
- template: templates/azp-job-wheels.yaml@azure
|
||||
parameters:
|
||||
builderVersion: '$(versionWheels)'
|
||||
wheelsRequirement: 'requirement.txt'
|
||||
preBuild:
|
||||
- script: |
|
||||
sleep 240
|
||||
echo "home-assistant-frontend==$(Build.SourceBranchName)" > requirement.txt
|
70
azure-pipelines-translation.yml
Normal file
@ -0,0 +1,70 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- dev
|
||||
paths:
|
||||
include:
|
||||
- translations/en.json
|
||||
pr: none
|
||||
schedules:
|
||||
- cron: "30 0 * * *"
|
||||
displayName: "frontend translation update"
|
||||
branches:
|
||||
include:
|
||||
- dev
|
||||
always: true
|
||||
variables:
|
||||
- group: translation
|
||||
resources:
|
||||
repositories:
|
||||
- repository: azure
|
||||
type: github
|
||||
name: 'home-assistant/ci-azure'
|
||||
endpoint: 'home-assistant'
|
||||
|
||||
|
||||
jobs:
|
||||
|
||||
- job: 'Upload'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
displayName: 'Use Node 12.x'
|
||||
inputs:
|
||||
versionSpec: '12.x'
|
||||
- script: |
|
||||
export LOKALISE_TOKEN="$(lokaliseToken)"
|
||||
export AZURE_BRANCH="$(Build.SourceBranchName)"
|
||||
|
||||
./script/translations_upload_base
|
||||
displayName: 'Upload Translation'
|
||||
|
||||
- job: 'Download'
|
||||
dependsOn:
|
||||
- 'Upload'
|
||||
condition: or(eq(variables['Build.Reason'], 'Schedule'), eq(variables['Build.Reason'], 'Manual'))
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
displayName: 'Use Node 12.x'
|
||||
inputs:
|
||||
versionSpec: '12.x'
|
||||
- template: templates/azp-step-git-init.yaml@azure
|
||||
- script: |
|
||||
export LOKALISE_TOKEN="$(lokaliseToken)"
|
||||
export AZURE_BRANCH="$(Build.SourceBranchName)"
|
||||
|
||||
npm install
|
||||
./script/translations_download
|
||||
displayName: 'Download Translation'
|
||||
- script: |
|
||||
git checkout dev
|
||||
git add translation
|
||||
git commit -am "[ci skip] Translation update"
|
||||
git push
|
||||
displayName: 'Update translation'
|
7
build-scripts/.eslintrc
Normal file
@ -0,0 +1,7 @@
|
||||
{
|
||||
"rules": {
|
||||
"import/no-extraneous-dependencies": 0,
|
||||
"no-restricted-syntax": 0,
|
||||
"no-console": 0
|
||||
}
|
||||
}
|
7
build-scripts/.eslintrc.json
Normal file
@ -0,0 +1,7 @@
|
||||
{
|
||||
"extends": "../.eslintrc.json",
|
||||
"rules": {
|
||||
"import/no-extraneous-dependencies": 0,
|
||||
"global-require": 0
|
||||
}
|
||||
}
|
@ -1,39 +0,0 @@

# Bundling Home Assistant Frontend

The Home Assistant build pipeline contains various steps to prepare a build.

- Generating icon files to be included
- Generating translation files to be included
- Converting TypeScript, CSS and JSON files to JavaScript
- Bundling
- Minifying the files
- Generating the HTML entrypoint files
- Generating the service worker
- Compressing the files

## Converting files

Currently in Home Assistant we use a bundler to convert TypeScript, CSS and JSON files to JavaScript files that the browser understands.

We currently rely on Webpack, which bundles the converted files in both production and development.

For development, bundling is optional. We just want to get the right files in the browser.

Responsibilities of the converter during development:

- Convert TypeScript to JavaScript
- Convert CSS to JavaScript that sets the content as the default export (a short sketch follows this list)
- Convert JSON to JavaScript that sets the content as the default export
- Make sure import, dynamic import and web worker references work
- Add extensions where missing
- Resolve absolute package imports
- Filter out specific imports/packages
- Replace constants with values

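To make the CSS and JSON items above concrete, here is a minimal sketch of what that conversion step produces. It is illustrative only: the helper, the file names and the exact output are assumptions, not the loader the build actually uses.

```js
// Minimal sketch (not the actual loader): turn a .css or .json file into a
// JavaScript module whose default export is the file's content.
import { readFileSync, writeFileSync } from "node:fs";
import { extname } from "node:path";

const toJsModule = (file) => {
  const source = readFileSync(file, "utf8");
  const body =
    extname(file) === ".json"
      ? // JSON content becomes the default-exported object literal
        `export default ${source.trim()};\n`
      : // CSS content becomes a default-exported string
        `export default ${JSON.stringify(source)};\n`;
  writeFileSync(`${file}.js`, body);
};

// Hypothetical usage: toJsModule("colors.json"); toJsModule("card.css");
```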
In production, the following responsibilities are added:

- Minify HTML
- Bundle multiple imports so that the browser can fetch fewer files
- Generate a second version that is ES5 compatible

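The ES5-compatible second version comes from running the same pipeline twice with different browser targets. Below is a hedged sketch of that idea, assuming Babel with `@babel/preset-env` and browserslist environments named "modern" and "legacy" (the same environment and folder names appear later in this compare view); it is not the project's actual configuration.

```js
// Hedged sketch: build a modern bundle and an ES5-compatible one by running
// the pipeline twice with different browserslist environments. Only the
// environment and folder names are taken from this repository; the rest is
// an assumption for illustration.
const babelOptions = (latestBuild) => ({
  // Selects the "modern" or "legacy" browserslist environment
  browserslistEnv: latestBuild ? "modern" : "legacy",
  presets: [["@babel/preset-env", { bugfixes: true }]],
});

const builds = [true, false].map((latestBuild) => ({
  outputDir: latestBuild ? "frontend_latest" : "frontend_es5",
  babel: babelOptions(latestBuild),
}));

console.log(builds.map((b) => b.outputDir).join(", "));
// -> frontend_latest, frontend_es5
```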
Configuration for all these steps is specified in [bundle.js](bundle.js).

@ -1,150 +0,0 @@
|
||||
import defineProvider from "@babel/helper-define-polyfill-provider";
|
||||
import { join } from "node:path";
|
||||
import paths from "../paths.cjs";
|
||||
|
||||
const POLYFILL_DIR = join(paths.root_dir, "src/resources/polyfills");
|
||||
|
||||
// List of polyfill keys with supported browser targets for the functionality
|
||||
const polyfillSupport = {
|
||||
// Note states and shadowRoot properties should be supported.
|
||||
"element-internals": {
|
||||
android: 90,
|
||||
chrome: 90,
|
||||
edge: 90,
|
||||
firefox: 126,
|
||||
ios: 17.4,
|
||||
opera: 76,
|
||||
opera_mobile: 64,
|
||||
safari: 17.4,
|
||||
samsung: 15.0,
|
||||
},
|
||||
"element-getattributenames": {
|
||||
android: 61,
|
||||
chrome: 61,
|
||||
edge: 18,
|
||||
firefox: 45,
|
||||
ios: 10.3,
|
||||
opera: 48,
|
||||
opera_mobile: 45,
|
||||
safari: 10.1,
|
||||
samsung: 8.0,
|
||||
},
|
||||
"element-toggleattribute": {
|
||||
android: 69,
|
||||
chrome: 69,
|
||||
edge: 18,
|
||||
firefox: 63,
|
||||
ios: 12.0,
|
||||
opera: 56,
|
||||
opera_mobile: 48,
|
||||
safari: 12.0,
|
||||
samsung: 10.0,
|
||||
},
|
||||
// FormatJS polyfill detects fix for https://bugs.chromium.org/p/v8/issues/detail?id=10682,
|
||||
// so adjusted to several months after that was marked fixed
|
||||
"intl-getcanonicallocales": {
|
||||
android: 90,
|
||||
chrome: 90,
|
||||
edge: 90,
|
||||
firefox: 48,
|
||||
ios: 10.3,
|
||||
opera: 76,
|
||||
opera_mobile: 64,
|
||||
safari: 10.1,
|
||||
samsung: 15.0,
|
||||
},
|
||||
"intl-locale": {
|
||||
android: 74,
|
||||
chrome: 74,
|
||||
edge: 79,
|
||||
firefox: 75,
|
||||
ios: 14.0,
|
||||
opera: 62,
|
||||
opera_mobile: 53,
|
||||
safari: 14.0,
|
||||
samsung: 11.0,
|
||||
},
|
||||
"intl-other": {
|
||||
// Not specified (i.e. always try polyfill) since compatibility depends on supported locales
|
||||
},
|
||||
"resize-observer": {
|
||||
android: 64,
|
||||
chrome: 64,
|
||||
edge: 79,
|
||||
firefox: 69,
|
||||
ios: 13.4,
|
||||
opera: 51,
|
||||
opera_mobile: 47,
|
||||
safari: 13.1,
|
||||
samsung: 9.0,
|
||||
},
|
||||
};
|
||||
|
||||
// Map of global variables and/or instance and static properties to the
|
||||
// corresponding polyfill key and actual module to import
|
||||
const polyfillMap = {
|
||||
global: {
|
||||
ResizeObserver: {
|
||||
key: "resize-observer",
|
||||
module: join(POLYFILL_DIR, "resize-observer.ts"),
|
||||
},
|
||||
},
|
||||
instance: {
|
||||
attachInternals: {
|
||||
key: "element-internals",
|
||||
module: "element-internals-polyfill",
|
||||
},
|
||||
...Object.fromEntries(
|
||||
["getAttributeNames", "toggleAttribute"].map((prop) => {
|
||||
const key = `element-${prop.toLowerCase()}`;
|
||||
return [prop, { key, module: join(POLYFILL_DIR, `${key}.ts`) }];
|
||||
})
|
||||
),
|
||||
},
|
||||
static: {
|
||||
Intl: {
|
||||
getCanonicalLocales: {
|
||||
key: "intl-getcanonicallocales",
|
||||
module: join(POLYFILL_DIR, "intl-polyfill.ts"),
|
||||
},
|
||||
Locale: {
|
||||
key: "intl-locale",
|
||||
module: join(POLYFILL_DIR, "intl-polyfill.ts"),
|
||||
},
|
||||
...Object.fromEntries(
|
||||
[
|
||||
"DateTimeFormat",
|
||||
"DurationFormat",
|
||||
"DisplayNames",
|
||||
"ListFormat",
|
||||
"NumberFormat",
|
||||
"PluralRules",
|
||||
"RelativeTimeFormat",
|
||||
].map((obj) => [
|
||||
obj,
|
||||
{ key: "intl-other", module: join(POLYFILL_DIR, "intl-polyfill.ts") },
|
||||
])
|
||||
),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Create plugin using the same factory as for CoreJS
|
||||
export default defineProvider(
|
||||
({ createMetaResolver, debug, shouldInjectPolyfill }) => {
|
||||
const resolvePolyfill = createMetaResolver(polyfillMap);
|
||||
return {
|
||||
name: "custom-polyfill",
|
||||
polyfills: polyfillSupport,
|
||||
usageGlobal(meta, utils) {
|
||||
const polyfill = resolvePolyfill(meta);
|
||||
if (polyfill && shouldInjectPolyfill(polyfill.desc.key)) {
|
||||
debug(polyfill.desc.key);
|
||||
utils.injectGlobalImport(polyfill.desc.module);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
};
|
||||
}
|
||||
);
|
@ -1,168 +0,0 @@
|
||||
const path = require("path");
|
||||
|
||||
// Currently only supports CommonJS modules, as require is synchronous. `import` would need babel running asynchronous.
|
||||
module.exports = function inlineConstants(babel, options, cwd) {
|
||||
const t = babel.types;
|
||||
|
||||
if (!Array.isArray(options.modules)) {
|
||||
throw new TypeError(
|
||||
"babel-plugin-inline-constants: expected a `modules` array to be passed"
|
||||
);
|
||||
}
|
||||
|
||||
if (options.resolveExtensions && !Array.isArray(options.resolveExtensions)) {
|
||||
throw new TypeError(
|
||||
"babel-plugin-inline-constants: expected `resolveExtensions` to be an array"
|
||||
);
|
||||
}
|
||||
|
||||
const ignoreModuleNotFound = options.ignoreModuleNotFound;
|
||||
const resolveExtensions = options.resolveExtensions;
|
||||
|
||||
const hasRelativeModules = options.modules.some(
|
||||
(module) => module.startsWith(".") || module.startsWith("/")
|
||||
);
|
||||
|
||||
const modules = Object.fromEntries(
|
||||
options.modules.map((module) => {
|
||||
const absolute = module.startsWith(".")
|
||||
? require.resolve(module, { paths: [cwd] })
|
||||
: module;
|
||||
return [absolute, require(absolute)];
|
||||
})
|
||||
);
|
||||
|
||||
const toLiteral = (value) => {
|
||||
if (typeof value === "string") {
|
||||
return t.stringLiteral(value);
|
||||
}
|
||||
|
||||
if (typeof value === "number") {
|
||||
return t.numericLiteral(value);
|
||||
}
|
||||
|
||||
if (typeof value === "boolean") {
|
||||
return t.booleanLiteral(value);
|
||||
}
|
||||
|
||||
if (value === null) {
|
||||
return t.nullLiteral();
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
"babel-plugin-inline-constants: cannot handle non-literal `" + value + "`"
|
||||
);
|
||||
};
|
||||
|
||||
const resolveAbsolute = (value, state, resolveExtensionIndex) => {
|
||||
if (!state.filename) {
|
||||
throw new TypeError(
|
||||
"babel-plugin-inline-constants: expected a `filename` to be set for files"
|
||||
);
|
||||
}
|
||||
|
||||
if (resolveExtensions && resolveExtensionIndex !== undefined) {
|
||||
value += resolveExtensions[resolveExtensionIndex];
|
||||
}
|
||||
|
||||
try {
|
||||
return require.resolve(value, { paths: [path.dirname(state.filename)] });
|
||||
} catch (error) {
|
||||
if (
|
||||
error.code === "MODULE_NOT_FOUND" &&
|
||||
resolveExtensions &&
|
||||
(resolveExtensionIndex === undefined ||
|
||||
resolveExtensionIndex < resolveExtensions.length - 1)
|
||||
) {
|
||||
const resolveExtensionIdx = (resolveExtensionIndex || -1) + 1;
|
||||
return resolveAbsolute(value, state, resolveExtensionIdx);
|
||||
}
|
||||
|
||||
if (error.code === "MODULE_NOT_FOUND" && ignoreModuleNotFound) {
|
||||
return undefined;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const importDeclaration = (p, state) => {
|
||||
if (p.node.type !== "ImportDeclaration") {
|
||||
return;
|
||||
}
|
||||
const absolute =
|
||||
hasRelativeModules && p.node.source.value.startsWith(".")
|
||||
? resolveAbsolute(p.node.source.value, state)
|
||||
: p.node.source.value;
|
||||
|
||||
if (!absolute || !(absolute in modules)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const module = modules[absolute];
|
||||
|
||||
for (const specifier of p.node.specifiers) {
|
||||
if (
|
||||
specifier.type === "ImportDefaultSpecifier" &&
|
||||
specifier.local &&
|
||||
specifier.local.type === "Identifier"
|
||||
) {
|
||||
if (!("default" in module)) {
|
||||
throw new Error(
|
||||
"babel-plugin-inline-constants: cannot access default export from `" +
|
||||
p.node.source.value +
|
||||
"`"
|
||||
);
|
||||
}
|
||||
|
||||
const variableValue = toLiteral(module.default);
|
||||
const variable = t.variableDeclarator(
|
||||
t.identifier(specifier.local.name),
|
||||
variableValue
|
||||
);
|
||||
|
||||
p.insertBefore({
|
||||
type: "VariableDeclaration",
|
||||
kind: "const",
|
||||
declarations: [variable],
|
||||
});
|
||||
} else if (
|
||||
specifier.type === "ImportSpecifier" &&
|
||||
specifier.imported &&
|
||||
specifier.imported.type === "Identifier" &&
|
||||
specifier.local &&
|
||||
specifier.local.type === "Identifier"
|
||||
) {
|
||||
if (!(specifier.imported.name in module)) {
|
||||
throw new Error(
|
||||
"babel-plugin-inline-constants: cannot access `" +
|
||||
specifier.imported.name +
|
||||
"` from `" +
|
||||
p.node.source.value +
|
||||
"`"
|
||||
);
|
||||
}
|
||||
|
||||
const variableValue = toLiteral(module[specifier.imported.name]);
|
||||
const variable = t.variableDeclarator(
|
||||
t.identifier(specifier.local.name),
|
||||
variableValue
|
||||
);
|
||||
|
||||
p.insertBefore({
|
||||
type: "VariableDeclaration",
|
||||
kind: "const",
|
||||
declarations: [variable],
|
||||
});
|
||||
} else {
|
||||
throw new Error("Cannot handle specifier `" + specifier.type + "`");
|
||||
}
|
||||
}
|
||||
p.remove();
|
||||
};
|
||||
|
||||
return {
|
||||
visitor: {
|
||||
ImportDeclaration: importDeclaration,
|
||||
},
|
||||
};
|
||||
};
|
50
build-scripts/babel.js
Normal file
@ -0,0 +1,50 @@
|
||||
module.exports.babelLoaderConfig = ({ latestBuild }) => {
|
||||
if (latestBuild === undefined) {
|
||||
throw Error("latestBuild not defined for babel loader config");
|
||||
}
|
||||
return {
|
||||
test: /\.m?js$|\.tsx?$/,
|
||||
use: {
|
||||
loader: "babel-loader",
|
||||
options: {
|
||||
presets: [
|
||||
!latestBuild && [
|
||||
require("@babel/preset-env").default,
|
||||
{ modules: false },
|
||||
],
|
||||
[
|
||||
require("@babel/preset-typescript").default,
|
||||
{
|
||||
jsxPragma: "h",
|
||||
},
|
||||
],
|
||||
].filter(Boolean),
|
||||
plugins: [
|
||||
// Part of ES2018. Converts {...a, b: 2} to Object.assign({}, a, {b: 2})
|
||||
[
|
||||
"@babel/plugin-proposal-object-rest-spread",
|
||||
{ loose: true, useBuiltIns: true },
|
||||
],
|
||||
// Only support the syntax, Webpack will handle it.
|
||||
"@babel/syntax-dynamic-import",
|
||||
[
|
||||
"@babel/transform-react-jsx",
|
||||
{
|
||||
pragma: "h",
|
||||
},
|
||||
],
|
||||
"@babel/plugin-proposal-optional-chaining",
|
||||
"@babel/plugin-proposal-nullish-coalescing-operator",
|
||||
[
|
||||
require("@babel/plugin-proposal-decorators").default,
|
||||
{ decoratorsBeforeExport: true },
|
||||
],
|
||||
[
|
||||
require("@babel/plugin-proposal-class-properties").default,
|
||||
{ loose: true },
|
||||
],
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
@ -1,343 +0,0 @@
|
||||
const path = require("path");
|
||||
const env = require("./env.cjs");
|
||||
const paths = require("./paths.cjs");
|
||||
const { dependencies } = require("../package.json");
|
||||
|
||||
const BABEL_PLUGINS = path.join(__dirname, "babel-plugins");
|
||||
|
||||
// GitHub base URL to use for production source maps
|
||||
// Nightly builds use the commit SHA, otherwise assumes there is a tag that matches the version
|
||||
module.exports.sourceMapURL = () => {
|
||||
const ref = env.version().endsWith("dev")
|
||||
? process.env.GITHUB_SHA || "dev"
|
||||
: env.version();
|
||||
return `https://raw.githubusercontent.com/home-assistant/frontend/${ref}/`;
|
||||
};
|
||||
|
||||
// Files from NPM Packages that should not be imported
|
||||
module.exports.ignorePackages = () => [];
|
||||
|
||||
// Files from NPM packages that we should replace with empty file
|
||||
module.exports.emptyPackages = ({ isHassioBuild }) =>
|
||||
[
|
||||
require.resolve("@vaadin/vaadin-material-styles/typography.js"),
|
||||
require.resolve("@vaadin/vaadin-material-styles/font-icons.js"),
|
||||
// Icons in supervisor conflict with icons in HA so we don't load.
|
||||
isHassioBuild &&
|
||||
require.resolve(
|
||||
path.resolve(paths.root_dir, "src/components/ha-icon.ts")
|
||||
),
|
||||
isHassioBuild &&
|
||||
require.resolve(
|
||||
path.resolve(paths.root_dir, "src/components/ha-icon-picker.ts")
|
||||
),
|
||||
].filter(Boolean);
|
||||
|
||||
module.exports.definedVars = ({ isProdBuild, latestBuild, defineOverlay }) => ({
|
||||
__DEV__: !isProdBuild,
|
||||
__BUILD__: JSON.stringify(latestBuild ? "modern" : "legacy"),
|
||||
__VERSION__: JSON.stringify(env.version()),
|
||||
__DEMO__: false,
|
||||
__SUPERVISOR__: false,
|
||||
__BACKWARDS_COMPAT__: false,
|
||||
__STATIC_PATH__: "/static/",
|
||||
__HASS_URL__: `\`${
|
||||
"HASS_URL" in process.env
|
||||
? process.env.HASS_URL
|
||||
: // eslint-disable-next-line no-template-curly-in-string
|
||||
"${location.protocol}//${location.host}"
|
||||
}\``,
|
||||
"process.env.NODE_ENV": JSON.stringify(
|
||||
isProdBuild ? "production" : "development"
|
||||
),
|
||||
...defineOverlay,
|
||||
});
|
||||
|
||||
module.exports.htmlMinifierOptions = {
|
||||
caseSensitive: true,
|
||||
collapseWhitespace: true,
|
||||
conservativeCollapse: true,
|
||||
decodeEntities: true,
|
||||
removeComments: true,
|
||||
removeRedundantAttributes: true,
|
||||
minifyCSS: {
|
||||
compatibility: "*,-properties.zeroUnits",
|
||||
},
|
||||
};
|
||||
|
||||
module.exports.terserOptions = ({ latestBuild, isTestBuild }) => ({
|
||||
safari10: !latestBuild,
|
||||
ecma: latestBuild ? 2015 : 5,
|
||||
module: latestBuild,
|
||||
format: { comments: false },
|
||||
sourceMap: !isTestBuild,
|
||||
});
|
||||
|
||||
/** @type {import('@rspack/core').SwcLoaderOptions} */
|
||||
module.exports.swcOptions = () => ({
|
||||
jsc: {
|
||||
loose: true,
|
||||
externalHelpers: true,
|
||||
target: "ES2021",
|
||||
parser: {
|
||||
syntax: "typescript",
|
||||
decorators: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
module.exports.babelOptions = ({
|
||||
latestBuild,
|
||||
isProdBuild,
|
||||
isTestBuild,
|
||||
sw,
|
||||
}) => ({
|
||||
babelrc: false,
|
||||
compact: false,
|
||||
assumptions: {
|
||||
privateFieldsAsProperties: true,
|
||||
setPublicClassFields: true,
|
||||
setSpreadProperties: true,
|
||||
},
|
||||
browserslistEnv: latestBuild ? "modern" : `legacy${sw ? "-sw" : ""}`,
|
||||
presets: [
|
||||
[
|
||||
"@babel/preset-env",
|
||||
{
|
||||
useBuiltIns: "usage",
|
||||
corejs: dependencies["core-js"],
|
||||
bugfixes: true,
|
||||
shippedProposals: true,
|
||||
},
|
||||
],
|
||||
],
|
||||
plugins: [
|
||||
[
|
||||
path.join(BABEL_PLUGINS, "inline-constants-plugin.cjs"),
|
||||
{
|
||||
modules: ["@mdi/js"],
|
||||
ignoreModuleNotFound: true,
|
||||
},
|
||||
],
|
||||
// Minify template literals for production
|
||||
isProdBuild && [
|
||||
"template-html-minifier",
|
||||
{
|
||||
modules: {
|
||||
...Object.fromEntries(
|
||||
["lit", "lit-element", "lit-html"].map((m) => [
|
||||
m,
|
||||
[
|
||||
"html",
|
||||
{ name: "svg", encapsulation: "svg" },
|
||||
{ name: "css", encapsulation: "style" },
|
||||
],
|
||||
])
|
||||
),
|
||||
"@polymer/polymer/lib/utils/html-tag.js": ["html"],
|
||||
},
|
||||
strictCSS: true,
|
||||
htmlMinifier: module.exports.htmlMinifierOptions,
|
||||
failOnError: false, // we can turn this off in case of false positives
|
||||
},
|
||||
],
|
||||
// Import helpers and regenerator from runtime package
|
||||
[
|
||||
"@babel/plugin-transform-runtime",
|
||||
{ version: dependencies["@babel/runtime"] },
|
||||
],
|
||||
"@babel/plugin-transform-class-properties",
|
||||
"@babel/plugin-transform-private-methods",
|
||||
].filter(Boolean),
|
||||
exclude: [
|
||||
// \\ for Windows, / for Mac OS and Linux
|
||||
/node_modules[\\/]core-js/,
|
||||
],
|
||||
sourceMaps: !isTestBuild,
|
||||
overrides: [
|
||||
{
|
||||
// Add plugin to inject various polyfills, excluding the polyfills
|
||||
// themselves to prevent self-injection.
|
||||
plugins: [
|
||||
[
|
||||
path.join(BABEL_PLUGINS, "custom-polyfill-plugin.js"),
|
||||
{ method: "usage-global" },
|
||||
],
|
||||
],
|
||||
exclude: [
|
||||
path.join(paths.root_dir, "src/resources/polyfills"),
|
||||
...[
|
||||
"@formatjs/(?:ecma402-abstract|intl-\\w+)",
|
||||
"@lit-labs/virtualizer/polyfills",
|
||||
"@webcomponents/scoped-custom-element-registry",
|
||||
"element-internals-polyfill",
|
||||
"proxy-polyfill",
|
||||
"unfetch",
|
||||
].map((p) => new RegExp(`/node_modules/${p}/`)),
|
||||
],
|
||||
},
|
||||
{
|
||||
// Use unambiguous for dependencies so that require() is correctly injected into CommonJS files
|
||||
// Exclusions are needed in some cases where ES modules have no static imports or exports, such as polyfills
|
||||
sourceType: "unambiguous",
|
||||
include: /\/node_modules\//,
|
||||
exclude: [
|
||||
"element-internals-polyfill",
|
||||
"@shoelace-style",
|
||||
"@?lit(?:-labs|-element|-html)?",
|
||||
].map((p) => new RegExp(`/node_modules/${p}/`)),
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const nameSuffix = (latestBuild) => (latestBuild ? "-modern" : "-legacy");
|
||||
|
||||
const outputPath = (outputRoot, latestBuild) =>
|
||||
path.resolve(outputRoot, latestBuild ? "frontend_latest" : "frontend_es5");
|
||||
|
||||
const publicPath = (latestBuild, root = "") =>
|
||||
latestBuild ? `${root}/frontend_latest/` : `${root}/frontend_es5/`;
|
||||
|
||||
/*
|
||||
BundleConfig {
|
||||
// Object with entrypoints that need to be bundled
|
||||
entry: { [name: string]: pathToFile },
|
||||
// Folder where bundled files need to be written
|
||||
outputPath: string,
|
||||
// absolute url-path where bundled files can be found
|
||||
publicPath: string,
|
||||
// extra definitions that we need to replace in source
|
||||
defineOverlay: {[name: string]: value },
|
||||
// if this is a production build
|
||||
isProdBuild: boolean,
|
||||
// If we're targeting latest browsers
|
||||
latestBuild: boolean,
|
||||
// If we're doing a stats build (create nice chunk names)
|
||||
isStatsBuild: boolean,
|
||||
// If it's just a test build in CI, skip time on source map generation
|
||||
isTestBuild: boolean,
|
||||
// Names of entrypoints that should not be hashed
|
||||
dontHash: Set<string>
|
||||
}
|
||||
*/
|
||||
|
||||
module.exports.config = {
|
||||
app({ isProdBuild, latestBuild, isStatsBuild, isTestBuild, isWDS }) {
|
||||
return {
|
||||
name: "frontend" + nameSuffix(latestBuild),
|
||||
entry: {
|
||||
"service-worker": !latestBuild
|
||||
? {
|
||||
import: "./src/entrypoints/service-worker.ts",
|
||||
layer: "sw",
|
||||
}
|
||||
: "./src/entrypoints/service-worker.ts",
|
||||
app: "./src/entrypoints/app.ts",
|
||||
authorize: "./src/entrypoints/authorize.ts",
|
||||
onboarding: "./src/entrypoints/onboarding.ts",
|
||||
core: "./src/entrypoints/core.ts",
|
||||
"custom-panel": "./src/entrypoints/custom-panel.ts",
|
||||
},
|
||||
outputPath: outputPath(paths.app_output_root, latestBuild),
|
||||
publicPath: publicPath(latestBuild),
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
isStatsBuild,
|
||||
isTestBuild,
|
||||
isWDS,
|
||||
};
|
||||
},
|
||||
|
||||
demo({ isProdBuild, latestBuild, isStatsBuild }) {
|
||||
return {
|
||||
name: "demo" + nameSuffix(latestBuild),
|
||||
entry: {
|
||||
main: path.resolve(paths.demo_dir, "src/entrypoint.ts"),
|
||||
},
|
||||
outputPath: outputPath(paths.demo_output_root, latestBuild),
|
||||
publicPath: publicPath(latestBuild),
|
||||
defineOverlay: {
|
||||
__VERSION__: JSON.stringify(`DEMO-${env.version()}`),
|
||||
__DEMO__: true,
|
||||
},
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
isStatsBuild,
|
||||
};
|
||||
},
|
||||
|
||||
cast({ isProdBuild, latestBuild }) {
|
||||
const entry = {
|
||||
launcher: path.resolve(paths.cast_dir, "src/launcher/entrypoint.ts"),
|
||||
media: path.resolve(paths.cast_dir, "src/media/entrypoint.ts"),
|
||||
};
|
||||
|
||||
if (latestBuild) {
|
||||
entry.receiver = path.resolve(
|
||||
paths.cast_dir,
|
||||
"src/receiver/entrypoint.ts"
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
name: "cast" + nameSuffix(latestBuild),
|
||||
entry,
|
||||
outputPath: outputPath(paths.cast_output_root, latestBuild),
|
||||
publicPath: publicPath(latestBuild),
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
defineOverlay: {
|
||||
__BACKWARDS_COMPAT__: true,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
hassio({ isProdBuild, latestBuild, isStatsBuild, isTestBuild }) {
|
||||
return {
|
||||
name: "supervisor" + nameSuffix(latestBuild),
|
||||
entry: {
|
||||
entrypoint: path.resolve(paths.hassio_dir, "src/entrypoint.ts"),
|
||||
},
|
||||
outputPath: outputPath(paths.hassio_output_root, latestBuild),
|
||||
publicPath: publicPath(latestBuild, paths.hassio_publicPath),
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
isStatsBuild,
|
||||
isTestBuild,
|
||||
isHassioBuild: true,
|
||||
defineOverlay: {
|
||||
__SUPERVISOR__: true,
|
||||
__STATIC_PATH__: `"${paths.hassio_publicPath}/static/"`,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
gallery({ isProdBuild, latestBuild }) {
|
||||
return {
|
||||
name: "gallery" + nameSuffix(latestBuild),
|
||||
entry: {
|
||||
entrypoint: path.resolve(paths.gallery_dir, "src/entrypoint.js"),
|
||||
},
|
||||
outputPath: outputPath(paths.gallery_output_root, latestBuild),
|
||||
publicPath: publicPath(latestBuild),
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
defineOverlay: {
|
||||
__DEMO__: true,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
landingPage({ isProdBuild, latestBuild }) {
|
||||
return {
|
||||
name: "landing-page" + nameSuffix(latestBuild),
|
||||
entry: {
|
||||
entrypoint: path.resolve(paths.landingPage_dir, "src/entrypoint.js"),
|
||||
},
|
||||
outputPath: outputPath(paths.landingPage_output_root, latestBuild),
|
||||
publicPath: publicPath(latestBuild),
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
};
|
||||
},
|
||||
};
|
@ -1,34 +0,0 @@
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const paths = require("./paths.cjs");
|
||||
|
||||
const isTrue = (value) => value === "1" || value?.toLowerCase() === "true";
|
||||
|
||||
module.exports = {
|
||||
isProdBuild() {
|
||||
return (
|
||||
process.env.NODE_ENV === "production" || module.exports.isStatsBuild()
|
||||
);
|
||||
},
|
||||
isStatsBuild() {
|
||||
return isTrue(process.env.STATS);
|
||||
},
|
||||
isTestBuild() {
|
||||
return isTrue(process.env.IS_TEST);
|
||||
},
|
||||
isNetlify() {
|
||||
return isTrue(process.env.NETLIFY);
|
||||
},
|
||||
version() {
|
||||
const version = fs
|
||||
.readFileSync(path.resolve(paths.root_dir, "pyproject.toml"), "utf8")
|
||||
.match(/version\W+=\W"(\d{8}\.\d(?:\.dev)?)"/);
|
||||
if (!version) {
|
||||
throw Error("Version not found");
|
||||
}
|
||||
return version[1];
|
||||
},
|
||||
isDevContainer() {
|
||||
return isTrue(process.env.DEV_CONTAINER);
|
||||
},
|
||||
};
|
14
build-scripts/env.js
Normal file
@ -0,0 +1,14 @@
|
||||
module.exports = {
|
||||
isProdBuild() {
|
||||
return process.env.NODE_ENV === "production";
|
||||
},
|
||||
isStatsBuild() {
|
||||
return process.env.STATS === "1";
|
||||
},
|
||||
isTravis() {
|
||||
return process.env.TRAVIS === "true";
|
||||
},
|
||||
isNetlify() {
|
||||
return process.env.NETLIFY === "true";
|
||||
},
|
||||
};
|
@ -1,16 +0,0 @@
|
||||
// @ts-check
|
||||
|
||||
import tseslint from "typescript-eslint";
|
||||
import rootConfig from "../eslint.config.mjs";
|
||||
|
||||
export default tseslint.config(...rootConfig, {
|
||||
rules: {
|
||||
"no-console": "off",
|
||||
"import/no-extraneous-dependencies": "off",
|
||||
"import/extensions": "off",
|
||||
"import/no-dynamic-require": "off",
|
||||
"global-require": "off",
|
||||
"@typescript-eslint/no-require-imports": "off",
|
||||
"prefer-arrow-callback": "off",
|
||||
},
|
||||
});
|
@ -1,14 +1,16 @@
|
||||
import gulp from "gulp";
|
||||
import env from "../env.cjs";
|
||||
import "./clean.js";
|
||||
import "./compress.js";
|
||||
import "./entry-html.js";
|
||||
import "./gather-static.js";
|
||||
import "./gen-icons-json.js";
|
||||
import "./locale-data.js";
|
||||
import "./service-worker.js";
|
||||
import "./translations.js";
|
||||
import "./rspack.js";
|
||||
// Run HA develop mode
|
||||
const gulp = require("gulp");
|
||||
|
||||
const envVars = require("../env");
|
||||
|
||||
require("./clean.js");
|
||||
require("./translations.js");
|
||||
require("./gen-icons.js");
|
||||
require("./gather-static.js");
|
||||
require("./compress.js");
|
||||
require("./webpack.js");
|
||||
require("./service-worker.js");
|
||||
require("./entry-html.js");
|
||||
|
||||
gulp.task(
|
||||
"develop-app",
|
||||
@ -18,14 +20,14 @@ gulp.task(
|
||||
},
|
||||
"clean",
|
||||
gulp.parallel(
|
||||
"gen-service-worker-app-dev",
|
||||
"gen-icons-json",
|
||||
"gen-pages-app-dev",
|
||||
"build-translations",
|
||||
"build-locale-data"
|
||||
"gen-service-worker-dev",
|
||||
gulp.parallel("gen-icons-app", "gen-icons-mdi"),
|
||||
"gen-pages-dev",
|
||||
"gen-index-app-dev",
|
||||
"build-translations"
|
||||
),
|
||||
"copy-static-app",
|
||||
"rspack-watch-app"
|
||||
"copy-static",
|
||||
"webpack-watch-app"
|
||||
)
|
||||
);
|
||||
|
||||
@ -36,22 +38,15 @@ gulp.task(
|
||||
process.env.NODE_ENV = "production";
|
||||
},
|
||||
"clean",
|
||||
gulp.parallel("gen-icons-json", "build-translations", "build-locale-data"),
|
||||
"copy-static-app",
|
||||
"rspack-prod-app",
|
||||
gulp.parallel("gen-pages-app-prod", "gen-service-worker-app-prod"),
|
||||
// Don't compress running tests
|
||||
...(env.isTestBuild() || env.isStatsBuild() ? [] : ["compress-app"])
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"analyze-app",
|
||||
gulp.series(
|
||||
async function setEnv() {
|
||||
process.env.STATS = "1";
|
||||
},
|
||||
"clean",
|
||||
"rspack-prod-app"
|
||||
gulp.parallel("gen-icons-app", "gen-icons-mdi", "build-translations"),
|
||||
"copy-static",
|
||||
"webpack-prod-app",
|
||||
...// Don't compress running tests
|
||||
(envVars.isTravis() ? [] : ["compress-app"]),
|
||||
gulp.parallel(
|
||||
"gen-pages-prod",
|
||||
"gen-index-app-prod",
|
||||
"gen-service-worker-prod"
|
||||
)
|
||||
)
|
||||
);
|
||||
|
@ -1,10 +1,12 @@
|
||||
import gulp from "gulp";
|
||||
import "./clean.js";
|
||||
import "./entry-html.js";
|
||||
import "./gather-static.js";
|
||||
import "./service-worker.js";
|
||||
import "./translations.js";
|
||||
import "./rspack.js";
|
||||
const gulp = require("gulp");
|
||||
|
||||
require("./clean.js");
|
||||
require("./translations.js");
|
||||
require("./gen-icons.js");
|
||||
require("./gather-static.js");
|
||||
require("./webpack.js");
|
||||
require("./service-worker.js");
|
||||
require("./entry-html.js");
|
||||
|
||||
gulp.task(
|
||||
"develop-cast",
|
||||
@ -13,11 +15,14 @@ gulp.task(
|
||||
process.env.NODE_ENV = "development";
|
||||
},
|
||||
"clean-cast",
|
||||
"translations-enable-merge-backend",
|
||||
gulp.parallel("gen-icons-json", "build-translations", "build-locale-data"),
|
||||
gulp.parallel(
|
||||
"gen-icons-app",
|
||||
"gen-icons-mdi",
|
||||
"gen-index-cast-dev",
|
||||
"build-translations"
|
||||
),
|
||||
"copy-static-cast",
|
||||
"gen-pages-cast-dev",
|
||||
"rspack-dev-server-cast"
|
||||
"webpack-dev-server-cast"
|
||||
)
|
||||
);
|
||||
|
||||
@ -28,10 +33,9 @@ gulp.task(
|
||||
process.env.NODE_ENV = "production";
|
||||
},
|
||||
"clean-cast",
|
||||
"translations-enable-merge-backend",
|
||||
gulp.parallel("gen-icons-json", "build-translations", "build-locale-data"),
|
||||
gulp.parallel("gen-icons-app", "gen-icons-mdi", "build-translations"),
|
||||
"copy-static-cast",
|
||||
"rspack-prod-cast",
|
||||
"gen-pages-cast-prod"
|
||||
"webpack-prod-cast",
|
||||
"gen-index-cast-prod"
|
||||
)
|
||||
);
|
||||
|
@ -1,51 +1,39 @@
|
||||
import { deleteSync } from "del";
|
||||
import gulp from "gulp";
|
||||
import paths from "../paths.cjs";
|
||||
import "./translations.js";
|
||||
const del = require("del");
|
||||
const gulp = require("gulp");
|
||||
const config = require("../paths");
|
||||
require("./translations");
|
||||
|
||||
gulp.task(
|
||||
"clean",
|
||||
gulp.parallel("clean-translations", async () =>
|
||||
deleteSync([paths.app_output_root, paths.build_dir])
|
||||
)
|
||||
gulp.parallel("clean-translations", function cleanOutputAndBuildDir() {
|
||||
return del([config.root, config.build_dir]);
|
||||
})
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"clean-demo",
|
||||
gulp.parallel("clean-translations", async () =>
|
||||
deleteSync([paths.demo_output_root, paths.build_dir])
|
||||
)
|
||||
gulp.parallel("clean-translations", function cleanOutputAndBuildDir() {
|
||||
return del([config.demo_root, config.build_dir]);
|
||||
})
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"clean-cast",
|
||||
gulp.parallel("clean-translations", async () =>
|
||||
deleteSync([paths.cast_output_root, paths.build_dir])
|
||||
)
|
||||
gulp.parallel("clean-translations", function cleanOutputAndBuildDir() {
|
||||
return del([config.cast_root, config.build_dir]);
|
||||
})
|
||||
);
|
||||
|
||||
gulp.task("clean-hassio", async () =>
|
||||
deleteSync([paths.hassio_output_root, paths.build_dir])
|
||||
gulp.task(
|
||||
"clean-hassio",
|
||||
gulp.parallel("clean-translations", function cleanOutputAndBuildDir() {
|
||||
return del([config.hassio_root, config.build_dir]);
|
||||
})
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"clean-gallery",
|
||||
gulp.parallel("clean-translations", async () =>
|
||||
deleteSync([
|
||||
paths.gallery_output_root,
|
||||
paths.gallery_build,
|
||||
paths.build_dir,
|
||||
])
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"clean-landing-page",
|
||||
gulp.parallel("clean-translations", async () =>
|
||||
deleteSync([
|
||||
paths.landingPage_output_root,
|
||||
paths.landingPage_build,
|
||||
paths.build_dir,
|
||||
])
|
||||
)
|
||||
gulp.parallel("clean-translations", function cleanOutputAndBuildDir() {
|
||||
return del([config.gallery_root, config.build_dir]);
|
||||
})
|
||||
);
|
||||
|
@ -1,86 +1,38 @@
|
||||
// Tasks to compress
|
||||
|
||||
import { constants } from "node:zlib";
|
||||
import gulp from "gulp";
|
||||
import brotli from "gulp-brotli";
|
||||
import zopfli from "gulp-zopfli-green";
|
||||
import paths from "../paths.cjs";
|
||||
const gulp = require("gulp");
|
||||
const zopfli = require("gulp-zopfli-green");
|
||||
const merge = require("merge-stream");
|
||||
const path = require("path");
|
||||
const paths = require("../paths");
|
||||
|
||||
const filesGlob = "*.{js,json,css,svg,xml}";
|
||||
const brotliOptions = {
|
||||
skipLarger: true,
|
||||
params: {
|
||||
[constants.BROTLI_PARAM_QUALITY]: constants.BROTLI_MAX_QUALITY,
|
||||
},
|
||||
};
|
||||
const zopfliOptions = { threshold: 150 };
|
||||
gulp.task("compress-app", function compressApp() {
|
||||
const jsLatest = gulp
|
||||
.src(path.resolve(paths.output, "**/*.js"))
|
||||
.pipe(zopfli())
|
||||
.pipe(gulp.dest(paths.output));
|
||||
|
||||
const compressModern = (rootDir, modernDir, compress) =>
|
||||
gulp
|
||||
.src([`${modernDir}/**/${filesGlob}`, `${rootDir}/sw-modern.js`], {
|
||||
base: rootDir,
|
||||
allowEmpty: true,
|
||||
})
|
||||
.pipe(compress === "zopfli" ? zopfli(zopfliOptions) : brotli(brotliOptions))
|
||||
.pipe(gulp.dest(rootDir));
|
||||
const jsEs5 = gulp
|
||||
.src(path.resolve(paths.output_es5, "**/*.js"))
|
||||
.pipe(zopfli())
|
||||
.pipe(gulp.dest(paths.output_es5));
|
||||
|
||||
const compressOther = (rootDir, modernDir, compress) =>
|
||||
gulp
|
||||
.src(
|
||||
[
|
||||
`${rootDir}/**/${filesGlob}`,
|
||||
`!${modernDir}/**/${filesGlob}`,
|
||||
`!${rootDir}/{sw-modern,service_worker}.js`,
|
||||
`${rootDir}/{authorize,onboarding}.html`,
|
||||
],
|
||||
{ base: rootDir, allowEmpty: true }
|
||||
)
|
||||
.pipe(compress === "zopfli" ? zopfli(zopfliOptions) : brotli(brotliOptions))
|
||||
.pipe(gulp.dest(rootDir));
|
||||
const polyfills = gulp
|
||||
.src(path.resolve(paths.static, "polyfills/*.js"))
|
||||
.pipe(zopfli())
|
||||
.pipe(gulp.dest(path.resolve(paths.static, "polyfills")));
|
||||
|
||||
const compressAppModernBrotli = () =>
|
||||
compressModern(paths.app_output_root, paths.app_output_latest, "brotli");
|
||||
const compressAppModernZopfli = () =>
|
||||
compressModern(paths.app_output_root, paths.app_output_latest, "zopfli");
|
||||
const translations = gulp
|
||||
.src(path.resolve(paths.static, "translations/*.json"))
|
||||
.pipe(zopfli())
|
||||
.pipe(gulp.dest(path.resolve(paths.static, "translations")));
|
||||
|
||||
const compressHassioModernBrotli = () =>
|
||||
compressModern(
|
||||
paths.hassio_output_root,
|
||||
paths.hassio_output_latest,
|
||||
"brotli"
|
||||
);
|
||||
const compressHassioModernZopfli = () =>
|
||||
compressModern(
|
||||
paths.hassio_output_root,
|
||||
paths.hassio_output_latest,
|
||||
"zopfli"
|
||||
);
|
||||
return merge(jsLatest, jsEs5, polyfills, translations);
|
||||
});
|
||||
|
||||
const compressAppOtherBrotli = () =>
|
||||
compressOther(paths.app_output_root, paths.app_output_latest, "brotli");
|
||||
const compressAppOtherZopfli = () =>
|
||||
compressOther(paths.app_output_root, paths.app_output_latest, "zopfli");
|
||||
|
||||
const compressHassioOtherBrotli = () =>
|
||||
compressOther(paths.hassio_output_root, paths.hassio_output_latest, "brotli");
|
||||
const compressHassioOtherZopfli = () =>
|
||||
compressOther(paths.hassio_output_root, paths.hassio_output_latest, "zopfli");
|
||||
|
||||
gulp.task(
|
||||
"compress-app",
|
||||
gulp.parallel(
|
||||
compressAppModernBrotli,
|
||||
compressAppOtherBrotli,
|
||||
compressAppModernZopfli,
|
||||
compressAppOtherZopfli
|
||||
)
|
||||
);
|
||||
gulp.task(
|
||||
"compress-hassio",
|
||||
gulp.parallel(
|
||||
compressHassioModernBrotli,
|
||||
compressHassioOtherBrotli,
|
||||
compressHassioModernZopfli,
|
||||
compressHassioOtherZopfli
|
||||
)
|
||||
);
|
||||
gulp.task("compress-hassio", function compressApp() {
|
||||
return gulp
|
||||
.src(path.resolve(paths.hassio_root, "**/*.js"))
|
||||
.pipe(zopfli())
|
||||
.pipe(gulp.dest(paths.hassio_root));
|
||||
});
|
||||
|
@ -1,11 +1,13 @@
|
||||
import gulp from "gulp";
|
||||
import "./clean.js";
|
||||
import "./entry-html.js";
|
||||
import "./gather-static.js";
|
||||
import "./gen-icons-json.js";
|
||||
import "./service-worker.js";
|
||||
import "./translations.js";
|
||||
import "./rspack.js";
|
||||
// Run demo develop mode
|
||||
const gulp = require("gulp");
|
||||
|
||||
require("./clean.js");
|
||||
require("./translations.js");
|
||||
require("./gen-icons.js");
|
||||
require("./gather-static.js");
|
||||
require("./webpack.js");
|
||||
require("./service-worker.js");
|
||||
require("./entry-html.js");
|
||||
|
||||
gulp.task(
|
||||
"develop-demo",
|
||||
@ -14,15 +16,15 @@ gulp.task(
|
||||
process.env.NODE_ENV = "development";
|
||||
},
|
||||
"clean-demo",
|
||||
"translations-enable-merge-backend",
|
||||
gulp.parallel(
|
||||
"gen-icons-json",
|
||||
"gen-pages-demo-dev",
|
||||
"build-translations",
|
||||
"build-locale-data"
|
||||
"gen-icons-app",
|
||||
"gen-icons-mdi",
|
||||
"gen-icons-demo",
|
||||
"gen-index-demo-dev",
|
||||
"build-translations"
|
||||
),
|
||||
"copy-static-demo",
|
||||
"rspack-dev-server-demo"
|
||||
"webpack-dev-server-demo"
|
||||
)
|
||||
);
|
||||
|
||||
@ -33,22 +35,14 @@ gulp.task(
|
||||
process.env.NODE_ENV = "production";
|
||||
},
|
||||
"clean-demo",
|
||||
// Cast needs to be backwards compatible and older HA has no translations
|
||||
"translations-enable-merge-backend",
|
||||
gulp.parallel("gen-icons-json", "build-translations", "build-locale-data"),
|
||||
gulp.parallel(
|
||||
"gen-icons-app",
|
||||
"gen-icons-mdi",
|
||||
"gen-icons-demo",
|
||||
"build-translations"
|
||||
),
|
||||
"copy-static-demo",
|
||||
"rspack-prod-demo",
|
||||
"gen-pages-demo-prod"
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"analyze-demo",
|
||||
gulp.series(
|
||||
async function setEnv() {
|
||||
process.env.STATS = "1";
|
||||
},
|
||||
"clean",
|
||||
"rspack-prod-demo"
|
||||
"webpack-prod-demo",
|
||||
"gen-index-demo-prod"
|
||||
)
|
||||
);
|
||||
|
@ -1,181 +0,0 @@
|
||||
import fs from "fs/promises";
|
||||
import gulp from "gulp";
|
||||
import path from "path";
|
||||
import mapStream from "map-stream";
|
||||
import transform from "gulp-json-transform";
|
||||
import { LokaliseApi } from "@lokalise/node-api";
|
||||
import JSZip from "jszip";
|
||||
|
||||
const inDir = "translations";
|
||||
const inDirFrontend = `${inDir}/frontend`;
|
||||
const inDirBackend = `${inDir}/backend`;
|
||||
const srcMeta = "src/translations/translationMetadata.json";
|
||||
const encoding = "utf8";
|
||||
|
||||
function hasHtml(data) {
|
||||
return /<\S*>/i.test(data);
|
||||
}
|
||||
|
||||
function recursiveCheckHasHtml(file, data, errors, recKey) {
|
||||
Object.keys(data).forEach(function (key) {
|
||||
if (typeof data[key] === "object") {
|
||||
const nextRecKey = recKey ? `${recKey}.${key}` : key;
|
||||
recursiveCheckHasHtml(file, data[key], errors, nextRecKey);
|
||||
} else if (hasHtml(data[key])) {
|
||||
errors.push(`HTML found in ${file.path} at key ${recKey}.${key}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function checkHtml() {
|
||||
const errors = [];
|
||||
|
||||
return mapStream(function (file, cb) {
|
||||
const content = file.contents;
|
||||
let error;
|
||||
if (content) {
|
||||
if (hasHtml(String(content))) {
|
||||
const data = JSON.parse(String(content));
|
||||
recursiveCheckHasHtml(file, data, errors);
|
||||
if (errors.length > 0) {
|
||||
error = errors.join("\r\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
cb(error, file);
|
||||
});
|
||||
}
|
||||
|
||||
function convertBackendTranslations(data, _file) {
|
||||
const output = { component: {} };
|
||||
if (!data.component) {
|
||||
return output;
|
||||
}
|
||||
Object.keys(data.component).forEach((domain) => {
|
||||
if (!("entity_component" in data.component[domain])) {
|
||||
return;
|
||||
}
|
||||
output.component[domain] = { entity_component: {} };
|
||||
Object.keys(data.component[domain].entity_component).forEach((key) => {
|
||||
output.component[domain].entity_component[key] =
|
||||
data.component[domain].entity_component[key];
|
||||
});
|
||||
});
|
||||
return output;
|
||||
}
|
||||
|
||||
gulp.task("convert-backend-translations", function () {
|
||||
return gulp
|
||||
.src([`${inDirBackend}/*.json`])
|
||||
.pipe(transform((data, file) => convertBackendTranslations(data, file)))
|
||||
.pipe(gulp.dest(inDirBackend));
|
||||
});
|
||||
|
||||
gulp.task("check-translations-html", function () {
|
||||
return gulp
|
||||
.src([`${inDirFrontend}/*.json`, `${inDirBackend}/*.json`])
|
||||
.pipe(checkHtml());
|
||||
});
|
||||
|
||||
gulp.task("check-all-files-exist", async function () {
|
||||
const file = await fs.readFile(srcMeta, { encoding });
|
||||
const meta = JSON.parse(file);
|
||||
const writings = [];
|
||||
Object.keys(meta).forEach((lang) => {
|
||||
writings.push(
|
||||
fs.writeFile(`${inDirFrontend}/${lang}.json`, JSON.stringify({}), {
|
||||
flag: "wx",
|
||||
}),
|
||||
fs.writeFile(`${inDirBackend}/${lang}.json`, JSON.stringify({}), {
|
||||
flag: "wx",
|
||||
})
|
||||
);
|
||||
});
|
||||
await Promise.allSettled(writings);
|
||||
});
|
||||
|
||||
const lokaliseProjects = {
|
||||
backend: "130246255a974bd3b5e8a1.51616605",
|
||||
frontend: "3420425759f6d6d241f598.13594006",
|
||||
};
|
||||
|
||||
gulp.task("fetch-lokalise", async function () {
|
||||
let apiKey;
|
||||
try {
|
||||
apiKey =
|
||||
process.env.LOKALISE_TOKEN ||
|
||||
(await fs.readFile(".lokalise_token", { encoding }));
|
||||
} catch {
|
||||
throw new Error(
|
||||
"An Administrator Lokalise API token is required to download the latest set of translations. Place your token in a new file `.lokalise_token` in the repo root directory."
|
||||
);
|
||||
}
|
||||
const lokaliseApi = new LokaliseApi({ apiKey });
|
||||
|
||||
const mkdirPromise = Promise.all([
|
||||
fs.mkdir(inDirFrontend, { recursive: true }),
|
||||
fs.mkdir(inDirBackend, { recursive: true }),
|
||||
]);
|
||||
|
||||
await Promise.all(
|
||||
Object.entries(lokaliseProjects).map(([project, projectId]) =>
|
||||
lokaliseApi
|
||||
.files()
|
||||
.download(projectId, {
|
||||
format: "json",
|
||||
original_filenames: false,
|
||||
replace_breaks: false,
|
||||
json_unescaped_slashes: true,
|
||||
export_empty_as: "skip",
|
||||
filter_data: ["verified"],
|
||||
})
|
||||
.then((download) => fetch(download.bundle_url))
|
||||
.then((response) => {
|
||||
if (response.status === 200 || response.status === 0) {
|
||||
return response.arrayBuffer();
|
||||
}
|
||||
throw new Error(response.statusText);
|
||||
})
|
||||
.then(JSZip.loadAsync)
|
||||
.then(async (contents) => {
|
||||
await mkdirPromise;
|
||||
return Promise.all(
|
||||
Object.keys(contents.files).map(async (filename) => {
|
||||
const file = contents.file(filename);
|
||||
if (!file) {
|
||||
// no file, probably a directory
|
||||
return Promise.resolve();
|
||||
}
|
||||
return file
|
||||
.async("nodebuffer")
|
||||
.then((content) =>
|
||||
fs.writeFile(
|
||||
path.join(
|
||||
inDir,
|
||||
project,
|
||||
filename.split("/").splice(-1)[0]
|
||||
),
|
||||
content,
|
||||
{ flag: "w", encoding }
|
||||
)
|
||||
);
|
||||
})
|
||||
);
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error(err);
|
||||
throw err;
|
||||
})
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"download-translations",
|
||||
gulp.series(
|
||||
"fetch-lokalise",
|
||||
"convert-backend-translations",
|
||||
"check-translations-html",
|
||||
"check-all-files-exist"
|
||||
)
|
||||
);
|
73
build-scripts/gulp/download_translations.js
Normal file
@ -0,0 +1,73 @@
|
||||
const del = require("del");
|
||||
const gulp = require("gulp");
|
||||
const mapStream = require("map-stream");
|
||||
|
||||
const inDir = "translations";
|
||||
const downloadDir = inDir + "/downloads";
|
||||
|
||||
const tasks = [];
|
||||
|
||||
function hasHtml(data) {
|
||||
return /<[a-z][\s\S]*>/i.test(data);
|
||||
}
|
||||
|
||||
function recursiveCheckHasHtml(file, data, errors, recKey) {
|
||||
Object.keys(data).forEach(function(key) {
|
||||
if (typeof data[key] === "object") {
|
||||
const nextRecKey = recKey ? `${recKey}.${key}` : key;
|
||||
recursiveCheckHasHtml(file, data[key], errors, nextRecKey);
|
||||
} else if (hasHtml(data[key])) {
|
||||
errors.push(`HTML found in ${file.path} at key ${recKey}.${key}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function checkHtml() {
|
||||
const errors = [];
|
||||
|
||||
return mapStream(function(file, cb) {
|
||||
const content = file.contents;
|
||||
let error;
|
||||
if (content) {
|
||||
if (hasHtml(String(content))) {
|
||||
const data = JSON.parse(String(content));
|
||||
recursiveCheckHasHtml(file, data, errors);
|
||||
if (errors.length > 0) {
|
||||
error = errors.join("\r\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
cb(error, file);
|
||||
});
|
||||
}
|
||||
|
||||
let taskName = "clean-downloaded-translations";
|
||||
gulp.task(taskName, function() {
|
||||
return del([`${downloadDir}/**`]);
|
||||
});
|
||||
tasks.push(taskName);
|
||||
|
||||
taskName = "check-translations-html";
|
||||
gulp.task(taskName, function() {
|
||||
return gulp.src(`${downloadDir}/*.json`).pipe(checkHtml());
|
||||
});
|
||||
tasks.push(taskName);
|
||||
|
||||
taskName = "move-downloaded-translations";
|
||||
gulp.task(taskName, function() {
|
||||
return gulp.src(`${downloadDir}/*.json`).pipe(gulp.dest(inDir));
|
||||
});
|
||||
tasks.push(taskName);
|
||||
|
||||
taskName = "check-downloaded-translations";
|
||||
gulp.task(
|
||||
taskName,
|
||||
gulp.series(
|
||||
"check-translations-html",
|
||||
"move-downloaded-translations",
|
||||
"clean-downloaded-translations"
|
||||
)
|
||||
);
|
||||
tasks.push(taskName);
|
||||
|
||||
module.exports = tasks;
|
@ -1,293 +1,244 @@
|
||||
// Tasks to generate entry HTML
|
||||
/* eslint-disable import/no-dynamic-require */
|
||||
/* eslint-disable global-require */
|
||||
const gulp = require("gulp");
|
||||
const fs = require("fs-extra");
|
||||
const path = require("path");
|
||||
const template = require("lodash.template");
|
||||
const minify = require("html-minifier").minify;
|
||||
const config = require("../paths.js");
|
||||
|
||||
import {
|
||||
applyVersionsToRegexes,
|
||||
compileRegex,
|
||||
getPreUserAgentRegexes,
|
||||
} from "browserslist-useragent-regexp";
|
||||
import fs from "fs-extra";
|
||||
import gulp from "gulp";
|
||||
import { minify } from "html-minifier-terser";
|
||||
import template from "lodash.template";
|
||||
import { dirname, extname, resolve } from "node:path";
|
||||
import { htmlMinifierOptions, terserOptions } from "../bundle.cjs";
|
||||
import paths from "../paths.cjs";
|
||||
const templatePath = (tpl) =>
|
||||
path.resolve(config.polymer_dir, "src/html/", `${tpl}.html.template`);
|
||||
|
||||
// macOS companion app has no way to obtain the Safari version used by WKWebView,
|
||||
// and it is not in the default user agent string. So we add an additional regex
|
||||
// to serve modern based on a minimum macOS version. We take the minimum Safari
|
||||
// major version from browserslist and manually map that to a supported macOS
|
||||
// version. Note this assumes the user has kept Safari updated.
|
||||
const HA_MACOS_REGEX =
|
||||
/Home Assistant\/[\d.]+ \(.+; macOS (\d+)\.(\d+)(?:\.(\d+))?\)/;
|
||||
const SAFARI_TO_MACOS = {
|
||||
15: [10, 15, 0],
|
||||
16: [11, 0, 0],
|
||||
17: [12, 0, 0],
|
||||
18: [13, 0, 0],
|
||||
const readFile = (pth) => fs.readFileSync(pth).toString();
|
||||
|
||||
const renderTemplate = (pth, data = {}, pathFunc = templatePath) => {
|
||||
const compiled = template(readFile(pathFunc(pth)));
|
||||
return compiled({ ...data, renderTemplate });
|
||||
};
|
||||
|
||||
const getCommonTemplateVars = () => {
|
||||
const browserRegexes = getPreUserAgentRegexes({
|
||||
env: "modern",
|
||||
allowHigherVersions: true,
|
||||
mobileToDesktop: true,
|
||||
throwOnMissing: true,
|
||||
const renderDemoTemplate = (pth, data = {}) =>
|
||||
renderTemplate(pth, data, (tpl) =>
|
||||
path.resolve(config.demo_dir, "src/html/", `${tpl}.html.template`)
|
||||
);
|
||||
|
||||
const renderCastTemplate = (pth, data = {}) =>
|
||||
renderTemplate(pth, data, (tpl) =>
|
||||
path.resolve(config.cast_dir, "src/html/", `${tpl}.html.template`)
|
||||
);
|
||||
|
||||
const renderGalleryTemplate = (pth, data = {}) =>
|
||||
renderTemplate(pth, data, (tpl) =>
|
||||
path.resolve(config.gallery_dir, "src/html/", `${tpl}.html.template`)
|
||||
);
|
||||
|
||||
const minifyHtml = (content) =>
|
||||
minify(content, {
|
||||
collapseWhitespace: true,
|
||||
minifyJS: true,
|
||||
minifyCSS: true,
|
||||
removeComments: true,
|
||||
});
|
||||
const minSafariVersion = browserRegexes.find(
|
||||
(regex) => regex.family === "safari"
|
||||
)?.matchedVersions[0][0];
|
||||
const minMacOSVersion = SAFARI_TO_MACOS[minSafariVersion];
|
||||
if (!minMacOSVersion) {
|
||||
throw Error(
|
||||
`Could not find minimum MacOS version for Safari ${minSafariVersion}.`
|
||||
|
||||
const PAGES = ["onboarding", "authorize"];
|
||||
|
||||
gulp.task("gen-pages-dev", (done) => {
|
||||
for (const page of PAGES) {
|
||||
const content = renderTemplate(page, {
|
||||
latestPageJS: `/frontend_latest/${page}.js`,
|
||||
latestHassIconsJS: "/frontend_latest/hass-icons.js",
|
||||
|
||||
es5Compatibility: "/frontend_es5/compatibility.js",
|
||||
es5PageJS: `/frontend_es5/${page}.js`,
|
||||
es5HassIconsJS: "/frontend_es5/hass-icons.js",
|
||||
});
|
||||
|
||||
fs.outputFileSync(path.resolve(config.root, `${page}.html`), content);
|
||||
}
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("gen-pages-prod", (done) => {
|
||||
const latestManifest = require(path.resolve(config.output, "manifest.json"));
|
||||
const es5Manifest = require(path.resolve(config.output_es5, "manifest.json"));
|
||||
|
||||
for (const page of PAGES) {
|
||||
const content = renderTemplate(page, {
|
||||
latestPageJS: latestManifest[`${page}.js`],
|
||||
latestHassIconsJS: latestManifest["hass-icons.js"],
|
||||
|
||||
es5Compatibility: es5Manifest["compatibility.js"],
|
||||
es5PageJS: es5Manifest[`${page}.js`],
|
||||
es5HassIconsJS: es5Manifest["hass-icons.js"],
|
||||
});
|
||||
|
||||
fs.outputFileSync(
|
||||
path.resolve(config.root, `${page}.html`),
|
||||
minifyHtml(content)
|
||||
);
|
||||
}
|
||||
const haMacOSRegex = applyVersionsToRegexes(
|
||||
[
|
||||
{
|
||||
family: "ha_macos",
|
||||
regex: HA_MACOS_REGEX,
|
||||
matchedVersions: [minMacOSVersion],
|
||||
requestVersions: [minMacOSVersion],
|
||||
},
|
||||
],
|
||||
{ ignorePatch: true, allowHigherVersions: true }
|
||||
);
|
||||
return {
|
||||
modernRegex: compileRegex(browserRegexes.concat(haMacOSRegex)).toString(),
|
||||
hassUrl: process.env.HASS_URL || "",
|
||||
};
|
||||
};
|
||||
done();
|
||||
});
|
||||
|
||||
const renderTemplate = (templateFile, data = {}) => {
|
||||
const compiled = template(
|
||||
fs.readFileSync(templateFile, { encoding: "utf-8" })
|
||||
);
|
||||
return compiled({
|
||||
...data,
|
||||
// Resolve any child/nested templates relative to the parent and pass the same data
|
||||
renderTemplate: (childTemplate) =>
|
||||
renderTemplate(resolve(dirname(templateFile), childTemplate), data),
|
||||
gulp.task("gen-index-app-dev", (done) => {
|
||||
// In dev mode we don't mangle names, so we hardcode urls. That way we can
|
||||
// run webpack as last in watch mode, which blocks output.
|
||||
const content = renderTemplate("index", {
|
||||
latestAppJS: "/frontend_latest/app.js",
|
||||
latestCoreJS: "/frontend_latest/core.js",
|
||||
latestCustomPanelJS: "/frontend_latest/custom-panel.js",
|
||||
latestHassIconsJS: "/frontend_latest/hass-icons.js",
|
||||
|
||||
es5Compatibility: "/frontend_es5/compatibility.js",
|
||||
es5AppJS: "/frontend_es5/app.js",
|
||||
es5CoreJS: "/frontend_es5/core.js",
|
||||
es5CustomPanelJS: "/frontend_es5/custom-panel.js",
|
||||
es5HassIconsJS: "/frontend_es5/hass-icons.js",
|
||||
}).replace(/#THEMEC/g, "{{ theme_color }}");
|
||||
|
||||
fs.outputFileSync(path.resolve(config.root, "index.html"), content);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("gen-index-app-prod", (done) => {
|
||||
const latestManifest = require(path.resolve(config.output, "manifest.json"));
|
||||
const es5Manifest = require(path.resolve(config.output_es5, "manifest.json"));
|
||||
const content = renderTemplate("index", {
|
||||
latestAppJS: latestManifest["app.js"],
|
||||
latestCoreJS: latestManifest["core.js"],
|
||||
latestCustomPanelJS: latestManifest["custom-panel.js"],
|
||||
latestHassIconsJS: latestManifest["hass-icons.js"],
|
||||
|
||||
es5Compatibility: es5Manifest["compatibility.js"],
|
||||
es5AppJS: es5Manifest["app.js"],
|
||||
es5CoreJS: es5Manifest["core.js"],
|
||||
es5CustomPanelJS: es5Manifest["custom-panel.js"],
|
||||
es5HassIconsJS: es5Manifest["hass-icons.js"],
|
||||
});
|
||||
};
|
||||
const minified = minifyHtml(content).replace(/#THEMEC/g, "{{ theme_color }}");
|
||||
|
||||
const WRAP_TAGS = { ".js": "script", ".css": "style" };
|
||||
fs.outputFileSync(path.resolve(config.root, "index.html"), minified);
|
||||
done();
|
||||
});
|
||||
|
||||
const minifyHtml = (content, ext) => {
|
||||
const wrapTag = WRAP_TAGS[ext] || "";
|
||||
const begTag = wrapTag && `<${wrapTag}>`;
|
||||
const endTag = wrapTag && `</${wrapTag}>`;
|
||||
return minify(begTag + content + endTag, {
|
||||
...htmlMinifierOptions,
|
||||
conservativeCollapse: false,
|
||||
minifyJS: terserOptions({
|
||||
latestBuild: false, // Shared scripts should be ES5
|
||||
isTestBuild: true, // Don't need source maps
|
||||
}),
|
||||
}).then((wrapped) =>
|
||||
wrapTag ? wrapped.slice(begTag.length, -endTag.length) : wrapped
|
||||
gulp.task("gen-index-cast-dev", (done) => {
|
||||
const contentReceiver = renderCastTemplate("receiver", {
|
||||
latestReceiverJS: "/frontend_latest/receiver.js",
|
||||
});
|
||||
fs.outputFileSync(
|
||||
path.resolve(config.cast_root, "receiver.html"),
|
||||
contentReceiver
|
||||
);
|
||||
};
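A hedged sketch of the wrap/unwrap trick minifyHtml uses above: wrapping a bare .js or .css fragment in script/style tags lets html-minifier-terser apply its JS/CSS minifiers, and the tags are stripped again afterwards. Recent html-minifier-terser versions return a Promise, which matches the .then() chaining above; the extra options shown here are assumptions, not the task's exact configuration.

import { minify } from "html-minifier-terser";

const WRAP = { ".js": "script", ".css": "style" };

const minifyFragment = async (content, ext = "") => {
  const tag = WRAP[ext] || "";
  const beg = tag ? `<${tag}>` : "";
  const end = tag ? `</${tag}>` : "";
  const wrapped = await minify(beg + content + end, {
    collapseWhitespace: true,
    minifyJS: true,
    minifyCSS: true,
  });
  // Strip the helper tags again so only the minified fragment remains.
  return tag ? wrapped.slice(beg.length, -end.length) : wrapped;
};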
|
||||
|
||||
// Function to generate a dev task for each project's configuration
|
||||
const genPagesDevTask =
|
||||
(
|
||||
pageEntries,
|
||||
inputRoot,
|
||||
outputRoot,
|
||||
inputSub = "src/html",
|
||||
publicRoot = ""
|
||||
) =>
|
||||
async () => {
|
||||
const commonVars = getCommonTemplateVars();
|
||||
for (const [page, entries] of Object.entries(pageEntries)) {
|
||||
const content = renderTemplate(
|
||||
resolve(inputRoot, inputSub, `${page}.template`),
|
||||
{
|
||||
...commonVars,
|
||||
latestEntryJS: entries.map(
|
||||
(entry) => `${publicRoot}/frontend_latest/${entry}.js`
|
||||
),
|
||||
es5EntryJS: entries.map(
|
||||
(entry) => `${publicRoot}/frontend_es5/${entry}.js`
|
||||
),
|
||||
latestCustomPanelJS: `${publicRoot}/frontend_latest/custom-panel.js`,
|
||||
es5CustomPanelJS: `${publicRoot}/frontend_es5/custom-panel.js`,
|
||||
}
|
||||
);
|
||||
fs.outputFileSync(resolve(outputRoot, page), content);
|
||||
}
|
||||
};
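Usage sketch for the factory above (the task name here is illustrative; the real registrations appear further down): the factory returns an async gulp task bound to one project's page map and directories.

gulp.task(
  "gen-pages-example-dev",
  genPagesDevTask(
    { "index.html": ["core", "app"] }, // pageEntries
    paths.root_dir, // inputRoot; templates are read from <inputRoot>/src/html
    paths.app_output_root // outputRoot for the rendered HTML
  )
);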
|
||||
const contentFAQ = renderCastTemplate("launcher-faq", {
|
||||
latestLauncherJS: "/frontend_latest/launcher.js",
|
||||
es5LauncherJS: "/frontend_es5/launcher.js",
|
||||
});
|
||||
fs.outputFileSync(path.resolve(config.cast_root, "faq.html"), contentFAQ);
|
||||
|
||||
// Same as previous but for production builds
|
||||
// (includes minification and hashed file names from manifest)
|
||||
const genPagesProdTask =
|
||||
(
|
||||
pageEntries,
|
||||
inputRoot,
|
||||
outputRoot,
|
||||
outputLatest,
|
||||
outputES5,
|
||||
inputSub = "src/html"
|
||||
) =>
|
||||
async () => {
|
||||
const latestManifest = fs.readJsonSync(
|
||||
resolve(outputLatest, "manifest.json")
|
||||
);
|
||||
const es5Manifest = outputES5
|
||||
? fs.readJsonSync(resolve(outputES5, "manifest.json"))
|
||||
: {};
|
||||
const commonVars = getCommonTemplateVars();
|
||||
const minifiedHTML = [];
|
||||
for (const [page, entries] of Object.entries(pageEntries)) {
|
||||
const content = renderTemplate(
|
||||
resolve(inputRoot, inputSub, `${page}.template`),
|
||||
{
|
||||
...commonVars,
|
||||
latestEntryJS: entries.map((entry) => latestManifest[`${entry}.js`]),
|
||||
es5EntryJS: entries.map((entry) => es5Manifest[`${entry}.js`]),
|
||||
latestCustomPanelJS: latestManifest["custom-panel.js"],
|
||||
es5CustomPanelJS: es5Manifest["custom-panel.js"],
|
||||
}
|
||||
);
|
||||
minifiedHTML.push(
|
||||
minifyHtml(content, extname(page)).then((minified) =>
|
||||
fs.outputFileSync(resolve(outputRoot, page), minified)
|
||||
)
|
||||
);
|
||||
}
|
||||
await Promise.all(minifiedHTML);
|
||||
};
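A small sketch of the manifest lookup the production factory relies on: the bundler writes a manifest.json (shape assumed here) that maps logical entry names to hashed filenames, so the rendered HTML always references cache-busted bundles.

// Assumed manifest shape, e.g. the contents of frontend_latest/manifest.json
const exampleManifest = {
  "core.js": "/frontend_latest/core.3d41a2b8.js",
  "app.js": "/frontend_latest/app.9f0c77de.js",
  "custom-panel.js": "/frontend_latest/custom-panel.55aa01c2.js",
};

const resolveEntries = (entries, manifest) =>
  entries.map((entry) => manifest[`${entry}.js`]);

console.log(resolveEntries(["core", "app"], exampleManifest));
// ["/frontend_latest/core.3d41a2b8.js", "/frontend_latest/app.9f0c77de.js"]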
|
||||
const contentLauncher = renderCastTemplate("launcher", {
|
||||
latestLauncherJS: "/frontend_latest/launcher.js",
|
||||
es5LauncherJS: "/frontend_es5/launcher.js",
|
||||
});
|
||||
fs.outputFileSync(
|
||||
path.resolve(config.cast_root, "index.html"),
|
||||
contentLauncher
|
||||
);
|
||||
done();
|
||||
});
|
||||
|
||||
// Map HTML pages to their required entrypoints
|
||||
const APP_PAGE_ENTRIES = {
|
||||
"authorize.html": ["authorize"],
|
||||
"onboarding.html": ["onboarding"],
|
||||
"index.html": ["core", "app"],
|
||||
};
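For illustration, this is what the dev factory derives from one APP_PAGE_ENTRIES row: in dev mode the bundles keep their plain names, so the URLs can be hardcoded instead of being looked up in a manifest (a sketch, not repo code).

const toDevVars = (entries, publicRoot = "") => ({
  latestEntryJS: entries.map((e) => `${publicRoot}/frontend_latest/${e}.js`),
  es5EntryJS: entries.map((e) => `${publicRoot}/frontend_es5/${e}.js`),
});

console.log(toDevVars(["core", "app"]));
// { latestEntryJS: ["/frontend_latest/core.js", "/frontend_latest/app.js"],
//   es5EntryJS: ["/frontend_es5/core.js", "/frontend_es5/app.js"] }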
|
||||
gulp.task("gen-index-cast-prod", (done) => {
|
||||
const latestManifest = require(path.resolve(
|
||||
config.cast_output,
|
||||
"manifest.json"
|
||||
));
|
||||
const es5Manifest = require(path.resolve(
|
||||
config.cast_output_es5,
|
||||
"manifest.json"
|
||||
));
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-app-dev",
|
||||
genPagesDevTask(APP_PAGE_ENTRIES, paths.root_dir, paths.app_output_root)
|
||||
);
|
||||
const contentReceiver = renderCastTemplate("receiver", {
|
||||
latestReceiverJS: latestManifest["receiver.js"],
|
||||
});
|
||||
fs.outputFileSync(
|
||||
path.resolve(config.cast_root, "receiver.html"),
|
||||
contentReceiver
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-app-prod",
|
||||
genPagesProdTask(
|
||||
APP_PAGE_ENTRIES,
|
||||
paths.root_dir,
|
||||
paths.app_output_root,
|
||||
paths.app_output_latest,
|
||||
paths.app_output_es5
|
||||
)
|
||||
);
|
||||
const contentFAQ = renderCastTemplate("launcher-faq", {
|
||||
latestLauncherJS: latestManifest["launcher.js"],
|
||||
es5LauncherJS: es5Manifest["launcher.js"],
|
||||
});
|
||||
fs.outputFileSync(path.resolve(config.cast_root, "faq.html"), contentFAQ);
|
||||
|
||||
const CAST_PAGE_ENTRIES = {
|
||||
"faq.html": ["launcher"],
|
||||
"index.html": ["launcher"],
|
||||
"media.html": ["media"],
|
||||
"receiver.html": ["receiver"],
|
||||
};
|
||||
const contentLauncher = renderCastTemplate("launcher", {
|
||||
latestLauncherJS: latestManifest["launcher.js"],
|
||||
es5LauncherJS: es5Manifest["launcher.js"],
|
||||
});
|
||||
fs.outputFileSync(
|
||||
path.resolve(config.cast_root, "index.html"),
|
||||
contentLauncher
|
||||
);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-cast-dev",
|
||||
genPagesDevTask(CAST_PAGE_ENTRIES, paths.cast_dir, paths.cast_output_root)
|
||||
);
|
||||
gulp.task("gen-index-demo-dev", (done) => {
|
||||
// In dev mode we don't mangle names, so we hardcode the URLs. That way we can
// run webpack last, in watch mode, which blocks further output.
|
||||
const content = renderDemoTemplate("index", {
|
||||
latestDemoJS: "/frontend_latest/main.js",
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-cast-prod",
|
||||
genPagesProdTask(
|
||||
CAST_PAGE_ENTRIES,
|
||||
paths.cast_dir,
|
||||
paths.cast_output_root,
|
||||
paths.cast_output_latest,
|
||||
paths.cast_output_es5
|
||||
)
|
||||
);
|
||||
es5Compatibility: "/frontend_es5/compatibility.js",
|
||||
es5DemoJS: "/frontend_es5/main.js",
|
||||
});
|
||||
|
||||
const DEMO_PAGE_ENTRIES = { "index.html": ["main"] };
|
||||
fs.outputFileSync(path.resolve(config.demo_root, "index.html"), content);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-demo-dev",
|
||||
genPagesDevTask(DEMO_PAGE_ENTRIES, paths.demo_dir, paths.demo_output_root)
|
||||
);
|
||||
gulp.task("gen-index-demo-prod", (done) => {
|
||||
const latestManifest = require(path.resolve(
|
||||
config.demo_output,
|
||||
"manifest.json"
|
||||
));
|
||||
const es5Manifest = require(path.resolve(
|
||||
config.demo_output_es5,
|
||||
"manifest.json"
|
||||
));
|
||||
const content = renderDemoTemplate("index", {
|
||||
latestDemoJS: latestManifest["main.js"],
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-demo-prod",
|
||||
genPagesProdTask(
|
||||
DEMO_PAGE_ENTRIES,
|
||||
paths.demo_dir,
|
||||
paths.demo_output_root,
|
||||
paths.demo_output_latest,
|
||||
paths.demo_output_es5
|
||||
)
|
||||
);
|
||||
es5Compatibility: es5Manifest["compatibility.js"],
|
||||
es5DemoJS: es5Manifest["main.js"],
|
||||
});
|
||||
const minified = minifyHtml(content);
|
||||
|
||||
const GALLERY_PAGE_ENTRIES = { "index.html": ["entrypoint"] };
|
||||
fs.outputFileSync(path.resolve(config.demo_root, "index.html"), minified);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-gallery-dev",
|
||||
genPagesDevTask(
|
||||
GALLERY_PAGE_ENTRIES,
|
||||
paths.gallery_dir,
|
||||
paths.gallery_output_root
|
||||
)
|
||||
);
|
||||
gulp.task("gen-index-gallery-dev", (done) => {
|
||||
// In dev mode we don't mangle names, so we hardcode the URLs. That way we can
// run webpack last, in watch mode, which blocks further output.
|
||||
const content = renderGalleryTemplate("index", {
|
||||
latestGalleryJS: "./entrypoint.js",
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-gallery-prod",
|
||||
genPagesProdTask(
|
||||
GALLERY_PAGE_ENTRIES,
|
||||
paths.gallery_dir,
|
||||
paths.gallery_output_root,
|
||||
paths.gallery_output_latest
|
||||
)
|
||||
);
|
||||
fs.outputFileSync(path.resolve(config.gallery_root, "index.html"), content);
|
||||
done();
|
||||
});
|
||||
|
||||
const LANDING_PAGE_PAGE_ENTRIES = { "index.html": ["entrypoint"] };
|
||||
gulp.task("gen-index-gallery-prod", (done) => {
|
||||
const latestManifest = require(path.resolve(
|
||||
config.gallery_output,
|
||||
"manifest.json"
|
||||
));
|
||||
const content = renderGalleryTemplate("index", {
|
||||
latestGalleryJS: latestManifest["entrypoint.js"],
|
||||
});
|
||||
const minified = minifyHtml(content);
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-landing-page-dev",
|
||||
genPagesDevTask(
|
||||
LANDING_PAGE_PAGE_ENTRIES,
|
||||
paths.landingPage_dir,
|
||||
paths.landingPage_output_root
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-landing-page-prod",
|
||||
genPagesProdTask(
|
||||
LANDING_PAGE_PAGE_ENTRIES,
|
||||
paths.landingPage_dir,
|
||||
paths.landingPage_output_root,
|
||||
paths.landingPage_output_latest,
|
||||
paths.landingPage_output_es5
|
||||
)
|
||||
);
|
||||
|
||||
const HASSIO_PAGE_ENTRIES = { "entrypoint.js": ["entrypoint"] };
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-hassio-dev",
|
||||
genPagesDevTask(
|
||||
HASSIO_PAGE_ENTRIES,
|
||||
paths.hassio_dir,
|
||||
paths.hassio_output_root,
|
||||
"src",
|
||||
paths.hassio_publicPath
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"gen-pages-hassio-prod",
|
||||
genPagesProdTask(
|
||||
HASSIO_PAGE_ENTRIES,
|
||||
paths.hassio_dir,
|
||||
paths.hassio_output_root,
|
||||
paths.hassio_output_latest,
|
||||
paths.hassio_output_es5,
|
||||
"src"
|
||||
)
|
||||
);
|
||||
fs.outputFileSync(path.resolve(config.gallery_root, "index.html"), minified);
|
||||
done();
|
||||
});
|
||||
|
@ -1,171 +0,0 @@
|
||||
// Task to download the latest Lokalise translations from the nightly workflow artifacts
|
||||
|
||||
import { createOAuthDeviceAuth } from "@octokit/auth-oauth-device";
|
||||
import { retry } from "@octokit/plugin-retry";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import { deleteAsync } from "del";
|
||||
import { mkdir, readFile, writeFile } from "fs/promises";
|
||||
import gulp from "gulp";
|
||||
import jszip from "jszip";
|
||||
import path from "path";
|
||||
import process from "process";
|
||||
import { extract } from "tar";
|
||||
|
||||
const MAX_AGE = 24; // hours
|
||||
const OWNER = "home-assistant";
|
||||
const REPO = "frontend";
|
||||
const WORKFLOW_NAME = "nightly.yaml";
|
||||
const ARTIFACT_NAME = "translations";
|
||||
const CLIENT_ID = "Iv1.3914e28cb27834d1";
|
||||
const EXTRACT_DIR = "translations";
|
||||
const TOKEN_FILE = path.posix.join(EXTRACT_DIR, "token.json");
|
||||
const ARTIFACT_FILE = path.posix.join(EXTRACT_DIR, "artifact.json");
|
||||
|
||||
let allowTokenSetup = false;
|
||||
gulp.task("allow-setup-fetch-nightly-translations", (done) => {
|
||||
allowTokenSetup = true;
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("fetch-nightly-translations", async function () {
|
||||
// Skip all when environment flag is set (assumes translations are already in place)
|
||||
if (process.env?.SKIP_FETCH_NIGHTLY_TRANSLATIONS) {
|
||||
console.log("Skipping fetch due to environment signal");
|
||||
return;
|
||||
}
|
||||
|
||||
// Read current translations artifact info if it exists,
|
||||
// and stop if they are not old enough
|
||||
let currentArtifact;
|
||||
try {
|
||||
currentArtifact = JSON.parse(await readFile(ARTIFACT_FILE, "utf-8"));
|
||||
const currentAge =
|
||||
(Date.now() - Date.parse(currentArtifact.created_at)) / 3600000;
|
||||
if (currentAge < MAX_AGE) {
|
||||
console.log(
|
||||
"Keeping current translations (only %s hours old)",
|
||||
currentAge.toFixed(1)
|
||||
);
|
||||
return;
|
||||
}
|
||||
} catch {
|
||||
currentArtifact = null;
|
||||
}
|
||||
|
||||
// To store file writing promises
|
||||
const createExtractDir = mkdir(EXTRACT_DIR, { recursive: true });
|
||||
const writings = [];
|
||||
|
||||
// Authenticate to GitHub using GitHub action token if it exists,
|
||||
// otherwise look for a saved user token or generate a new one if none
|
||||
let tokenAuth;
|
||||
if (process.env.GITHUB_TOKEN) {
|
||||
tokenAuth = { token: process.env.GITHUB_TOKEN };
|
||||
} else {
|
||||
try {
|
||||
tokenAuth = JSON.parse(await readFile(TOKEN_FILE, "utf-8"));
|
||||
} catch {
|
||||
if (!allowTokenSetup) {
|
||||
console.log("No token found so build will continue with English only");
|
||||
return;
|
||||
}
|
||||
const auth = createOAuthDeviceAuth({
|
||||
clientType: "github-app",
|
||||
clientId: CLIENT_ID,
|
||||
onVerification: (verification) => {
|
||||
console.log(
|
||||
"Task needs to authenticate to GitHub to fetch the translations from nightly workflow\n" +
|
||||
"Please go to %s to authorize this task\n" +
|
||||
"\nEnter user code: %s\n\n" +
|
||||
"This code will expire in %s minutes\n" +
|
||||
"Task will automatically continue after authorization and token will be saved for future use",
|
||||
verification.verification_uri,
|
||||
verification.user_code,
|
||||
(verification.expires_in / 60).toFixed(0)
|
||||
);
|
||||
},
|
||||
});
|
||||
tokenAuth = await auth({ type: "oauth" });
|
||||
writings.push(
|
||||
createExtractDir.then(
|
||||
writeFile(TOKEN_FILE, JSON.stringify(tokenAuth, null, 2))
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Authenticate with token and request workflow runs from GitHub
|
||||
console.log("Fetching new translations...");
|
||||
const octokit = new (Octokit.plugin(retry))({
|
||||
userAgent: "Fetch Nightly Translations",
|
||||
auth: tokenAuth.token,
|
||||
});
|
||||
|
||||
const workflowRunsResponse = await octokit.rest.actions.listWorkflowRuns({
|
||||
owner: OWNER,
|
||||
repo: REPO,
|
||||
workflow_id: WORKFLOW_NAME,
|
||||
status: "success",
|
||||
event: "schedule",
|
||||
per_page: 1,
|
||||
exclude_pull_requests: true,
|
||||
});
|
||||
if (workflowRunsResponse.data.total_count === 0) {
|
||||
throw Error("No successful nightly workflow runs found");
|
||||
}
|
||||
const latestNightlyRun = workflowRunsResponse.data.workflow_runs[0];
|
||||
|
||||
// Stop if the current artifact is already the latest, otherwise find the translations artifact
|
||||
if (currentArtifact?.workflow_run.id === latestNightlyRun.id) {
|
||||
console.log("Stopping because current translations are still the latest");
|
||||
return;
|
||||
}
|
||||
const latestArtifact = (
|
||||
await octokit.actions.listWorkflowRunArtifacts({
|
||||
owner: OWNER,
|
||||
repo: REPO,
|
||||
run_id: latestNightlyRun.id,
|
||||
})
|
||||
).data.artifacts.find((artifact) => artifact.name === ARTIFACT_NAME);
|
||||
if (!latestArtifact) {
|
||||
throw Error("Latest nightly workflow run has no translations artifact");
|
||||
}
|
||||
writings.push(
|
||||
createExtractDir.then(
|
||||
writeFile(ARTIFACT_FILE, JSON.stringify(latestArtifact, null, 2))
|
||||
)
|
||||
);
|
||||
|
||||
// Remove the current translations
|
||||
const deleteCurrent = Promise.all(writings).then(
|
||||
deleteAsync([`${EXTRACT_DIR}/*`, `!${ARTIFACT_FILE}`, `!${TOKEN_FILE}`])
|
||||
);
|
||||
|
||||
// Get the download URL and follow the redirect to download (stored as ArrayBuffer)
|
||||
const downloadResponse = await octokit.actions.downloadArtifact({
|
||||
owner: OWNER,
|
||||
repo: REPO,
|
||||
artifact_id: latestArtifact.id,
|
||||
archive_format: "zip",
|
||||
});
|
||||
if (downloadResponse.status !== 200) {
|
||||
throw Error("Failure downloading translations artifact");
|
||||
}
|
||||
|
||||
// Artifact is a tarball, but GitHub wraps it in a zip file
|
||||
console.log("Unpacking downloaded translations...");
|
||||
const zip = await jszip.loadAsync(downloadResponse.data);
|
||||
await deleteCurrent;
|
||||
const extractStream = zip.file(/.*/)[0].nodeStream().pipe(extract());
|
||||
await new Promise((resolve, reject) => {
|
||||
extractStream.on("close", resolve).on("error", reject);
|
||||
});
|
||||
});
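A compact sketch of the freshness check at the top of the task: the saved artifact metadata is compared against MAX_AGE in hours, and the fetch is skipped while the current translations are still fresh.

const MAX_AGE_HOURS = 24;

const isStale = (createdAt, maxAge = MAX_AGE_HOURS) =>
  (Date.now() - Date.parse(createdAt)) / 3_600_000 >= maxAge;

console.log(isStale(new Date(Date.now() - 2 * 3_600_000).toISOString())); // false
console.log(isStale("2020-01-01T00:00:00Z")); // true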
|
||||
|
||||
gulp.task(
|
||||
"setup-and-fetch-nightly-translations",
|
||||
gulp.series(
|
||||
"allow-setup-fetch-nightly-translations",
|
||||
"fetch-nightly-translations"
|
||||
)
|
||||
);
|
@ -1,143 +1,13 @@
|
||||
import fs from "fs";
|
||||
import { glob } from "glob";
|
||||
import gulp from "gulp";
|
||||
import yaml from "js-yaml";
|
||||
import { marked } from "marked";
|
||||
import path from "path";
|
||||
import paths from "../paths.cjs";
|
||||
import "./clean.js";
|
||||
import "./entry-html.js";
|
||||
import "./gather-static.js";
|
||||
import "./gen-icons-json.js";
|
||||
import "./service-worker.js";
|
||||
import "./translations.js";
|
||||
import "./rspack.js";
|
||||
// Run demo develop mode
|
||||
const gulp = require("gulp");
|
||||
|
||||
gulp.task("gather-gallery-pages", async function gatherPages() {
|
||||
const pageDir = path.resolve(paths.gallery_dir, "src/pages");
|
||||
const files = await glob(path.resolve(pageDir, "**/*"));
|
||||
|
||||
const galleryBuild = path.resolve(paths.gallery_dir, "build");
|
||||
fs.mkdirSync(galleryBuild, { recursive: true });
|
||||
|
||||
let content = "export const PAGES = {\n";
|
||||
|
||||
const processed = new Set();
|
||||
|
||||
for (const file of files) {
|
||||
if (fs.lstatSync(file).isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
const pageId = file.substring(pageDir.length + 1, file.lastIndexOf("."));
|
||||
|
||||
if (processed.has(pageId)) {
|
||||
continue;
|
||||
}
|
||||
processed.add(pageId);
|
||||
|
||||
const [category] = pageId.split("/", 2);
|
||||
|
||||
const demoFile = path.resolve(pageDir, `${pageId}.ts`);
|
||||
const descriptionFile = path.resolve(pageDir, `${pageId}.markdown`);
|
||||
const hasDemo = fs.existsSync(demoFile);
|
||||
let hasDescription = fs.existsSync(descriptionFile);
|
||||
let metadata = {};
|
||||
if (hasDescription) {
|
||||
let descriptionContent = fs.readFileSync(descriptionFile, "utf-8");
|
||||
|
||||
if (descriptionContent.startsWith("---")) {
|
||||
const metadataEnd = descriptionContent.indexOf("---", 3);
|
||||
metadata = yaml.load(descriptionContent.substring(3, metadataEnd));
|
||||
descriptionContent = descriptionContent
|
||||
.substring(metadataEnd + 3)
|
||||
.trim();
|
||||
}
|
||||
|
||||
// If description is just metadata
|
||||
if (descriptionContent === "") {
|
||||
hasDescription = false;
|
||||
} else {
|
||||
descriptionContent = marked(descriptionContent).replace(/`/g, "\\`");
|
||||
fs.mkdirSync(path.resolve(galleryBuild, category), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.resolve(galleryBuild, `${pageId}-description.ts`),
|
||||
`
|
||||
import {html} from "lit";
|
||||
export default html\`${descriptionContent}\`
|
||||
`
|
||||
);
|
||||
}
|
||||
}
|
||||
content += ` "${pageId}": {
|
||||
metadata: ${JSON.stringify(metadata)},
|
||||
${
|
||||
hasDescription
|
||||
? `description: () => import("./${pageId}-description").then(m => m.default),`
|
||||
: ""
|
||||
}
|
||||
${hasDemo ? `demo: () => import("../src/pages/${pageId}")` : ""}
|
||||
|
||||
},\n`;
|
||||
}
|
||||
|
||||
content += "};\n";
|
||||
|
||||
// Generate sidebar
|
||||
const sidebarPath = path.resolve(paths.gallery_dir, "sidebar.js");
|
||||
const sidebar = (await import(sidebarPath)).default;
|
||||
|
||||
const pagesToProcess = {};
|
||||
for (const key of processed) {
|
||||
const [category, page] = key.split("/", 2);
|
||||
if (!(category in pagesToProcess)) {
|
||||
pagesToProcess[category] = new Set();
|
||||
}
|
||||
pagesToProcess[category].add(page);
|
||||
}
|
||||
|
||||
for (const group of Object.values(sidebar)) {
|
||||
const toProcess = pagesToProcess[group.category];
|
||||
delete pagesToProcess[group.category];
|
||||
|
||||
if (!toProcess) {
|
||||
console.error("Unknown category", group.category);
|
||||
if (!group.pages) {
|
||||
group.pages = [];
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Any pre-defined groups will not be sorted.
|
||||
if (group.pages) {
|
||||
for (const page of group.pages) {
|
||||
if (!toProcess.delete(page)) {
|
||||
console.error("Found unreferenced demo", page);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
group.pages = [];
|
||||
}
|
||||
for (const page of Array.from(toProcess).sort()) {
|
||||
group.pages.push(page);
|
||||
}
|
||||
}
|
||||
|
||||
for (const [category, pages] of Object.entries(pagesToProcess)) {
|
||||
sidebar.push({
|
||||
category,
|
||||
header: category,
|
||||
pages: Array.from(pages).sort(),
|
||||
});
|
||||
}
|
||||
|
||||
content += `export const SIDEBAR = ${JSON.stringify(sidebar, null, 2)};\n`;
|
||||
|
||||
fs.writeFileSync(
|
||||
path.resolve(galleryBuild, "import-pages.ts"),
|
||||
content,
|
||||
"utf-8"
|
||||
);
|
||||
});
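A sketch of the front-matter handling inside gather-gallery-pages: a page's .markdown description may begin with a YAML block between two "---" markers, which becomes the page metadata, while the remainder is rendered to HTML with marked.

import yaml from "js-yaml";
import { marked } from "marked";

const parseDescription = (text) => {
  let metadata = {};
  let body = text;
  if (body.startsWith("---")) {
    const metadataEnd = body.indexOf("---", 3);
    metadata = yaml.load(body.substring(3, metadataEnd));
    body = body.substring(metadataEnd + 3).trim();
  }
  return { metadata, html: body ? marked(body) : "" };
};

console.log(parseDescription("---\ntitle: Demo\n---\nSome **text**"));
// { metadata: { title: "Demo" }, html: "<p>Some <strong>text</strong></p>\n" }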
|
||||
require("./clean.js");
|
||||
require("./translations.js");
|
||||
require("./gen-icons.js");
|
||||
require("./gather-static.js");
|
||||
require("./webpack.js");
|
||||
require("./service-worker.js");
|
||||
require("./entry-html.js");
|
||||
|
||||
gulp.task(
|
||||
"develop-gallery",
|
||||
@ -146,27 +16,10 @@ gulp.task(
|
||||
process.env.NODE_ENV = "development";
|
||||
},
|
||||
"clean-gallery",
|
||||
"translations-enable-merge-backend",
|
||||
gulp.parallel(
|
||||
"gen-icons-json",
|
||||
"build-translations",
|
||||
"build-locale-data",
|
||||
"gather-gallery-pages"
|
||||
),
|
||||
gulp.parallel("gen-icons-app", "gen-icons-mdi", "build-translations"),
|
||||
"copy-static-gallery",
|
||||
"gen-pages-gallery-dev",
|
||||
gulp.parallel(
|
||||
"rspack-dev-server-gallery",
|
||||
async function watchMarkdownFiles() {
|
||||
gulp.watch(
|
||||
[
|
||||
path.resolve(paths.gallery_dir, "src/pages/**/*.markdown"),
|
||||
path.resolve(paths.gallery_dir, "sidebar.js"),
|
||||
],
|
||||
gulp.series("gather-gallery-pages")
|
||||
);
|
||||
}
|
||||
)
|
||||
"gen-index-gallery-dev",
|
||||
"webpack-dev-server-gallery"
|
||||
)
|
||||
);
|
||||
|
||||
@ -177,15 +30,9 @@ gulp.task(
|
||||
process.env.NODE_ENV = "production";
|
||||
},
|
||||
"clean-gallery",
|
||||
"translations-enable-merge-backend",
|
||||
gulp.parallel(
|
||||
"gen-icons-json",
|
||||
"build-translations",
|
||||
"build-locale-data",
|
||||
"gather-gallery-pages"
|
||||
),
|
||||
gulp.parallel("gen-icons-app", "gen-icons-mdi", "build-translations"),
|
||||
"copy-static-gallery",
|
||||
"rspack-prod-gallery",
|
||||
"gen-pages-gallery-prod"
|
||||
"webpack-prod-gallery",
|
||||
"gen-index-gallery-prod"
|
||||
)
|
||||
);
|
||||
|
@ -1,56 +1,39 @@
|
||||
// Gulp task to gather all static files.
|
||||
|
||||
import fs from "fs-extra";
|
||||
import gulp from "gulp";
|
||||
import path from "path";
|
||||
import paths from "../paths.cjs";
|
||||
const gulp = require("gulp");
|
||||
const path = require("path");
|
||||
const cpx = require("cpx");
|
||||
const fs = require("fs-extra");
|
||||
const paths = require("../paths");
|
||||
|
||||
const npmPath = (...parts) =>
|
||||
path.resolve(paths.root_dir, "node_modules", ...parts);
|
||||
const polyPath = (...parts) => path.resolve(paths.root_dir, ...parts);
|
||||
path.resolve(paths.polymer_dir, "node_modules", ...parts);
|
||||
const polyPath = (...parts) => path.resolve(paths.polymer_dir, ...parts);
|
||||
|
||||
const copyFileDir = (fromFile, toDir) =>
|
||||
fs.copySync(fromFile, path.join(toDir, path.basename(fromFile)));
|
||||
|
||||
const genStaticPath =
|
||||
(staticDir) =>
|
||||
(...parts) =>
|
||||
path.resolve(staticDir, ...parts);
|
||||
const genStaticPath = (staticDir) => (...parts) =>
|
||||
path.resolve(staticDir, ...parts);
|
||||
|
||||
function copyTranslations(staticDir) {
|
||||
const staticPath = genStaticPath(staticDir);
|
||||
|
||||
// Translation output
|
||||
fs.copySync(
|
||||
polyPath("build/translations/output"),
|
||||
polyPath("build-translations/output"),
|
||||
staticPath("translations")
|
||||
);
|
||||
}
|
||||
|
||||
function copyLocaleData(staticDir) {
|
||||
const staticPath = genStaticPath(staticDir);
|
||||
|
||||
// Locale data output
|
||||
fs.copySync(polyPath("build/locale-data"), staticPath("locale-data"));
|
||||
}
|
||||
|
||||
function copyMdiIcons(staticDir) {
|
||||
const staticPath = genStaticPath(staticDir);
|
||||
|
||||
// MDI icons output
|
||||
fs.copySync(polyPath("build/mdi"), staticPath("mdi"));
|
||||
}
|
||||
|
||||
function copyPolyfills(staticDir) {
|
||||
const staticPath = genStaticPath(staticDir);
|
||||
|
||||
// For custom panels using ES5 builds that don't use Babel 7+
|
||||
// Web Component polyfills and adapters
|
||||
copyFileDir(
|
||||
npmPath("@webcomponents/webcomponentsjs/custom-elements-es5-adapter.js"),
|
||||
staticPath("polyfills/")
|
||||
);
|
||||
|
||||
// Web Component polyfills and adapters
|
||||
copyFileDir(
|
||||
npmPath("@webcomponents/webcomponentsjs/webcomponents-bundle.js"),
|
||||
staticPath("polyfills/")
|
||||
@ -59,156 +42,90 @@ function copyPolyfills(staticDir) {
|
||||
npmPath("@webcomponents/webcomponentsjs/webcomponents-bundle.js.map"),
|
||||
staticPath("polyfills/")
|
||||
);
|
||||
// Lit polyfill support
|
||||
fs.copySync(
|
||||
npmPath("lit/polyfill-support.js"),
|
||||
path.join(staticPath("polyfills/"), "lit-polyfill-support.js")
|
||||
);
|
||||
|
||||
// dialog-polyfill css
|
||||
copyFileDir(
|
||||
npmPath("dialog-polyfill/dialog-polyfill.css"),
|
||||
staticPath("polyfills/")
|
||||
);
|
||||
}
|
||||
|
||||
function copyFonts(staticDir) {
|
||||
const staticPath = genStaticPath(staticDir);
|
||||
// Local fonts
|
||||
fs.copySync(
|
||||
npmPath("roboto-fontface/fonts/roboto/"),
|
||||
staticPath("fonts/roboto/"),
|
||||
{
|
||||
filter: (src) => !src.includes(".") || src.endsWith(".woff2"),
|
||||
}
|
||||
cpx.copySync(
|
||||
npmPath("roboto-fontface/fonts/roboto/*.woff2"),
|
||||
staticPath("fonts/roboto")
|
||||
);
|
||||
}
|
||||
|
||||
function copyQrScannerWorker(staticDir) {
|
||||
const staticPath = genStaticPath(staticDir);
|
||||
copyFileDir(npmPath("qr-scanner/qr-scanner-worker.min.js"), staticPath("js"));
|
||||
}
|
||||
|
||||
function copyMapPanel(staticDir) {
|
||||
const staticPath = genStaticPath(staticDir);
|
||||
copyFileDir(
|
||||
npmPath("leaflet/dist/leaflet.css"),
|
||||
staticPath("images/leaflet/")
|
||||
);
|
||||
copyFileDir(
|
||||
npmPath("leaflet.markercluster/dist/MarkerCluster.css"),
|
||||
staticPath("images/leaflet/")
|
||||
);
|
||||
fs.copySync(
|
||||
npmPath("leaflet/dist/images"),
|
||||
staticPath("images/leaflet/images/")
|
||||
);
|
||||
}
|
||||
|
||||
function copyZXingWasm(staticDir) {
|
||||
const staticPath = genStaticPath(staticDir);
|
||||
copyFileDir(
|
||||
npmPath("zxing-wasm/dist/reader/zxing_reader.wasm"),
|
||||
staticPath("js")
|
||||
);
|
||||
}
|
||||
|
||||
gulp.task("copy-locale-data", async () => {
|
||||
const staticDir = paths.app_output_static;
|
||||
copyLocaleData(staticDir);
|
||||
});
|
||||
|
||||
gulp.task("copy-translations-app", async () => {
|
||||
const staticDir = paths.app_output_static;
|
||||
gulp.task("copy-translations", (done) => {
|
||||
const staticDir = paths.static;
|
||||
copyTranslations(staticDir);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("copy-translations-supervisor", async () => {
|
||||
const staticDir = paths.hassio_output_static;
|
||||
copyTranslations(staticDir);
|
||||
});
|
||||
|
||||
gulp.task("copy-translations-landing-page", async () => {
|
||||
const staticDir = paths.landingPage_output_static;
|
||||
copyTranslations(staticDir);
|
||||
});
|
||||
|
||||
gulp.task("copy-static-supervisor", async () => {
|
||||
const staticDir = paths.hassio_output_static;
|
||||
copyLocaleData(staticDir);
|
||||
copyFonts(staticDir);
|
||||
});
|
||||
|
||||
gulp.task("copy-static-app", async () => {
|
||||
const staticDir = paths.app_output_static;
|
||||
gulp.task("copy-static", (done) => {
|
||||
const staticDir = paths.static;
|
||||
const staticPath = genStaticPath(paths.static);
|
||||
// Basic static files
|
||||
fs.copySync(polyPath("public"), paths.app_output_root);
|
||||
fs.copySync(polyPath("public"), paths.root);
|
||||
|
||||
copyPolyfills(staticDir);
|
||||
copyFonts(staticDir);
|
||||
copyTranslations(staticDir);
|
||||
copyLocaleData(staticDir);
|
||||
copyMdiIcons(staticDir);
|
||||
|
||||
// Panel assets
|
||||
copyFileDir(
|
||||
npmPath("react-big-calendar/lib/css/react-big-calendar.css"),
|
||||
staticPath("panels/calendar/")
|
||||
);
|
||||
copyMapPanel(staticDir);
|
||||
|
||||
// Qr Scanner assets
|
||||
copyZXingWasm(staticDir);
|
||||
copyQrScannerWorker(staticDir);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("copy-static-demo", async () => {
|
||||
gulp.task("copy-static-demo", (done) => {
|
||||
// Copy app static files
|
||||
fs.copySync(
|
||||
polyPath("public/static"),
|
||||
path.resolve(paths.demo_output_root, "static")
|
||||
path.resolve(paths.demo_root, "static")
|
||||
);
|
||||
// Copy demo static files
|
||||
fs.copySync(path.resolve(paths.demo_dir, "public"), paths.demo_output_root);
|
||||
copyPolyfills(paths.demo_output_static);
|
||||
copyMapPanel(paths.demo_output_static);
|
||||
copyFonts(paths.demo_output_static);
|
||||
copyTranslations(paths.demo_output_static);
|
||||
copyLocaleData(paths.demo_output_static);
|
||||
copyMdiIcons(paths.demo_output_static);
|
||||
fs.copySync(path.resolve(paths.demo_dir, "public"), paths.demo_root);
|
||||
|
||||
copyPolyfills(paths.demo_static);
|
||||
copyMapPanel(paths.demo_static);
|
||||
copyFonts(paths.demo_static);
|
||||
copyTranslations(paths.demo_static);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("copy-static-cast", async () => {
|
||||
gulp.task("copy-static-cast", (done) => {
|
||||
// Copy app static files
|
||||
fs.copySync(polyPath("public/static"), paths.cast_output_static);
|
||||
fs.copySync(polyPath("public/static"), paths.cast_static);
|
||||
// Copy cast static files
|
||||
fs.copySync(path.resolve(paths.cast_dir, "public"), paths.cast_output_root);
|
||||
copyPolyfills(paths.cast_output_static);
|
||||
copyMapPanel(paths.cast_output_static);
|
||||
copyFonts(paths.cast_output_static);
|
||||
copyTranslations(paths.cast_output_static);
|
||||
copyLocaleData(paths.cast_output_static);
|
||||
copyMdiIcons(paths.cast_output_static);
|
||||
fs.copySync(path.resolve(paths.cast_dir, "public"), paths.cast_root);
|
||||
|
||||
copyMapPanel(paths.cast_static);
|
||||
copyFonts(paths.cast_static);
|
||||
copyTranslations(paths.cast_static);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("copy-static-gallery", async () => {
|
||||
gulp.task("copy-static-gallery", (done) => {
|
||||
// Copy app static files
|
||||
fs.copySync(polyPath("public/static"), paths.gallery_output_static);
|
||||
fs.copySync(polyPath("public/static"), paths.gallery_static);
|
||||
// Copy gallery static files
|
||||
fs.copySync(
|
||||
path.resolve(paths.gallery_dir, "public"),
|
||||
paths.gallery_output_root
|
||||
);
|
||||
fs.copySync(path.resolve(paths.gallery_dir, "public"), paths.gallery_root);
|
||||
|
||||
copyMapPanel(paths.gallery_output_static);
|
||||
copyFonts(paths.gallery_output_static);
|
||||
copyTranslations(paths.gallery_output_static);
|
||||
copyLocaleData(paths.gallery_output_static);
|
||||
copyMdiIcons(paths.gallery_output_static);
|
||||
});
|
||||
|
||||
gulp.task("copy-static-landing-page", async () => {
|
||||
// Copy landing-page static files
|
||||
fs.copySync(
|
||||
path.resolve(paths.landingPage_dir, "public"),
|
||||
paths.landingPage_output_root
|
||||
);
|
||||
|
||||
copyFonts(paths.landingPage_output_static);
|
||||
copyTranslations(paths.landingPage_output_static);
|
||||
copyMapPanel(paths.gallery_static);
|
||||
copyFonts(paths.gallery_static);
|
||||
copyTranslations(paths.gallery_static);
|
||||
done();
|
||||
});
|
||||
|
@ -1,165 +0,0 @@
|
||||
import fs from "fs";
|
||||
import gulp from "gulp";
|
||||
import hash from "object-hash";
|
||||
import path from "path";
|
||||
import paths from "../paths.cjs";
|
||||
|
||||
const ICON_PACKAGE_PATH = path.resolve("node_modules/@mdi/svg/");
|
||||
const META_PATH = path.resolve(ICON_PACKAGE_PATH, "meta.json");
|
||||
const PACKAGE_PATH = path.resolve(ICON_PACKAGE_PATH, "package.json");
|
||||
const ICON_PATH = path.resolve(ICON_PACKAGE_PATH, "svg");
|
||||
const OUTPUT_DIR = path.resolve(paths.build_dir, "mdi");
|
||||
const REMOVED_ICONS_PATH = new URL("../removedIcons.json", import.meta.url);
|
||||
|
||||
const encoding = "utf8";
|
||||
|
||||
const getMeta = () => {
|
||||
const file = fs.readFileSync(META_PATH, { encoding });
|
||||
const meta = JSON.parse(file);
|
||||
return meta.map((icon) => {
|
||||
const svg = fs.readFileSync(`${ICON_PATH}/${icon.name}.svg`, {
|
||||
encoding,
|
||||
});
|
||||
return {
|
||||
path: svg.match(/ d="([^"]+)"/)[1],
|
||||
name: icon.name,
|
||||
tags: icon.tags,
|
||||
aliases: icon.aliases,
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
const addRemovedMeta = (meta) => {
|
||||
const file = fs.readFileSync(REMOVED_ICONS_PATH, { encoding });
|
||||
const removed = JSON.parse(file);
|
||||
const removedMeta = removed.map((removeIcon) => ({
|
||||
path: removeIcon.path,
|
||||
name: removeIcon.name,
|
||||
tags: [],
|
||||
aliases: [],
|
||||
}));
|
||||
const combinedMeta = [...meta, ...removedMeta];
|
||||
return combinedMeta.sort((a, b) => a.name.localeCompare(b.name));
|
||||
};
|
||||
|
||||
const homeAutomationTag = "Home Automation";
|
||||
|
||||
const orderMeta = (meta) => {
|
||||
const homeAutomationMeta = meta.filter((icon) =>
|
||||
icon.tags.includes(homeAutomationTag)
|
||||
);
|
||||
const otherMeta = meta.filter(
|
||||
(icon) => !icon.tags.includes(homeAutomationTag)
|
||||
);
|
||||
return [...homeAutomationMeta, ...otherMeta];
|
||||
};
|
||||
|
||||
const splitBySize = (meta) => {
|
||||
const chunks = [];
|
||||
const CHUNK_SIZE = 50000;
|
||||
|
||||
let curSize = 0;
|
||||
let startKey;
|
||||
let icons = [];
|
||||
|
||||
Object.values(meta).forEach((icon) => {
|
||||
if (startKey === undefined) {
|
||||
startKey = icon.name;
|
||||
}
|
||||
curSize += icon.path.length;
|
||||
icons.push(icon);
|
||||
if (curSize > CHUNK_SIZE) {
|
||||
chunks.push({
|
||||
startKey,
|
||||
endKey: icon.name,
|
||||
icons,
|
||||
});
|
||||
curSize = 0;
|
||||
startKey = undefined;
|
||||
icons = [];
|
||||
}
|
||||
});
|
||||
|
||||
chunks.push({
|
||||
startKey,
|
||||
icons,
|
||||
});
|
||||
|
||||
return chunks;
|
||||
};
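A tiny worked example (made-up icons) of the size-based chunking above: SVG path data accumulates until the chunk budget is exceeded, then a new chunk starts. Start/end keys are omitted here for brevity.

const splitBySizeSketch = (icons, chunkSize) => {
  const chunks = [];
  let current = [];
  let size = 0;
  for (const icon of icons) {
    current.push(icon.name);
    size += icon.path.length;
    if (size > chunkSize) {
      chunks.push(current);
      current = [];
      size = 0;
    }
  }
  chunks.push(current);
  return chunks;
};

console.log(
  splitBySizeSketch(
    [
      { name: "a", path: "M1 2h3" },
      { name: "b", path: "M4 5h6" },
      { name: "c", path: "M7 8h9" },
    ],
    10
  )
); // [["a", "b"], ["c"]]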
|
||||
|
||||
const findDifferentiator = (curString, prevString) => {
|
||||
for (let i = 0; i < curString.length; i++) {
|
||||
if (curString[i] !== prevString[i]) {
|
||||
return curString.substring(0, i + 1);
|
||||
}
|
||||
}
|
||||
throw new Error(`Cannot find differentiator between "${curString}" and "${prevString}"`);
|
||||
};
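A worked example with assumed icon names: the shortest prefix of the new chunk's start key that distinguishes it from the previous chunk's end key becomes the part key stored in iconMetadata.json.

// "account-" matches in both strings, then "b" differs from "a",
// so the differentiating prefix is "account-b".
console.log(findDifferentiator("account-box", "account-alert")); // "account-b"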
|
||||
|
||||
gulp.task("gen-icons-json", (done) => {
|
||||
const meta = getMeta();
|
||||
|
||||
const metaAndRemoved = addRemovedMeta(meta);
|
||||
const split = splitBySize(metaAndRemoved);
|
||||
|
||||
if (!fs.existsSync(OUTPUT_DIR)) {
|
||||
fs.mkdirSync(OUTPUT_DIR, { recursive: true });
|
||||
}
|
||||
const parts = [];
|
||||
|
||||
let lastEnd;
|
||||
split.forEach((chunk) => {
|
||||
let startKey;
|
||||
if (lastEnd === undefined) {
|
||||
chunk.startKey = undefined;
|
||||
startKey = undefined;
|
||||
} else {
|
||||
startKey = findDifferentiator(chunk.startKey, lastEnd);
|
||||
}
|
||||
lastEnd = chunk.endKey;
|
||||
|
||||
const output = {};
|
||||
chunk.icons.forEach((icon) => {
|
||||
output[icon.name] = icon.path;
|
||||
});
|
||||
const filename = hash(output);
|
||||
parts.push({ start: startKey, file: filename });
|
||||
fs.writeFileSync(
|
||||
path.resolve(OUTPUT_DIR, `${filename}.json`),
|
||||
JSON.stringify(output)
|
||||
);
|
||||
});
|
||||
|
||||
const file = fs.readFileSync(PACKAGE_PATH, { encoding });
|
||||
const packageMeta = JSON.parse(file);
|
||||
|
||||
fs.writeFileSync(
|
||||
path.resolve(OUTPUT_DIR, "iconMetadata.json"),
|
||||
JSON.stringify({ version: packageMeta.version, parts })
|
||||
);
|
||||
|
||||
fs.writeFileSync(
|
||||
path.resolve(OUTPUT_DIR, "iconList.json"),
|
||||
JSON.stringify(
|
||||
orderMeta(meta).map((icon) => ({
|
||||
name: icon.name,
|
||||
keywords: [
|
||||
...icon.tags.map((t) => t.toLowerCase().replace(/\s\/\s/g, " ")),
|
||||
...icon.aliases,
|
||||
],
|
||||
}))
|
||||
)
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("gen-dummy-icons-json", (done) => {
|
||||
if (!fs.existsSync(OUTPUT_DIR)) {
|
||||
fs.mkdirSync(OUTPUT_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
fs.writeFileSync(path.resolve(OUTPUT_DIR, "iconList.json"), "[]");
|
||||
done();
|
||||
});
|
build-scripts/gulp/gen-icons.js (new file, 127 lines)
@ -0,0 +1,127 @@
|
||||
const gulp = require("gulp");
|
||||
const path = require("path");
|
||||
const fs = require("fs");
|
||||
const paths = require("../paths");
|
||||
const { mapFiles } = require("../util");
|
||||
|
||||
const ICON_PACKAGE_PATH = path.resolve(
|
||||
__dirname,
|
||||
"../../node_modules/@mdi/svg/"
|
||||
);
|
||||
const META_PATH = path.resolve(ICON_PACKAGE_PATH, "meta.json");
|
||||
const ICON_PATH = path.resolve(ICON_PACKAGE_PATH, "svg");
|
||||
const OUTPUT_DIR = path.resolve(__dirname, "../../build");
|
||||
const MDI_OUTPUT_PATH = path.resolve(OUTPUT_DIR, "mdi.html");
|
||||
const HASS_OUTPUT_PATH = path.resolve(OUTPUT_DIR, "hass-icons.html");
|
||||
|
||||
const BUILT_IN_PANEL_ICONS = [
|
||||
"calendar", // Calendar
|
||||
"settings", // Config
|
||||
"home-assistant", // Hass.io
|
||||
"poll-box", // History panel
|
||||
"format-list-bulleted-type", // Logbook
|
||||
"mailbox", // Mailbox
|
||||
"tooltip-account", // Map
|
||||
"cart", // Shopping List
|
||||
"hammer", // developer-tools
|
||||
];
|
||||
|
||||
// Given an icon name, load the SVG file
|
||||
function loadIcon(name) {
|
||||
const iconPath = path.resolve(ICON_PATH, `${name}.svg`);
|
||||
try {
|
||||
return fs.readFileSync(iconPath, "utf-8");
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Given an SVG file, convert it to an iron-iconset-svg definition
|
||||
function transformXMLtoPolymer(name, xml) {
|
||||
const start = xml.indexOf("><path") + 1;
|
||||
const end = xml.length - start - 6;
|
||||
const pth = xml.substr(start, end);
|
||||
return `<g id="${name}">${pth}</g>`;
|
||||
}
|
||||
|
||||
// Given an iconset name and icon names, generate a polymer iconset
|
||||
function generateIconset(iconsetName, iconNames) {
|
||||
const iconDefs = Array.from(iconNames)
|
||||
.map((name) => {
|
||||
const iconDef = loadIcon(name);
|
||||
if (!iconDef) {
|
||||
throw new Error(`Unknown icon referenced: ${name}`);
|
||||
}
|
||||
return transformXMLtoPolymer(name, iconDef);
|
||||
})
|
||||
.join("");
|
||||
return `<ha-iconset-svg name="${iconsetName}" size="24"><svg><defs>${iconDefs}</defs></svg></ha-iconset-svg>`;
|
||||
}
|
||||
|
||||
// Find all icons used by the project.
|
||||
function findIcons(searchPath, iconsetName) {
|
||||
const iconRegex = new RegExp(`${iconsetName}:[\\w-]+`, "g");
|
||||
const icons = new Set();
|
||||
function processFile(filename) {
|
||||
const content = fs.readFileSync(filename);
|
||||
let match;
|
||||
// eslint-disable-next-line
|
||||
while ((match = iconRegex.exec(content))) {
|
||||
// strip off "hass:" and add to set
|
||||
icons.add(match[0].substr(iconsetName.length + 1));
|
||||
}
|
||||
}
|
||||
mapFiles(searchPath, ".js", processFile);
|
||||
mapFiles(searchPath, ".ts", processFile);
|
||||
return icons;
|
||||
}
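The same scan as findIcons, demonstrated on an inline string instead of source files (a sketch; the icon names are made up): every "hass:<name>" occurrence is collected with the iconset prefix stripped.

const findIconsInText = (content, iconsetName) => {
  const iconRegex = new RegExp(`${iconsetName}:[\\w-]+`, "g");
  const icons = new Set();
  let match;
  while ((match = iconRegex.exec(content))) {
    icons.add(match[0].substring(iconsetName.length + 1));
  }
  return icons;
};

console.log(findIconsInText('icon="hass:thermostat" icon="hass:cart"', "hass"));
// Set { "thermostat", "cart" }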
|
||||
|
||||
gulp.task("gen-icons-mdi", (done) => {
|
||||
const meta = JSON.parse(
|
||||
fs.readFileSync(path.resolve(ICON_PACKAGE_PATH, META_PATH), "UTF-8")
|
||||
);
|
||||
const iconNames = meta.map((iconInfo) => iconInfo.name);
|
||||
if (!fs.existsSync(OUTPUT_DIR)) {
|
||||
fs.mkdirSync(OUTPUT_DIR);
|
||||
}
|
||||
fs.writeFileSync(MDI_OUTPUT_PATH, generateIconset("mdi", iconNames));
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("gen-icons-app", (done) => {
|
||||
const iconNames = findIcons("./src", "hass");
|
||||
BUILT_IN_PANEL_ICONS.forEach((name) => iconNames.add(name));
|
||||
if (!fs.existsSync(OUTPUT_DIR)) {
|
||||
fs.mkdirSync(OUTPUT_DIR);
|
||||
}
|
||||
fs.writeFileSync(HASS_OUTPUT_PATH, generateIconset("hass", iconNames));
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("gen-icons-demo", (done) => {
|
||||
const iconNames = findIcons(path.resolve(paths.demo_dir, "./src"), "hademo");
|
||||
fs.writeFileSync(
|
||||
path.resolve(paths.demo_dir, "hademo-icons.html"),
|
||||
generateIconset("hademo", iconNames)
|
||||
);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("gen-icons-hassio", (done) => {
|
||||
const iconNames = findIcons(
|
||||
path.resolve(paths.hassio_dir, "./src"),
|
||||
"hassio"
|
||||
);
|
||||
// Find hassio icons inside HA main repo.
|
||||
for (const item of findIcons(
|
||||
path.resolve(paths.polymer_dir, "./src"),
|
||||
"hassio"
|
||||
)) {
|
||||
iconNames.add(item);
|
||||
}
|
||||
fs.writeFileSync(
|
||||
path.resolve(paths.hassio_dir, "hassio-icons.html"),
|
||||
generateIconset("hassio", iconNames)
|
||||
);
|
||||
done();
|
||||
});
|
@ -1,12 +1,11 @@
|
||||
import gulp from "gulp";
|
||||
import env from "../env.cjs";
|
||||
import "./clean.js";
|
||||
import "./compress.js";
|
||||
import "./entry-html.js";
|
||||
import "./gather-static.js";
|
||||
import "./gen-icons-json.js";
|
||||
import "./translations.js";
|
||||
import "./rspack.js";
|
||||
const gulp = require("gulp");
|
||||
|
||||
const envVars = require("../env");
|
||||
|
||||
require("./clean.js");
|
||||
require("./gen-icons.js");
|
||||
require("./webpack.js");
|
||||
require("./compress.js");
|
||||
|
||||
gulp.task(
|
||||
"develop-hassio",
|
||||
@ -15,13 +14,8 @@ gulp.task(
|
||||
process.env.NODE_ENV = "development";
|
||||
},
|
||||
"clean-hassio",
|
||||
"gen-dummy-icons-json",
|
||||
"gen-pages-hassio-dev",
|
||||
"build-supervisor-translations",
|
||||
"copy-translations-supervisor",
|
||||
"build-locale-data",
|
||||
"copy-static-supervisor",
|
||||
"rspack-watch-hassio"
|
||||
gulp.parallel("gen-icons-hassio", "gen-icons-mdi"),
|
||||
"webpack-watch-hassio"
|
||||
)
|
||||
);
|
||||
|
||||
@ -32,14 +26,9 @@ gulp.task(
|
||||
process.env.NODE_ENV = "production";
|
||||
},
|
||||
"clean-hassio",
|
||||
"gen-dummy-icons-json",
|
||||
"build-supervisor-translations",
|
||||
"copy-translations-supervisor",
|
||||
"build-locale-data",
|
||||
"copy-static-supervisor",
|
||||
"rspack-prod-hassio",
|
||||
"gen-pages-hassio-prod",
|
||||
gulp.parallel("gen-icons-hassio", "gen-icons-mdi"),
|
||||
"webpack-prod-hassio",
|
||||
...// Don't compress running tests
|
||||
(env.isTestBuild() ? [] : ["compress-hassio"])
|
||||
(envVars.isTravis() ? [] : ["compress-hassio"])
|
||||
)
|
||||
);
|
||||
|
@ -1,17 +0,0 @@
|
||||
import "./app.js";
|
||||
import "./cast.js";
|
||||
import "./clean.js";
|
||||
import "./compress.js";
|
||||
import "./demo.js";
|
||||
import "./download-translations.js";
|
||||
import "./entry-html.js";
|
||||
import "./fetch-nightly-translations.js";
|
||||
import "./gallery.js";
|
||||
import "./gather-static.js";
|
||||
import "./gen-icons-json.js";
|
||||
import "./hassio.js";
|
||||
import "./landing-page.js";
|
||||
import "./locale-data.js";
|
||||
import "./rspack.js";
|
||||
import "./service-worker.js";
|
||||
import "./translations.js";
|
@ -1,41 +0,0 @@
|
||||
import gulp from "gulp";
|
||||
import "./clean.js";
|
||||
import "./compress.js";
|
||||
import "./entry-html.js";
|
||||
import "./gather-static.js";
|
||||
import "./gen-icons-json.js";
|
||||
import "./translations.js";
|
||||
import "./rspack.js";
|
||||
|
||||
gulp.task(
|
||||
"develop-landing-page",
|
||||
gulp.series(
|
||||
async function setEnv() {
|
||||
process.env.NODE_ENV = "development";
|
||||
},
|
||||
"clean-landing-page",
|
||||
"translations-enable-merge-backend",
|
||||
"build-landing-page-translations",
|
||||
"copy-translations-landing-page",
|
||||
"build-locale-data",
|
||||
"copy-static-landing-page",
|
||||
"gen-pages-landing-page-dev",
|
||||
"rspack-watch-landing-page"
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"build-landing-page",
|
||||
gulp.series(
|
||||
async function setEnv() {
|
||||
process.env.NODE_ENV = "production";
|
||||
},
|
||||
"clean-landing-page",
|
||||
"build-landing-page-translations",
|
||||
"copy-translations-landing-page",
|
||||
"build-locale-data",
|
||||
"copy-static-landing-page",
|
||||
"rspack-prod-landing-page",
|
||||
"gen-pages-landing-page-prod"
|
||||
)
|
||||
);
|
@ -1,89 +0,0 @@
|
||||
import { deleteSync } from "del";
|
||||
import { mkdir, readFile, writeFile } from "fs/promises";
|
||||
import gulp from "gulp";
|
||||
import { join, resolve } from "node:path";
|
||||
import paths from "../paths.cjs";
|
||||
|
||||
const formatjsDir = join(paths.root_dir, "node_modules", "@formatjs");
|
||||
const outDir = join(paths.build_dir, "locale-data");
|
||||
|
||||
const INTL_POLYFILLS = {
|
||||
"intl-datetimeformat": "DateTimeFormat",
|
||||
"intl-displaynames": "DisplayNames",
|
||||
"intl-listformat": "ListFormat",
|
||||
"intl-numberformat": "NumberFormat",
|
||||
"intl-relativetimeformat": "RelativeTimeFormat",
|
||||
};
|
||||
|
||||
const convertToJSON = async (
|
||||
pkg,
|
||||
lang,
|
||||
subDir = "locale-data",
|
||||
addFunc = "__addLocaleData",
|
||||
skipMissing = true
|
||||
) => {
|
||||
let localeData;
|
||||
try {
|
||||
// use "pt" for "pt-BR", because "pt-BR" is unsupported by @formatjs
|
||||
const language = lang === "pt-BR" ? "pt" : lang;
|
||||
|
||||
localeData = await readFile(
|
||||
join(formatjsDir, pkg, subDir, `${language}.js`),
|
||||
"utf-8"
|
||||
);
|
||||
} catch (e) {
|
||||
// Ignore if language is missing (i.e. not supported by @formatjs)
|
||||
if (e.code === "ENOENT" && skipMissing) {
|
||||
console.warn(`Skipped missing data for language ${lang} from ${pkg}`);
|
||||
return;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
// Convert to JSON
|
||||
const obj = INTL_POLYFILLS[pkg];
|
||||
const dataRegex = new RegExp(
|
||||
`Intl\\.${obj}\\.${addFunc}\\((?<data>.*)\\)`,
|
||||
"s"
|
||||
);
|
||||
localeData = localeData.match(dataRegex)?.groups?.data;
|
||||
if (!localeData) {
|
||||
throw Error(`Failed to extract data for language ${lang} from ${pkg}`);
|
||||
}
|
||||
// Parse to validate JSON, then stringify to minify
|
||||
localeData = JSON.stringify(JSON.parse(localeData));
|
||||
await writeFile(join(outDir, `${pkg}/${lang}.json`), localeData);
|
||||
};
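A sketch of the extraction step in convertToJSON: each @formatjs locale file is a small script that calls Intl.<Object>.__addLocaleData(...), and the named capture group pulls the JSON argument back out so it can be re-serialized as plain JSON. The sample input below is invented.

const sample =
  'Intl.NumberFormat.__addLocaleData({"data":{"nl":{}},"availableLocales":["nl"]})';

const dataRegex = new RegExp(
  "Intl\\.NumberFormat\\.__addLocaleData\\((?<data>.*)\\)",
  "s"
);
const extracted = sample.match(dataRegex)?.groups?.data;
console.log(JSON.stringify(JSON.parse(extracted)));
// {"data":{"nl":{}},"availableLocales":["nl"]}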
|
||||
|
||||
gulp.task("clean-locale-data", async () => deleteSync([outDir]));
|
||||
|
||||
gulp.task("create-locale-data", async () => {
|
||||
const translationMeta = JSON.parse(
|
||||
await readFile(
|
||||
resolve(paths.translations_src, "translationMetadata.json"),
|
||||
"utf-8"
|
||||
)
|
||||
);
|
||||
const conversions = [];
|
||||
for (const pkg of Object.keys(INTL_POLYFILLS)) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await mkdir(join(outDir, pkg), { recursive: true });
|
||||
for (const lang of Object.keys(translationMeta)) {
|
||||
conversions.push(convertToJSON(pkg, lang));
|
||||
}
|
||||
}
|
||||
conversions.push(
|
||||
convertToJSON(
|
||||
"intl-datetimeformat",
|
||||
"add-all-tz",
|
||||
".",
|
||||
"__addTZData",
|
||||
false
|
||||
)
|
||||
);
|
||||
await Promise.all(conversions);
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"build-locale-data",
|
||||
gulp.series("clean-locale-data", "create-locale-data")
|
||||
);
|
@ -1,232 +0,0 @@
|
||||
// Tasks to run rspack.
|
||||
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import log from "fancy-log";
|
||||
import gulp from "gulp";
|
||||
import rspack from "@rspack/core";
|
||||
import { RspackDevServer } from "@rspack/dev-server";
|
||||
import env from "../env.cjs";
|
||||
import paths from "../paths.cjs";
|
||||
import {
|
||||
createAppConfig,
|
||||
createCastConfig,
|
||||
createDemoConfig,
|
||||
createGalleryConfig,
|
||||
createHassioConfig,
|
||||
createLandingPageConfig,
|
||||
} from "../rspack.cjs";
|
||||
|
||||
const bothBuilds = (createConfigFunc, params) => [
|
||||
createConfigFunc({ ...params, latestBuild: true }),
|
||||
createConfigFunc({ ...params, latestBuild: false }),
|
||||
];
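A worked example of bothBuilds: it simply expands one config factory into a two-element array (modern plus legacy), which rspack() accepts as a multi-compiler configuration. The toy factory below is illustrative.

const toyConfig = ({ latestBuild, isProdBuild }) => ({
  name: latestBuild ? "modern" : "legacy",
  mode: isProdBuild ? "production" : "development",
});

console.log(bothBuilds(toyConfig, { isProdBuild: true }));
// [ { name: "modern", mode: "production" },
//   { name: "legacy", mode: "production" } ]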
|
||||
|
||||
const isWsl =
|
||||
fs.existsSync("/proc/version") &&
|
||||
fs
|
||||
.readFileSync("/proc/version", "utf-8")
|
||||
.toLocaleLowerCase()
|
||||
.includes("microsoft");
|
||||
|
||||
/**
|
||||
* @param {{
|
||||
* compiler: import("@rspack/core").Compiler,
|
||||
* contentBase: string,
|
||||
* port: number,
|
||||
* listenHost?: string
|
||||
* }}
|
||||
*/
|
||||
const runDevServer = async ({
|
||||
compiler,
|
||||
contentBase,
|
||||
port,
|
||||
listenHost = undefined,
|
||||
proxy = undefined,
|
||||
}) => {
|
||||
if (listenHost === undefined) {
|
||||
// For dev container, we need to listen on all hosts
|
||||
listenHost = env.isDevContainer() ? "0.0.0.0" : "localhost";
|
||||
}
|
||||
const server = new RspackDevServer(
|
||||
{
|
||||
hot: false,
|
||||
open: true,
|
||||
host: listenHost,
|
||||
port,
|
||||
static: {
|
||||
directory: contentBase,
|
||||
watch: true,
|
||||
},
|
||||
proxy,
|
||||
},
|
||||
compiler
|
||||
);
|
||||
|
||||
await server.start();
|
||||
// Server listening
|
||||
log("[rspack-dev-server]", `Project is running at http://localhost:${port}`);
|
||||
};
|
||||
|
||||
const doneHandler = (done) => (err, stats) => {
|
||||
if (err) {
|
||||
log.error(err.stack || err);
|
||||
if (err.details) {
|
||||
log.error(err.details);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (stats.hasErrors() || stats.hasWarnings()) {
|
||||
console.log(stats.toString("minimal"));
|
||||
}
|
||||
|
||||
log(`Build done @ ${new Date().toLocaleTimeString()}`);
|
||||
|
||||
if (done) {
|
||||
done();
|
||||
}
|
||||
};
|
||||
|
||||
const prodBuild = (conf) =>
|
||||
new Promise((resolve) => {
|
||||
rspack(
|
||||
conf,
|
||||
// Resolve promise when done. Because we pass a callback, rspack closes itself
|
||||
doneHandler(resolve)
|
||||
);
|
||||
});
|
||||
|
||||
gulp.task("rspack-watch-app", () => {
|
||||
// This command will run forever because we don't close compiler
|
||||
rspack(
|
||||
process.env.ES5
|
||||
? bothBuilds(createAppConfig, { isProdBuild: false })
|
||||
: createAppConfig({ isProdBuild: false, latestBuild: true })
|
||||
).watch({ poll: isWsl }, doneHandler());
|
||||
gulp.watch(
|
||||
path.join(paths.translations_src, "en.json"),
|
||||
gulp.series("build-translations", "copy-translations-app")
|
||||
);
|
||||
});
|
||||
|
||||
gulp.task("rspack-prod-app", () =>
|
||||
prodBuild(
|
||||
bothBuilds(createAppConfig, {
|
||||
isProdBuild: true,
|
||||
isStatsBuild: env.isStatsBuild(),
|
||||
isTestBuild: env.isTestBuild(),
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task("rspack-dev-server-demo", () =>
|
||||
runDevServer({
|
||||
compiler: rspack(
|
||||
createDemoConfig({ isProdBuild: false, latestBuild: true })
|
||||
),
|
||||
contentBase: paths.demo_output_root,
|
||||
port: 8090,
|
||||
})
|
||||
);
|
||||
|
||||
gulp.task("rspack-prod-demo", () =>
|
||||
prodBuild(
|
||||
bothBuilds(createDemoConfig, {
|
||||
isProdBuild: true,
|
||||
isStatsBuild: env.isStatsBuild(),
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task("rspack-dev-server-cast", () =>
|
||||
runDevServer({
|
||||
compiler: rspack(
|
||||
createCastConfig({ isProdBuild: false, latestBuild: true })
|
||||
),
|
||||
contentBase: paths.cast_output_root,
|
||||
port: 8080,
|
||||
// Accessible from the network, because that's how Cast hits it.
|
||||
listenHost: "0.0.0.0",
|
||||
})
|
||||
);
|
||||
|
||||
gulp.task("rspack-prod-cast", () =>
|
||||
prodBuild(
|
||||
bothBuilds(createCastConfig, {
|
||||
isProdBuild: true,
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task("rspack-watch-hassio", () => {
|
||||
// This command will run forever because we don't close compiler
|
||||
rspack(
|
||||
createHassioConfig({
|
||||
isProdBuild: false,
|
||||
latestBuild: true,
|
||||
})
|
||||
).watch({ ignored: /build/, poll: isWsl }, doneHandler());
|
||||
|
||||
gulp.watch(
|
||||
path.join(paths.translations_src, "en.json"),
|
||||
gulp.series("build-supervisor-translations", "copy-translations-supervisor")
|
||||
);
|
||||
});
|
||||
|
||||
gulp.task("rspack-prod-hassio", () =>
|
||||
prodBuild(
|
||||
bothBuilds(createHassioConfig, {
|
||||
isProdBuild: true,
|
||||
isStatsBuild: env.isStatsBuild(),
|
||||
isTestBuild: env.isTestBuild(),
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task("rspack-dev-server-gallery", () =>
|
||||
runDevServer({
|
||||
compiler: rspack(
|
||||
createGalleryConfig({ isProdBuild: false, latestBuild: true })
|
||||
),
|
||||
contentBase: paths.gallery_output_root,
|
||||
port: 8100,
|
||||
listenHost: "0.0.0.0",
|
||||
})
|
||||
);
|
||||
|
||||
gulp.task("rspack-prod-gallery", () =>
|
||||
prodBuild(
|
||||
createGalleryConfig({
|
||||
isProdBuild: true,
|
||||
latestBuild: true,
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task("rspack-watch-landing-page", () => {
|
||||
// This command will run forever because we don't close compiler
|
||||
rspack(
|
||||
process.env.ES5
|
||||
? bothBuilds(createLandingPageConfig, { isProdBuild: false })
|
||||
: createLandingPageConfig({ isProdBuild: false, latestBuild: true })
|
||||
).watch({ poll: isWsl }, doneHandler());
|
||||
|
||||
gulp.watch(
|
||||
path.join(paths.translations_src, "en.json"),
|
||||
gulp.series(
|
||||
"build-landing-page-translations",
|
||||
"copy-translations-landing-page"
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
gulp.task("rspack-prod-landing-page", () =>
|
||||
prodBuild(
|
||||
bothBuilds(createLandingPageConfig, {
|
||||
isProdBuild: true,
|
||||
isStatsBuild: env.isStatsBuild(),
|
||||
isTestBuild: env.isTestBuild(),
|
||||
})
|
||||
)
|
||||
);
|
@ -1,87 +1,33 @@
|
||||
// Generate service workers
|
||||
// Generate service worker.
|
||||
// Based on manifest, create a file with the content as service_worker.js
|
||||
/* eslint-disable import/no-dynamic-require */
|
||||
/* eslint-disable global-require */
|
||||
const gulp = require("gulp");
|
||||
const path = require("path");
|
||||
const fs = require("fs-extra");
|
||||
const config = require("../paths.js");
|
||||
|
||||
import { deleteAsync } from "del";
|
||||
import gulp from "gulp";
|
||||
import { mkdir, readFile, symlink, writeFile } from "node:fs/promises";
|
||||
import { basename, join, relative } from "node:path";
|
||||
import { injectManifest } from "workbox-build";
|
||||
import paths from "../paths.cjs";
|
||||
const swPath = path.resolve(config.root, "service_worker.js");
|
||||
|
||||
const SW_MAP = {
|
||||
[paths.app_output_latest]: "modern",
|
||||
[paths.app_output_es5]: "legacy",
|
||||
};
|
||||
const writeSW = (content) => fs.outputFileSync(swPath, content.trim() + "\n");
|
||||
|
||||
const SW_DEV =
|
||||
`
|
||||
gulp.task("gen-service-worker-dev", (done) => {
|
||||
writeSW(
|
||||
`
|
||||
console.debug('Service worker disabled in development');
|
||||
|
||||
self.addEventListener('install', (event) => {
|
||||
// This will activate the dev service worker,
|
||||
// removing any prod service worker the dev might have running
|
||||
self.skipWaiting();
|
||||
});
|
||||
`.trim() + "\n";
|
||||
|
||||
gulp.task("gen-service-worker-app-dev", async () => {
|
||||
await mkdir(paths.app_output_root, { recursive: true });
|
||||
await Promise.all(
|
||||
Object.values(SW_MAP).map((build) =>
|
||||
writeFile(join(paths.app_output_root, `sw-${build}.js`), SW_DEV, {
|
||||
encoding: "utf-8",
|
||||
})
|
||||
)
|
||||
`
|
||||
);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task("gen-service-worker-app-prod", () =>
|
||||
Promise.all(
|
||||
Object.entries(SW_MAP).map(async ([outPath, build]) => {
|
||||
const manifest = JSON.parse(
|
||||
await readFile(join(outPath, "manifest.json"), "utf-8")
|
||||
);
|
||||
const swSrc = join(paths.app_output_root, manifest["service-worker.js"]);
|
||||
const swDest = join(paths.app_output_root, `sw-${build}.js`);
|
||||
const buildDir = relative(paths.app_output_root, outPath);
|
||||
const { warnings } = await injectManifest({
|
||||
swSrc,
|
||||
swDest,
|
||||
injectionPoint: "__WB_MANIFEST__",
|
||||
// Files that match this pattern will be considered unique and skip the revision check
|
||||
// ignore JS files + translation files
|
||||
dontCacheBustURLsMatching: new RegExp(
|
||||
`(?:${buildDir}/.+|static/translations/.+)`
|
||||
),
|
||||
globDirectory: paths.app_output_root,
|
||||
globPatterns: [
|
||||
`${buildDir}/*.js`,
|
||||
// Cache all English translations because we catch them as fallback
|
||||
// Using pattern to match hash instead of * to avoid caching en-GB
|
||||
// 'v' added as valid hash letter because in dev we hash with 'dev'
|
||||
"static/translations/**/en-+([a-fv0-9]).json",
|
||||
// Icon shown on splash screen
|
||||
"static/icons/favicon-192x192.png",
|
||||
"static/icons/favicon.ico",
|
||||
// Common fonts
|
||||
"static/fonts/roboto/Roboto-Light.woff2",
|
||||
"static/fonts/roboto/Roboto-Medium.woff2",
|
||||
"static/fonts/roboto/Roboto-Regular.woff2",
|
||||
"static/fonts/roboto/Roboto-Bold.woff2",
|
||||
],
|
||||
globIgnores: [`${buildDir}/service-worker*`],
|
||||
});
|
||||
if (warnings.length > 0) {
|
||||
console.warn(
|
||||
`Problems while injecting ${build} service worker:\n`,
|
||||
warnings.join("\n")
|
||||
);
|
||||
}
|
||||
await deleteAsync(`${swSrc}?(.map)`);
|
||||
// Needed to install new SW from a cached HTML
|
||||
if (build === "modern") {
|
||||
const swOld = join(paths.app_output_root, "service_worker.js");
|
||||
await symlink(basename(swDest), swOld);
|
||||
}
|
||||
})
|
||||
)
|
||||
);
|
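A hedged sketch of what the injection step above does to the bundled service worker source (file names are made up):

// Inside the compiled service-worker.js (assumed shape):
const manifest = __WB_MANIFEST__;
// injectManifest() replaces the token configured as injectionPoint with the
// precache list gathered from globPatterns (URLs relative to globDirectory),
// roughly:
//   [{ url: "frontend_latest/app.1a2b3c.js", revision: null },
//    { url: "static/translations/en-dev.json", revision: "..." }, ...]
// and writes the result to sw-modern.js / sw-legacy.js.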
||||
gulp.task("gen-service-worker-prod", (done) => {
|
||||
fs.copySync(
|
||||
path.resolve(config.output, "service_worker.js"),
|
||||
path.resolve(config.root, "service_worker.js")
|
||||
);
|
||||
done();
|
||||
});
|
||||
|
@ -1,103 +1,94 @@
|
||||
/* eslint-disable max-classes-per-file */
|
||||
const crypto = require("crypto");
|
||||
const del = require("del");
|
||||
const path = require("path");
|
||||
const source = require("vinyl-source-stream");
|
||||
const vinylBuffer = require("vinyl-buffer");
|
||||
const gulp = require("gulp");
|
||||
const fs = require("fs");
|
||||
const foreach = require("gulp-foreach");
|
||||
const merge = require("gulp-merge-json");
|
||||
const minify = require("gulp-jsonminify");
|
||||
const rename = require("gulp-rename");
|
||||
const transform = require("gulp-json-transform");
|
||||
const { mapFiles } = require("../util");
|
||||
const env = require("../env");
|
||||
const paths = require("../paths");
|
||||
|
||||
import { deleteAsync } from "del";
|
||||
import { glob } from "glob";
|
||||
import gulp from "gulp";
|
||||
import rename from "gulp-rename";
|
||||
import merge from "lodash.merge";
|
||||
import { createHash } from "node:crypto";
|
||||
import { mkdir, readFile } from "node:fs/promises";
|
||||
import { basename, join } from "node:path";
|
||||
import { PassThrough, Transform } from "node:stream";
|
||||
import { finished } from "node:stream/promises";
|
||||
import env from "../env.cjs";
|
||||
import paths from "../paths.cjs";
|
||||
import "./fetch-nightly-translations.js";
|
||||
const inDir = "translations";
|
||||
const workDir = "build-translations";
|
||||
const fullDir = workDir + "/full";
|
||||
const coreDir = workDir + "/core";
|
||||
const outDir = workDir + "/output";
|
||||
|
||||
const inFrontendDir = "translations/frontend";
|
||||
const inBackendDir = "translations/backend";
|
||||
const workDir = "build/translations";
|
||||
const outDir = join(workDir, "output");
|
||||
const EN_SRC = join(paths.translations_src, "en.json");
|
||||
const TEST_LOCALE = "en-x-test";
|
||||
|
||||
let mergeBackend = false;
|
||||
|
||||
gulp.task(
|
||||
"translations-enable-merge-backend",
|
||||
gulp.parallel(async () => {
|
||||
mergeBackend = true;
|
||||
}, "allow-setup-fetch-nightly-translations")
|
||||
);
|
||||
|
||||
// Transform stream to apply a function on Vinyl JSON files (buffer mode only).
|
||||
// The provided function can either return a new object, or an array of
|
||||
// [object, subdirectory] pairs for fragmentizing the JSON.
|
||||
class CustomJSON extends Transform {
|
||||
constructor(func, reviver = null) {
|
||||
super({ objectMode: true });
|
||||
this._func = func;
|
||||
this._reviver = reviver;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
async _transform(file, _, callback) {
|
||||
let obj = JSON.parse(file.contents.toString(), this._reviver);
|
||||
if (this._func) obj = this._func(obj, file.path);
|
||||
for (const [outObj, dir] of Array.isArray(obj) ? obj : [[obj, ""]]) {
|
||||
const outFile = file.clone({ contents: false });
|
||||
outFile.contents = Buffer.from(JSON.stringify(outObj));
|
||||
outFile.dirname += `/${dir}`;
|
||||
this.push(outFile);
|
||||
}
|
||||
callback(null);
|
||||
}
|
||||
}
|
||||
|
||||
// Transform stream to merge Vinyl JSON files (buffer mode only).
|
||||
class MergeJSON extends Transform {
|
||||
_objects = [];
|
||||
|
||||
constructor(stem, startObj = {}, reviver = null) {
|
||||
super({ objectMode: true, allowHalfOpen: false });
|
||||
this._stem = stem;
|
||||
this._startObj = structuredClone(startObj);
|
||||
this._reviver = reviver;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
async _transform(file, _, callback) {
|
||||
this._objects.push(JSON.parse(file.contents.toString(), this._reviver));
|
||||
if (!this._outFile) this._outFile = file.clone({ contents: false });
|
||||
callback(null);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
async _flush(callback) {
|
||||
const mergedObj = merge(this._startObj, ...this._objects);
|
||||
this._outFile.contents = Buffer.from(JSON.stringify(mergedObj));
|
||||
this._outFile.stem = this._stem;
|
||||
callback(null, this._outFile);
|
||||
}
|
||||
}
|
||||
|
||||
// Utility to flatten object keys to single level using separator
|
||||
const flatten = (data, prefix = "", sep = ".") => {
|
||||
const output = {};
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
if (typeof value === "object") {
|
||||
Object.assign(output, flatten(value, prefix + key + sep, sep));
|
||||
} else {
|
||||
output[prefix + key] = value;
|
||||
}
|
||||
}
|
||||
return output;
|
||||
String.prototype.rsplit = function(sep, maxsplit) {
|
||||
var split = this.split(sep);
|
||||
return maxsplit
|
||||
? [split.slice(0, -maxsplit).join(sep)].concat(split.slice(-maxsplit))
|
||||
: split;
|
||||
};
|
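An illustration of the new `flatten` helper above, with hypothetical keys:

// Nested translation objects become single-level dotted keys:
flatten({ ui: { panel: { config: { caption: "Settings" } } } });
// -> { "ui.panel.config.caption": "Settings" }

// The prefix argument is used when fragmentizing a single panel:
flatten({ caption: "Settings" }, "ui.panel.config.");
// -> { "ui.panel.config.caption": "Settings" }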
||||
|
||||
// Filter functions that can be passed directly to JSON.parse()
|
||||
const emptyReviver = (_key, value) => value || undefined;
|
||||
const testReviver = (_key, value) =>
|
||||
value && typeof value === "string" ? "TRANSLATED" : value;
|
||||
// Panel translations which should be split from the core translations. These
|
||||
// should mirror the fragment definitions in polymer.json, so that we load
|
||||
// additional resources at equivalent points.
|
||||
const TRANSLATION_FRAGMENTS = [
|
||||
"config",
|
||||
"history",
|
||||
"logbook",
|
||||
"mailbox",
|
||||
"profile",
|
||||
"shopping-list",
|
||||
"page-authorize",
|
||||
"page-demo",
|
||||
"page-onboarding",
|
||||
"developer-tools",
|
||||
];
|
||||
|
||||
function recursiveFlatten(prefix, data) {
|
||||
let output = {};
|
||||
Object.keys(data).forEach(function(key) {
|
||||
if (typeof data[key] === "object") {
|
||||
output = {
|
||||
...output,
|
||||
...recursiveFlatten(prefix + key + ".", data[key]),
|
||||
};
|
||||
} else {
|
||||
output[prefix + key] = data[key];
|
||||
}
|
||||
});
|
||||
return output;
|
||||
}
|
||||
|
||||
function flatten(data) {
|
||||
return recursiveFlatten("", data);
|
||||
}
|
||||
|
||||
function emptyFilter(data) {
|
||||
const newData = {};
|
||||
Object.keys(data).forEach((key) => {
|
||||
if (data[key]) {
|
||||
if (typeof data[key] === "object") {
|
||||
newData[key] = emptyFilter(data[key]);
|
||||
} else {
|
||||
newData[key] = data[key];
|
||||
}
|
||||
}
|
||||
});
|
||||
return newData;
|
||||
}
|
||||
|
||||
function recursiveEmpty(data) {
|
||||
const newData = {};
|
||||
Object.keys(data).forEach((key) => {
|
||||
if (data[key]) {
|
||||
if (typeof data[key] === "object") {
|
||||
newData[key] = recursiveEmpty(data[key]);
|
||||
} else {
|
||||
newData[key] = "TRANSLATED";
|
||||
}
|
||||
}
|
||||
});
|
||||
return newData;
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace Lokalise key placeholders with their actual values.
|
||||
@ -106,44 +97,64 @@ const testReviver = (_key, value) =>
|
||||
* be included in src/translations/en.json, but still be usable while
|
||||
* developing locally.
|
||||
*
|
||||
* @link https://docs.lokalise.com/en/articles/1400528-key-referencing
|
||||
* @link https://docs.lokalise.co/article/KO5SZWLLsy-key-referencing
|
||||
*/
|
||||
const KEY_REFERENCE = /\[%key:([^%]+)%\]/;
|
||||
const lokaliseTransform = (data, path, original = data) => {
|
||||
const re_key_reference = /\[%key:([^%]+)%\]/;
|
||||
function lokaliseTransform(data, original, file) {
|
||||
const output = {};
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
if (typeof value === "object") {
|
||||
output[key] = lokaliseTransform(value, path, original);
|
||||
Object.entries(data).forEach(([key, value]) => {
|
||||
if (value instanceof Object) {
|
||||
output[key] = lokaliseTransform(value, original, file);
|
||||
} else {
|
||||
output[key] = value.replace(KEY_REFERENCE, (_match, lokalise_key) => {
|
||||
const replace = lokalise_key.split("::").reduce((tr, k) => {
|
||||
if (!tr) {
|
||||
throw Error(`Invalid key placeholder ${lokalise_key} in ${path}`);
|
||||
}
|
||||
return tr[k];
|
||||
}, original);
|
||||
output[key] = value.replace(re_key_reference, (match, key) => {
|
||||
const replace = key.split("::").reduce((tr, k) => tr[k], original);
|
||||
if (typeof replace !== "string") {
|
||||
throw Error(`Invalid key placeholder ${lokalise_key} in ${path}`);
|
||||
throw Error(`Invalid key placeholder ${key} in ${file.path}`);
|
||||
}
|
||||
return replace;
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
return output;
|
||||
};
|
||||
}
|
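A small worked example of the key referencing handled by `lokaliseTransform` above (keys are hypothetical):

const source = {
  ui: {
    common: { yes: "Yes" },
    dialogs: { confirm: "[%key:ui::common::yes%]" },
  },
};
lokaliseTransform(source, "src/translations/en.json");
// -> { ui: { common: { yes: "Yes" }, dialogs: { confirm: "Yes" } } }
// A placeholder pointing at a missing or non-string key throws
// "Invalid key placeholder ... in src/translations/en.json".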
||||
|
||||
gulp.task("clean-translations", () => deleteAsync([workDir]));
|
||||
gulp.task("clean-translations", function() {
|
||||
return del([workDir]);
|
||||
});
|
||||
|
||||
const makeWorkDir = () => mkdir(workDir, { recursive: true });
|
||||
gulp.task("ensure-translations-build-dir", (done) => {
|
||||
if (!fs.existsSync(workDir)) {
|
||||
fs.mkdirSync(workDir);
|
||||
}
|
||||
done();
|
||||
});
|
||||
|
||||
const createTestTranslation = () =>
|
||||
env.isProdBuild()
|
||||
? Promise.resolve()
|
||||
: gulp
|
||||
.src(EN_SRC)
|
||||
.pipe(new CustomJSON(null, testReviver))
|
||||
.pipe(rename(`${TEST_LOCALE}.json`))
|
||||
.pipe(gulp.dest(workDir));
|
||||
gulp.task("create-test-metadata", function(cb) {
|
||||
fs.writeFile(
|
||||
workDir + "/testMetadata.json",
|
||||
JSON.stringify({
|
||||
test: {
|
||||
nativeName: "Test",
|
||||
},
|
||||
}),
|
||||
cb
|
||||
);
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"create-test-translation",
|
||||
gulp.series("create-test-metadata", function createTestTranslation() {
|
||||
return gulp
|
||||
.src(path.join(paths.translations_src, "en.json"))
|
||||
.pipe(
|
||||
transform(function(data, file) {
|
||||
return recursiveEmpty(data);
|
||||
})
|
||||
)
|
||||
.pipe(rename("test.json"))
|
||||
.pipe(gulp.dest(workDir));
|
||||
})
|
||||
);
|
||||
|
||||
/**
|
||||
* This task will build a master translation file, to be used as the base for
|
||||
@ -154,174 +165,216 @@ const createTestTranslation = () =>
|
||||
* project is buildable immediately after merging new translation keys, since
|
||||
* the Lokalise update to translations/en.json will not happen immediately.
|
||||
*/
|
||||
const createMasterTranslation = () =>
|
||||
gulp
|
||||
.src([EN_SRC, ...(mergeBackend ? [`${inBackendDir}/en.json`] : [])])
|
||||
.pipe(new CustomJSON(lokaliseTransform))
|
||||
.pipe(new MergeJSON("en"))
|
||||
.pipe(gulp.dest(workDir));
|
||||
|
||||
const FRAGMENTS = ["base"];
|
||||
|
||||
const setFragment = (fragment) => async () => {
|
||||
FRAGMENTS[0] = fragment;
|
||||
};
|
||||
|
||||
const panelFragment = (fragment) =>
|
||||
fragment !== "base" &&
|
||||
fragment !== "supervisor" &&
|
||||
fragment !== "landing-page";
|
||||
|
||||
const HASHES = new Map();
|
||||
|
||||
const createTranslations = async () => {
|
||||
// Parse and store the master to avoid repeating this for each locale, then
|
||||
// add the panel fragments when processing the app.
|
||||
const enMaster = JSON.parse(await readFile(`${workDir}/en.json`, "utf-8"));
|
||||
if (FRAGMENTS[0] === "base") {
|
||||
FRAGMENTS.push(...Object.keys(enMaster.ui.panel));
|
||||
}
|
||||
|
||||
// The downstream pipeline is setup first. It hashes the merged data for
|
||||
// each locale, then fragmentizes and flattens the data for final output.
|
||||
const translationFiles = await glob([
|
||||
`${inFrontendDir}/!(en).json`,
|
||||
...(env.isProdBuild() ? [] : [`${workDir}/${TEST_LOCALE}.json`]),
|
||||
]);
|
||||
const hashStream = new Transform({
|
||||
objectMode: true,
|
||||
transform: async (file, _, callback) => {
|
||||
const hash = env.isProdBuild()
|
||||
? createHash("md5").update(file.contents).digest("hex")
|
||||
: "dev";
|
||||
HASHES.set(file.stem, hash);
|
||||
file.stem += `-${hash}`;
|
||||
callback(null, file);
|
||||
},
|
||||
}).setMaxListeners(translationFiles.length + 1);
|
||||
const fragmentsStream = hashStream
|
||||
gulp.task("build-master-translation", function() {
|
||||
return gulp
|
||||
.src(path.join(paths.translations_src, "en.json"))
|
||||
.pipe(
|
||||
new CustomJSON((data) =>
|
||||
FRAGMENTS.map((fragment) => {
|
||||
switch (fragment) {
|
||||
case "base":
|
||||
// Remove the panels and supervisor to create the base translations
|
||||
return [
|
||||
flatten({
|
||||
...data,
|
||||
ui: { ...data.ui, panel: undefined },
|
||||
supervisor: undefined,
|
||||
}),
|
||||
"",
|
||||
];
|
||||
case "supervisor":
|
||||
// Supervisor key is at the top level
|
||||
return [flatten(data.supervisor), ""];
|
||||
case "landing-page":
|
||||
// landing-page key is at the top level
|
||||
return [flatten(data["landing-page"]), ""];
|
||||
default:
|
||||
// Create a fragment with only the given panel
|
||||
return [
|
||||
flatten(data.ui.panel[fragment], `ui.panel.${fragment}.`),
|
||||
fragment,
|
||||
];
|
||||
}
|
||||
})
|
||||
)
|
||||
)
|
||||
.pipe(gulp.dest(outDir));
|
||||
|
||||
// Send the English master downstream first, then for each other locale
|
||||
// generate merged JSON data to continue piping. It begins with the master
|
||||
// translation as a failsafe for untranslated strings, and merges all parent
|
||||
// tags into one file for each specific subtag
|
||||
//
|
||||
// TODO: This is a naive interpretation of BCP47 that should be improved.
|
||||
// Will be OK for now as long as we don't have anything more complicated
|
||||
// than a base translation + region.
|
||||
const masterStream = gulp
|
||||
.src(`${workDir}/en.json`)
|
||||
.pipe(new PassThrough({ objectMode: true }));
|
||||
masterStream.pipe(hashStream, { end: false });
|
||||
const mergesFinished = [finished(masterStream)];
|
||||
for (const translationFile of translationFiles) {
|
||||
const locale = basename(translationFile, ".json");
|
||||
const subtags = locale.split("-");
|
||||
const mergeFiles = [];
|
||||
for (let i = 1; i <= subtags.length; i++) {
|
||||
const lang = subtags.slice(0, i).join("-");
|
||||
if (lang === TEST_LOCALE) {
|
||||
mergeFiles.push(`${workDir}/${TEST_LOCALE}.json`);
|
||||
} else if (lang !== "en") {
|
||||
mergeFiles.push(`${inFrontendDir}/${lang}.json`);
|
||||
if (mergeBackend) {
|
||||
mergeFiles.push(`${inBackendDir}/${lang}.json`);
|
||||
}
|
||||
}
|
||||
}
|
||||
const mergeStream = gulp
|
||||
.src(mergeFiles, { allowEmpty: true })
|
||||
.pipe(new MergeJSON(locale, enMaster, emptyReviver));
|
||||
mergesFinished.push(finished(mergeStream));
|
||||
mergeStream.pipe(hashStream, { end: false });
|
||||
}
|
||||
|
||||
// Wait for all merges to finish, then it's safe to end writing to the
|
||||
// downstream pipeline and wait for all fragments to finish writing.
|
||||
await Promise.all(mergesFinished);
|
||||
hashStream.end();
|
||||
await finished(fragmentsStream);
|
||||
};
|
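To make the merge order concrete, a hedged sketch for a hypothetical locale `fr-CA` (frontend files only; backend files are appended per subtag when `mergeBackend` is set):

import merge from "lodash.merge";

const enMaster = { greeting: "Hello", only_en: "English only" }; // failsafe base
const fr = { greeting: "Bonjour" };        // translations/frontend/fr.json
const frCA = { greeting: "Bonjour, ici" }; // translations/frontend/fr-CA.json

// Parent subtags are merged in order, later files overriding earlier ones:
const result = merge(structuredClone(enMaster), fr, frCA);
// -> { greeting: "Bonjour, ici", only_en: "English only" }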
||||
|
||||
const writeTranslationMetaData = () =>
|
||||
gulp
|
||||
.src([`${paths.translations_src}/translationMetadata.json`])
|
||||
.pipe(
|
||||
new CustomJSON((meta) => {
|
||||
// Add the test translation in development.
|
||||
if (!env.isProdBuild()) {
|
||||
meta[TEST_LOCALE] = { nativeName: "Translation Test" };
|
||||
}
|
||||
// Filter out locales without a native name, and add the hashes.
|
||||
for (const locale of Object.keys(meta)) {
|
||||
if (!meta[locale].nativeName) {
|
||||
meta[locale] = undefined;
|
||||
console.warn(
|
||||
`Skipping locale ${locale} because native name is not translated.`
|
||||
);
|
||||
} else {
|
||||
meta[locale].hash = HASHES.get(locale);
|
||||
}
|
||||
}
|
||||
return {
|
||||
fragments: FRAGMENTS.filter(panelFragment),
|
||||
translations: meta,
|
||||
};
|
||||
transform(function(data, file) {
|
||||
return lokaliseTransform(data, data, file);
|
||||
})
|
||||
)
|
||||
.pipe(rename("translationMaster.json"))
|
||||
.pipe(gulp.dest(workDir));
|
||||
});
|
||||
|
||||
gulp.task("build-merged-translations", function() {
|
||||
return gulp
|
||||
.src([inDir + "/*.json", workDir + "/test.json"], { allowEmpty: true })
|
||||
.pipe(
|
||||
transform(function(data, file) {
|
||||
return lokaliseTransform(data, data, file);
|
||||
})
|
||||
)
|
||||
.pipe(
|
||||
foreach(function(stream, file) {
|
||||
// For each language generate a merged json file. It begins with the master
|
||||
// translation as a failsafe for untranslated strings, and merges all parent
|
||||
// tags into one file for each specific subtag
|
||||
//
|
||||
// TODO: This is a naive interpretation of BCP47 that should be improved.
|
||||
// Will be OK for now as long as we don't have anything more complicated
|
||||
// than a base translation + region.
|
||||
const tr = path.basename(file.history[0], ".json");
|
||||
const subtags = tr.split("-");
|
||||
const src = [workDir + "/translationMaster.json"];
|
||||
for (let i = 1; i <= subtags.length; i++) {
|
||||
const lang = subtags.slice(0, i).join("-");
|
||||
if (lang === "test") {
|
||||
src.push(workDir + "/test.json");
|
||||
} else if (lang !== "en") {
|
||||
src.push(inDir + "/" + lang + ".json");
|
||||
}
|
||||
}
|
||||
return gulp
|
||||
.src(src, { allowEmpty: true })
|
||||
.pipe(transform((data) => emptyFilter(data)))
|
||||
.pipe(
|
||||
merge({
|
||||
fileName: tr + ".json",
|
||||
})
|
||||
)
|
||||
.pipe(gulp.dest(fullDir));
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
var taskName;
|
||||
|
||||
const splitTasks = [];
|
||||
TRANSLATION_FRAGMENTS.forEach((fragment) => {
|
||||
taskName = "build-translation-fragment-" + fragment;
|
||||
gulp.task(taskName, function() {
|
||||
// Return only the translations for this fragment.
|
||||
return gulp
|
||||
.src(fullDir + "/*.json")
|
||||
.pipe(
|
||||
transform((data) => ({
|
||||
ui: {
|
||||
panel: {
|
||||
[fragment]: data.ui.panel[fragment],
|
||||
},
|
||||
},
|
||||
}))
|
||||
)
|
||||
.pipe(gulp.dest(workDir + "/" + fragment));
|
||||
});
|
||||
splitTasks.push(taskName);
|
||||
});
|
||||
|
||||
taskName = "build-translation-core";
|
||||
gulp.task(taskName, function() {
|
||||
// Remove the fragment translations from the core translation.
|
||||
return gulp
|
||||
.src(fullDir + "/*.json")
|
||||
.pipe(
|
||||
transform((data) => {
|
||||
TRANSLATION_FRAGMENTS.forEach((fragment) => {
|
||||
delete data.ui.panel[fragment];
|
||||
});
|
||||
return data;
|
||||
})
|
||||
)
|
||||
.pipe(gulp.dest(coreDir));
|
||||
});
|
||||
|
||||
splitTasks.push(taskName);
|
||||
|
||||
gulp.task("build-flattened-translations", function() {
|
||||
// Flatten the split versions of our translations, and move them into outDir
|
||||
return gulp
|
||||
.src(
|
||||
TRANSLATION_FRAGMENTS.map(
|
||||
(fragment) => workDir + "/" + fragment + "/*.json"
|
||||
).concat(coreDir + "/*.json"),
|
||||
{ base: workDir }
|
||||
)
|
||||
.pipe(
|
||||
transform(function(data) {
|
||||
// Polymer.AppLocalizeBehavior requires flattened json
|
||||
return flatten(data);
|
||||
})
|
||||
)
|
||||
.pipe(minify())
|
||||
.pipe(
|
||||
rename((filePath) => {
|
||||
if (filePath.dirname === "core") {
|
||||
filePath.dirname = "";
|
||||
}
|
||||
})
|
||||
)
|
||||
.pipe(gulp.dest(outDir));
|
||||
});
|
||||
|
||||
const fingerprints = {};
|
||||
|
||||
gulp.task(
|
||||
"build-translation-fingerprints",
|
||||
function fingerprintTranslationFiles() {
|
||||
// Fingerprint full file of each language
|
||||
const files = fs.readdirSync(fullDir);
|
||||
|
||||
for (let i = 0; i < files.length; i++) {
|
||||
fingerprints[files[i].split(".")[0]] = {
|
||||
// In dev we create fake hashes
|
||||
hash: env.isProdBuild()
|
||||
? crypto
|
||||
.createHash("md5")
|
||||
.update(fs.readFileSync(path.join(fullDir, files[i]), "utf-8"))
|
||||
.digest("hex")
|
||||
: "dev",
|
||||
};
|
||||
}
|
||||
|
||||
mapFiles(outDir, ".json", (filename) => {
|
||||
const parsed = path.parse(filename);
|
||||
|
||||
// nl.json -> nl-<hash>.json
|
||||
if (!(parsed.name in fingerprints)) {
|
||||
throw new Error(`Unable to find hash for ${filename}`);
|
||||
}
|
||||
|
||||
fs.renameSync(
|
||||
filename,
|
||||
`${parsed.dir}/${parsed.name}-${fingerprints[parsed.name].hash}${
|
||||
parsed.ext
|
||||
}`
|
||||
);
|
||||
});
|
||||
|
||||
const stream = source("translationFingerprints.json");
|
||||
stream.write(JSON.stringify(fingerprints));
|
||||
process.nextTick(() => stream.end());
|
||||
return stream.pipe(vinylBuffer()).pipe(gulp.dest(workDir));
|
||||
}
|
||||
);
|
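A minimal sketch of how a locale file name gets its fingerprint (prod builds hash the merged contents; dev builds just use the literal string "dev"):

import { createHash } from "node:crypto";
import { readFile } from "node:fs/promises";

// Hypothetical merged output file:
const contents = await readFile("build/translations/output/nl.json");
const hash = createHash("md5").update(contents).digest("hex");
// nl.json -> `nl-${hash}.json`, and the hash is also written to the
// translation metadata so the app knows which file to fetch.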
||||
|
||||
gulp.task(
|
||||
"build-translations",
|
||||
gulp.series(
|
||||
gulp.parallel(
|
||||
"fetch-nightly-translations",
|
||||
gulp.series("clean-translations", makeWorkDir)
|
||||
),
|
||||
createTestTranslation,
|
||||
createMasterTranslation,
|
||||
createTranslations,
|
||||
writeTranslationMetaData
|
||||
"clean-translations",
|
||||
"ensure-translations-build-dir",
|
||||
env.isProdBuild() ? (done) => done() : "create-test-translation",
|
||||
"build-master-translation",
|
||||
"build-merged-translations",
|
||||
gulp.parallel(...splitTasks),
|
||||
"build-flattened-translations",
|
||||
"build-translation-fingerprints",
|
||||
function writeMetadata() {
|
||||
return gulp
|
||||
.src(
|
||||
[
|
||||
path.join(paths.translations_src, "translationMetadata.json"),
|
||||
workDir + "/testMetadata.json",
|
||||
workDir + "/translationFingerprints.json",
|
||||
],
|
||||
{ allowEmpty: true }
|
||||
)
|
||||
.pipe(merge({}))
|
||||
.pipe(
|
||||
transform(function(data) {
|
||||
const newData = {};
|
||||
Object.entries(data).forEach(([key, value]) => {
|
||||
// Filter out translations without native name.
|
||||
if (data[key].nativeName) {
|
||||
newData[key] = data[key];
|
||||
} else {
|
||||
console.warn(
|
||||
`Skipping language ${key}. Native name was not translated.`
|
||||
);
|
||||
}
|
||||
if (data[key]) newData[key] = value;
|
||||
});
|
||||
return newData;
|
||||
})
|
||||
)
|
||||
.pipe(
|
||||
transform((data) => ({
|
||||
fragments: TRANSLATION_FRAGMENTS,
|
||||
translations: data,
|
||||
}))
|
||||
)
|
||||
.pipe(rename("translationMetadata.json"))
|
||||
.pipe(gulp.dest(workDir));
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"build-supervisor-translations",
|
||||
gulp.series(setFragment("supervisor"), "build-translations")
|
||||
);
|
||||
|
||||
gulp.task(
|
||||
"build-landing-page-translations",
|
||||
gulp.series(setFragment("landing-page"), "build-translations")
|
||||
);
|
||||
|
build-scripts/gulp/webpack.js (new file, 174 lines)
@ -0,0 +1,174 @@
|
||||
// Tasks to run webpack.
|
||||
const gulp = require("gulp");
|
||||
const webpack = require("webpack");
|
||||
const WebpackDevServer = require("webpack-dev-server");
|
||||
const log = require("fancy-log");
|
||||
const path = require("path");
|
||||
const paths = require("../paths");
|
||||
const {
|
||||
createAppConfig,
|
||||
createDemoConfig,
|
||||
createCastConfig,
|
||||
createHassioConfig,
|
||||
createGalleryConfig,
|
||||
} = require("../webpack");
|
||||
|
||||
const bothBuilds = (createConfigFunc, params) => [
|
||||
createConfigFunc({ ...params, latestBuild: true }),
|
||||
createConfigFunc({ ...params, latestBuild: false }),
|
||||
];
|
||||
|
||||
const runDevServer = ({
|
||||
compiler,
|
||||
contentBase,
|
||||
port,
|
||||
listenHost = "localhost",
|
||||
}) =>
|
||||
new WebpackDevServer(compiler, {
|
||||
open: true,
|
||||
watchContentBase: true,
|
||||
contentBase,
|
||||
}).listen(port, listenHost, function(err) {
|
||||
if (err) {
|
||||
throw err;
|
||||
}
|
||||
// Server listening
|
||||
log("[webpack-dev-server]", `http://localhost:${port}`);
|
||||
});
|
||||
|
||||
const handler = (done) => (err, stats) => {
|
||||
if (err) {
|
||||
console.log(err.stack || err);
|
||||
if (err.details) {
|
||||
console.log(err.details);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
log(`Build done @ ${new Date().toLocaleTimeString()}`);
|
||||
|
||||
if (stats.hasErrors() || stats.hasWarnings()) {
|
||||
console.log(stats.toString("minimal"));
|
||||
}
|
||||
|
||||
if (done) {
|
||||
done();
|
||||
}
|
||||
};
|
||||
|
||||
gulp.task("webpack-watch-app", () => {
|
||||
// we are not calling done, so this command will run forever
|
||||
webpack(createAppConfig({ isProdBuild: false, latestBuild: true })).watch(
|
||||
{ ignored: /build-translations/ },
|
||||
handler()
|
||||
);
|
||||
gulp.watch(
|
||||
path.join(paths.translations_src, "en.json"),
|
||||
gulp.series("build-translations", "copy-translations")
|
||||
);
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"webpack-prod-app",
|
||||
() =>
|
||||
new Promise((resolve) =>
|
||||
webpack(
|
||||
bothBuilds(createAppConfig, { isProdBuild: true }),
|
||||
handler(resolve)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task("webpack-dev-server-demo", () => {
|
||||
runDevServer({
|
||||
compiler: webpack(bothBuilds(createDemoConfig, { isProdBuild: false })),
|
||||
contentBase: paths.demo_root,
|
||||
port: 8090,
|
||||
});
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"webpack-prod-demo",
|
||||
() =>
|
||||
new Promise((resolve) =>
|
||||
webpack(
|
||||
bothBuilds(createDemoConfig, {
|
||||
isProdBuild: true,
|
||||
}),
|
||||
handler(resolve)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task("webpack-dev-server-cast", () => {
|
||||
runDevServer({
|
||||
compiler: webpack(bothBuilds(createCastConfig, { isProdBuild: false })),
|
||||
contentBase: paths.cast_root,
|
||||
port: 8080,
|
||||
// Accessible from the network, because that's how Cast hits it.
|
||||
listenHost: "0.0.0.0",
|
||||
});
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"webpack-prod-cast",
|
||||
() =>
|
||||
new Promise((resolve) =>
|
||||
webpack(
|
||||
bothBuilds(createCastConfig, {
|
||||
isProdBuild: true,
|
||||
}),
|
||||
|
||||
handler(resolve)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task("webpack-watch-hassio", () => {
|
||||
// we are not calling done, so this command will run forever
|
||||
webpack(
|
||||
createHassioConfig({
|
||||
isProdBuild: false,
|
||||
latestBuild: false,
|
||||
})
|
||||
).watch({}, handler());
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"webpack-prod-hassio",
|
||||
() =>
|
||||
new Promise((resolve) =>
|
||||
webpack(
|
||||
createHassioConfig({
|
||||
isProdBuild: true,
|
||||
latestBuild: false,
|
||||
}),
|
||||
handler(resolve)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task("webpack-dev-server-gallery", () => {
|
||||
runDevServer({
|
||||
compiler: webpack(
|
||||
createGalleryConfig({ latestBuild: true, isProdBuild: false })
|
||||
),
|
||||
contentBase: paths.gallery_root,
|
||||
port: 8100,
|
||||
});
|
||||
});
|
||||
|
||||
gulp.task(
|
||||
"webpack-prod-gallery",
|
||||
() =>
|
||||
new Promise((resolve) =>
|
||||
webpack(
|
||||
createGalleryConfig({
|
||||
isProdBuild: true,
|
||||
latestBuild: true,
|
||||
}),
|
||||
|
||||
handler(resolve)
|
||||
)
|
||||
)
|
||||
);
|
@ -1,84 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
// Script to print Babel plugins and Core JS polyfills that will be used by browserslist environments
|
||||
|
||||
import { version as babelVersion } from "@babel/core";
|
||||
import presetEnv from "@babel/preset-env";
|
||||
import compilationTargets from "@babel/helper-compilation-targets";
|
||||
import coreJSCompat from "core-js-compat";
|
||||
import { logPlugin } from "@babel/preset-env/lib/debug.js";
|
||||
// eslint-disable-next-line import/no-relative-packages
|
||||
import shippedPolyfills from "../node_modules/babel-plugin-polyfill-corejs3/lib/shipped-proposals.js";
|
||||
import { babelOptions } from "./bundle.cjs";
|
||||
|
||||
const detailsOpen = (heading) =>
|
||||
`<details>\n<summary><h4>${heading}</h4></summary>\n`;
|
||||
const detailsClose = "</details>\n";
|
||||
|
||||
const dummyAPI = {
|
||||
version: babelVersion,
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
assertVersion: () => {},
|
||||
caller: (callback) =>
|
||||
callback({
|
||||
name: "Dummy Bundler",
|
||||
supportsStaticESM: true,
|
||||
supportsDynamicImport: true,
|
||||
supportsTopLevelAwait: true,
|
||||
supportsExportNamespaceFrom: true,
|
||||
}),
|
||||
targets: () => ({}),
|
||||
};
|
||||
|
||||
// Generate filter function based on proposal/method inputs
|
||||
// Copied and adapted from babel-plugin-polyfill-corejs3/esm/index.mjs
|
||||
const polyfillFilter = (method, proposals, shippedProposals) => (name) => {
|
||||
if (proposals || method === "entry-global") return true;
|
||||
if (shippedProposals && shippedPolyfills.default.has(name)) {
|
||||
return true;
|
||||
}
|
||||
if (name.startsWith("esnext.")) {
|
||||
const esName = `es.${name.slice(7)}`;
|
||||
// If its imaginative esName is not in latest compat data, it means the proposal is not stage 4
|
||||
return esName in coreJSCompat.data;
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
// Log the plugins and polyfills for each build environment
|
||||
for (const buildType of ["Modern", "Legacy"]) {
|
||||
const browserslistEnv = buildType.toLowerCase();
|
||||
const babelOpts = babelOptions({ latestBuild: browserslistEnv === "modern" });
|
||||
const presetEnvOpts = babelOpts.presets[0][1];
|
||||
|
||||
// Invoking preset-env in debug mode will log the included plugins
|
||||
console.log(detailsOpen(`${buildType} Build Babel Plugins`));
|
||||
presetEnv.default(dummyAPI, {
|
||||
...presetEnvOpts,
|
||||
browserslistEnv,
|
||||
debug: true,
|
||||
});
|
||||
console.log(detailsClose);
|
||||
|
||||
// Manually log the Core-JS polyfills using the same technique
|
||||
if (presetEnvOpts.useBuiltIns) {
|
||||
console.log(detailsOpen(`${buildType} Build Core-JS Polyfills`));
|
||||
const targets = compilationTargets.default(babelOpts?.targets, {
|
||||
browserslistEnv,
|
||||
});
|
||||
const polyfillList = coreJSCompat({ targets }).list.filter(
|
||||
polyfillFilter(
|
||||
`${presetEnvOpts.useBuiltIns}-global`,
|
||||
presetEnvOpts?.corejs?.proposals,
|
||||
presetEnvOpts?.shippedProposals
|
||||
)
|
||||
);
|
||||
console.log(
|
||||
"The following %i polyfills may be injected by Babel:\n",
|
||||
polyfillList.length
|
||||
);
|
||||
for (const polyfill of polyfillList) {
|
||||
logPlugin(polyfill, targets, coreJSCompat.data);
|
||||
}
|
||||
console.log(detailsClose);
|
||||
}
|
||||
}
|
@ -1,63 +0,0 @@
|
||||
const path = require("path");
|
||||
|
||||
module.exports = {
|
||||
root_dir: path.resolve(__dirname, ".."),
|
||||
|
||||
build_dir: path.resolve(__dirname, "../build"),
|
||||
app_output_root: path.resolve(__dirname, "../hass_frontend"),
|
||||
app_output_static: path.resolve(__dirname, "../hass_frontend/static"),
|
||||
app_output_latest: path.resolve(
|
||||
__dirname,
|
||||
"../hass_frontend/frontend_latest"
|
||||
),
|
||||
app_output_es5: path.resolve(__dirname, "../hass_frontend/frontend_es5"),
|
||||
|
||||
demo_dir: path.resolve(__dirname, "../demo"),
|
||||
demo_output_root: path.resolve(__dirname, "../demo/dist"),
|
||||
demo_output_static: path.resolve(__dirname, "../demo/dist/static"),
|
||||
demo_output_latest: path.resolve(__dirname, "../demo/dist/frontend_latest"),
|
||||
demo_output_es5: path.resolve(__dirname, "../demo/dist/frontend_es5"),
|
||||
|
||||
cast_dir: path.resolve(__dirname, "../cast"),
|
||||
cast_output_root: path.resolve(__dirname, "../cast/dist"),
|
||||
cast_output_static: path.resolve(__dirname, "../cast/dist/static"),
|
||||
cast_output_latest: path.resolve(__dirname, "../cast/dist/frontend_latest"),
|
||||
cast_output_es5: path.resolve(__dirname, "../cast/dist/frontend_es5"),
|
||||
|
||||
gallery_dir: path.resolve(__dirname, "../gallery"),
|
||||
gallery_build: path.resolve(__dirname, "../gallery/build"),
|
||||
gallery_output_root: path.resolve(__dirname, "../gallery/dist"),
|
||||
gallery_output_latest: path.resolve(
|
||||
__dirname,
|
||||
"../gallery/dist/frontend_latest"
|
||||
),
|
||||
gallery_output_static: path.resolve(__dirname, "../gallery/dist/static"),
|
||||
|
||||
landingPage_dir: path.resolve(__dirname, "../landing-page"),
|
||||
landingPage_build: path.resolve(__dirname, "../landing-page/build"),
|
||||
landingPage_output_root: path.resolve(__dirname, "../landing-page/dist"),
|
||||
landingPage_output_latest: path.resolve(
|
||||
__dirname,
|
||||
"../landing-page/dist/frontend_latest"
|
||||
),
|
||||
landingPage_output_es5: path.resolve(
|
||||
__dirname,
|
||||
"../landing-page/dist/frontend_es5"
|
||||
),
|
||||
landingPage_output_static: path.resolve(
|
||||
__dirname,
|
||||
"../landing-page/dist/static"
|
||||
),
|
||||
|
||||
hassio_dir: path.resolve(__dirname, "../hassio"),
|
||||
hassio_output_root: path.resolve(__dirname, "../hassio/build"),
|
||||
hassio_output_static: path.resolve(__dirname, "../hassio/build/static"),
|
||||
hassio_output_latest: path.resolve(
|
||||
__dirname,
|
||||
"../hassio/build/frontend_latest"
|
||||
),
|
||||
hassio_output_es5: path.resolve(__dirname, "../hassio/build/frontend_es5"),
|
||||
hassio_publicPath: "/api/hassio/app",
|
||||
|
||||
translations_src: path.resolve(__dirname, "../src/translations"),
|
||||
};
|
build-scripts/paths.js (new file, 34 lines)
@ -0,0 +1,34 @@
|
||||
var path = require("path");
|
||||
|
||||
module.exports = {
|
||||
polymer_dir: path.resolve(__dirname, ".."),
|
||||
|
||||
build_dir: path.resolve(__dirname, "../build"),
|
||||
root: path.resolve(__dirname, "../hass_frontend"),
|
||||
static: path.resolve(__dirname, "../hass_frontend/static"),
|
||||
output: path.resolve(__dirname, "../hass_frontend/frontend_latest"),
|
||||
output_es5: path.resolve(__dirname, "../hass_frontend/frontend_es5"),
|
||||
|
||||
demo_dir: path.resolve(__dirname, "../demo"),
|
||||
demo_root: path.resolve(__dirname, "../demo/dist"),
|
||||
demo_static: path.resolve(__dirname, "../demo/dist/static"),
|
||||
demo_output: path.resolve(__dirname, "../demo/dist/frontend_latest"),
|
||||
demo_output_es5: path.resolve(__dirname, "../demo/dist/frontend_es5"),
|
||||
|
||||
cast_dir: path.resolve(__dirname, "../cast"),
|
||||
cast_root: path.resolve(__dirname, "../cast/dist"),
|
||||
cast_static: path.resolve(__dirname, "../cast/dist/static"),
|
||||
cast_output: path.resolve(__dirname, "../cast/dist/frontend_latest"),
|
||||
cast_output_es5: path.resolve(__dirname, "../cast/dist/frontend_es5"),
|
||||
|
||||
gallery_dir: path.resolve(__dirname, "../gallery"),
|
||||
gallery_root: path.resolve(__dirname, "../gallery/dist"),
|
||||
gallery_output: path.resolve(__dirname, "../gallery/dist/frontend_latest"),
|
||||
gallery_static: path.resolve(__dirname, "../gallery/dist/static"),
|
||||
|
||||
hassio_dir: path.resolve(__dirname, "../hassio"),
|
||||
hassio_root: path.resolve(__dirname, "../hassio/build"),
|
||||
hassio_publicPath: "/api/hassio/app/",
|
||||
|
||||
translations_src: path.resolve(__dirname, "../src/translations"),
|
||||
};
|
@ -1 +0,0 @@
|
||||
[]
|
@ -1,313 +0,0 @@
|
||||
const { existsSync } = require("fs");
|
||||
const path = require("path");
|
||||
const rspack = require("@rspack/core");
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
const { RsdoctorRspackPlugin } = require("@rsdoctor/rspack-plugin");
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
const { StatsWriterPlugin } = require("webpack-stats-plugin");
|
||||
const filterStats = require("@bundle-stats/plugin-webpack-filter");
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
const TerserPlugin = require("terser-webpack-plugin");
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
const { WebpackManifestPlugin } = require("rspack-manifest-plugin");
|
||||
const log = require("fancy-log");
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
const WebpackBar = require("webpackbar/rspack");
|
||||
const paths = require("./paths.cjs");
|
||||
const bundle = require("./bundle.cjs");
|
||||
|
||||
class LogStartCompilePlugin {
|
||||
ignoredFirst = false;
|
||||
|
||||
apply(compiler) {
|
||||
compiler.hooks.beforeCompile.tap("LogStartCompilePlugin", () => {
|
||||
if (!this.ignoredFirst) {
|
||||
this.ignoredFirst = true;
|
||||
return;
|
||||
}
|
||||
log("Changes detected. Starting compilation");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const createRspackConfig = ({
|
||||
name,
|
||||
entry,
|
||||
outputPath,
|
||||
publicPath,
|
||||
defineOverlay,
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
isStatsBuild,
|
||||
isTestBuild,
|
||||
isHassioBuild,
|
||||
dontHash,
|
||||
}) => {
|
||||
if (!dontHash) {
|
||||
dontHash = new Set();
|
||||
}
|
||||
const ignorePackages = bundle.ignorePackages({ latestBuild });
|
||||
return {
|
||||
name,
|
||||
mode: isProdBuild ? "production" : "development",
|
||||
target: `browserslist:${latestBuild ? "modern" : "legacy"}`,
|
||||
// For tests/CI, source maps are skipped to gain build speed
|
||||
// For production, generate source maps for accurate stack traces without source code
|
||||
// For development, generate "cheap" versions that can map to original line numbers
|
||||
devtool: isTestBuild
|
||||
? false
|
||||
: isProdBuild
|
||||
? "nosources-source-map"
|
||||
: "eval-cheap-module-source-map",
|
||||
entry,
|
||||
node: false,
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
test: /\.m?js$|\.ts$/,
|
||||
exclude: /node_modules[\\/]core-js/,
|
||||
use: (info) => [
|
||||
{
|
||||
loader: "babel-loader",
|
||||
options: {
|
||||
...bundle.babelOptions({
|
||||
latestBuild,
|
||||
isProdBuild,
|
||||
isTestBuild,
|
||||
sw: info.issuerLayer === "sw",
|
||||
}),
|
||||
cacheDirectory: !isProdBuild,
|
||||
cacheCompression: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
loader: "builtin:swc-loader",
|
||||
options: bundle.swcOptions(),
|
||||
},
|
||||
],
|
||||
resolve: {
|
||||
fullySpecified: false,
|
||||
},
|
||||
parser: {
|
||||
worker: ["*context.audioWorklet.addModule()", "..."],
|
||||
},
|
||||
},
|
||||
{
|
||||
test: /\.css$/,
|
||||
type: "asset/source",
|
||||
},
|
||||
],
|
||||
},
|
||||
optimization: {
|
||||
minimizer: [
|
||||
new TerserPlugin({
|
||||
parallel: true,
|
||||
extractComments: true,
|
||||
terserOptions: bundle.terserOptions({ latestBuild, isTestBuild }),
|
||||
}),
|
||||
],
|
||||
moduleIds: isProdBuild && !isStatsBuild ? "deterministic" : "named",
|
||||
chunkIds: isProdBuild && !isStatsBuild ? "deterministic" : "named",
|
||||
splitChunks: {
|
||||
// Disable splitting for web workers and worklets because imports of
|
||||
// external chunks are broken for:
|
||||
chunks: !isProdBuild
|
||||
? // improve incremental build speed, but blows up bundle size
|
||||
new RegExp(
|
||||
`^(?!(${Object.keys(entry).join("|")}|.*work(?:er|let))$)`
|
||||
)
|
||||
: // - ESM output: https://github.com/webpack/webpack/issues/17014
|
||||
// - Worklets use `importScripts`: https://github.com/webpack/webpack/issues/11543
|
||||
(chunk) =>
|
||||
!chunk.canBeInitial() &&
|
||||
!new RegExp(
|
||||
`^.+-work${latestBuild ? "(?:let|er)" : "let"}$`
|
||||
).test(chunk.name),
|
||||
},
|
||||
},
|
||||
plugins: [
|
||||
!isStatsBuild && new WebpackBar({ fancy: !isProdBuild }),
|
||||
new WebpackManifestPlugin({
|
||||
// Only include the JS of entrypoints
|
||||
filter: (file) => file.isInitial && !file.name.endsWith(".map"),
|
||||
}),
|
||||
new rspack.DefinePlugin(
|
||||
bundle.definedVars({ isProdBuild, latestBuild, defineOverlay })
|
||||
),
|
||||
new rspack.IgnorePlugin({
|
||||
checkResource(resource, context) {
|
||||
// Only use ignore to intercept imports that we don't control
|
||||
// inside node_module dependencies.
|
||||
if (
|
||||
!context.includes("/node_modules/") ||
|
||||
// calling define.amd will call require("!!webpack amd options")
|
||||
resource.startsWith("!!webpack") ||
|
||||
// loaded by webpack dev server but doesn't exist.
|
||||
resource === "webpack/hot" ||
|
||||
resource.startsWith("@swc/helpers")
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
let fullPath;
|
||||
try {
|
||||
fullPath = resource.startsWith(".")
|
||||
? path.resolve(context, resource)
|
||||
: require.resolve(resource);
|
||||
} catch (err) {
|
||||
console.error(
|
||||
"Error in Home Assistant ignore plugin",
|
||||
resource,
|
||||
context
|
||||
);
|
||||
throw err;
|
||||
}
|
||||
|
||||
return ignorePackages.some((toIgnorePath) =>
|
||||
fullPath.startsWith(toIgnorePath)
|
||||
);
|
||||
},
|
||||
}),
|
||||
new rspack.NormalModuleReplacementPlugin(
|
||||
new RegExp(bundle.emptyPackages({ isHassioBuild }).join("|")),
|
||||
path.resolve(paths.root_dir, "src/util/empty.js")
|
||||
),
|
||||
!isProdBuild && new LogStartCompilePlugin(),
|
||||
isProdBuild &&
|
||||
new StatsWriterPlugin({
|
||||
filename: path.relative(
|
||||
outputPath,
|
||||
path.join(paths.build_dir, "stats", `${name}.json`)
|
||||
),
|
||||
stats: { assets: true, chunks: true, modules: true },
|
||||
transform: (stats) => JSON.stringify(filterStats(stats)),
|
||||
}),
|
||||
isProdBuild &&
|
||||
isStatsBuild &&
|
||||
new RsdoctorRspackPlugin({
|
||||
reportDir: path.join(paths.build_dir, "rsdoctor"),
|
||||
features: ["plugins", "bundle"],
|
||||
supports: {
|
||||
generateTileGraph: true,
|
||||
},
|
||||
}),
|
||||
].filter(Boolean),
|
||||
resolve: {
|
||||
extensions: [".ts", ".js", ".json"],
|
||||
alias: {
|
||||
"lit/static-html$": "lit/static-html.js",
|
||||
"lit/decorators$": "lit/decorators.js",
|
||||
"lit/directive$": "lit/directive.js",
|
||||
"lit/directives/until$": "lit/directives/until.js",
|
||||
"lit/directives/class-map$": "lit/directives/class-map.js",
|
||||
"lit/directives/style-map$": "lit/directives/style-map.js",
|
||||
"lit/directives/if-defined$": "lit/directives/if-defined.js",
|
||||
"lit/directives/guard$": "lit/directives/guard.js",
|
||||
"lit/directives/cache$": "lit/directives/cache.js",
|
||||
"lit/directives/join$": "lit/directives/join.js",
|
||||
"lit/directives/repeat$": "lit/directives/repeat.js",
|
||||
"lit/directives/live$": "lit/directives/live.js",
|
||||
"lit/directives/keyed$": "lit/directives/keyed.js",
|
||||
"lit/polyfill-support$": "lit/polyfill-support.js",
|
||||
"@lit-labs/virtualizer/layouts/grid":
|
||||
"@lit-labs/virtualizer/layouts/grid.js",
|
||||
"@lit-labs/virtualizer/polyfills/resize-observer-polyfill/ResizeObserver":
|
||||
"@lit-labs/virtualizer/polyfills/resize-observer-polyfill/ResizeObserver.js",
|
||||
"@lit-labs/observers/resize-controller":
|
||||
"@lit-labs/observers/resize-controller.js",
|
||||
},
|
||||
},
|
||||
output: {
|
||||
module: latestBuild,
|
||||
filename: ({ chunk }) =>
|
||||
!isProdBuild || isStatsBuild || dontHash.has(chunk.name)
|
||||
? "[name].js"
|
||||
: "[name].[contenthash].js",
|
||||
chunkFilename:
|
||||
isProdBuild && !isStatsBuild ? "[name].[contenthash].js" : "[name].js",
|
||||
assetModuleFilename:
|
||||
isProdBuild && !isStatsBuild ? "[id].[contenthash][ext]" : "[id][ext]",
|
||||
crossOriginLoading: "use-credentials",
|
||||
hashFunction: "xxhash64",
|
||||
path: outputPath,
|
||||
publicPath,
|
||||
// To silence warning in worker plugin
|
||||
globalObject: "self",
|
||||
// Since production source maps don't include sources, we need to point to them elsewhere
|
||||
// For dependencies, just provide the path (no source in browser)
|
||||
// Otherwise, point to the raw code on GitHub for browser to load
|
||||
...Object.fromEntries(
|
||||
["", "Fallback"].map((v) => [
|
||||
`devtool${v}ModuleFilenameTemplate`,
|
||||
!isTestBuild && isProdBuild
|
||||
? (info) => {
|
||||
if (
|
||||
!path.isAbsolute(info.absoluteResourcePath) ||
|
||||
!existsSync(info.resourcePath) ||
|
||||
info.resourcePath.startsWith("./node_modules")
|
||||
) {
|
||||
// Source URLs are unknown for dependencies, so we use a relative URL with a
|
||||
// non-existent top directory. This results in a clean source tree in browser
|
||||
// dev tools, and they stay happy getting 404s with valid requests.
|
||||
return `/unknown${path.resolve("/", info.resourcePath)}`;
|
||||
}
|
||||
return new URL(info.resourcePath, bundle.sourceMapURL()).href;
|
||||
}
|
||||
: undefined,
|
||||
])
|
||||
),
|
||||
},
|
||||
experiments: {
|
||||
layers: true,
|
||||
outputModule: true,
|
||||
},
|
||||
};
|
||||
};
|
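A hedged illustration of the `output.filename` rule in the factory above (hash values and the `dontHash` entry are made up):

const filenameFor = (chunkName, contenthash, { isProdBuild, isStatsBuild, dontHash }) =>
  !isProdBuild || isStatsBuild || dontHash.has(chunkName)
    ? `${chunkName}.js`
    : `${chunkName}.${contenthash}.js`;

filenameFor("app", "1a2b3c4d5e6f7a8b", { isProdBuild: true, isStatsBuild: false, dontHash: new Set() });
// -> "app.1a2b3c4d5e6f7a8b.js"
filenameFor("entrypoint", "1a2b3c4d5e6f7a8b", { isProdBuild: true, isStatsBuild: false, dontHash: new Set(["entrypoint"]) });
// -> "entrypoint.js" (kept stable so it can be referenced without knowing the hash)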
||||
|
||||
const createAppConfig = ({
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
isStatsBuild,
|
||||
isTestBuild,
|
||||
}) =>
|
||||
createRspackConfig(
|
||||
bundle.config.app({ isProdBuild, latestBuild, isStatsBuild, isTestBuild })
|
||||
);
|
||||
|
||||
const createDemoConfig = ({ isProdBuild, latestBuild, isStatsBuild }) =>
|
||||
createRspackConfig(
|
||||
bundle.config.demo({ isProdBuild, latestBuild, isStatsBuild })
|
||||
);
|
||||
|
||||
const createCastConfig = ({ isProdBuild, latestBuild }) =>
|
||||
createRspackConfig(bundle.config.cast({ isProdBuild, latestBuild }));
|
||||
|
||||
const createHassioConfig = ({
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
isStatsBuild,
|
||||
isTestBuild,
|
||||
}) =>
|
||||
createRspackConfig(
|
||||
bundle.config.hassio({
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
isStatsBuild,
|
||||
isTestBuild,
|
||||
})
|
||||
);
|
||||
|
||||
const createGalleryConfig = ({ isProdBuild, latestBuild }) =>
|
||||
createRspackConfig(bundle.config.gallery({ isProdBuild, latestBuild }));
|
||||
|
||||
const createLandingPageConfig = ({ isProdBuild, latestBuild }) =>
|
||||
createRspackConfig(bundle.config.landingPage({ isProdBuild, latestBuild }));
|
||||
|
||||
module.exports = {
|
||||
createAppConfig,
|
||||
createDemoConfig,
|
||||
createCastConfig,
|
||||
createHassioConfig,
|
||||
createGalleryConfig,
|
||||
createRspackConfig,
|
||||
createLandingPageConfig,
|
||||
};
|
build-scripts/util.js (new file, 16 lines)
@ -0,0 +1,16 @@
|
||||
const path = require("path");
|
||||
const fs = require("fs");
|
||||
|
||||
// Helper function to map recursively over files in a folder and its subfolders
|
||||
module.exports.mapFiles = function mapFiles(startPath, filter, mapFunc) {
|
||||
const files = fs.readdirSync(startPath);
|
||||
for (let i = 0; i < files.length; i++) {
|
||||
const filename = path.join(startPath, files[i]);
|
||||
const stat = fs.lstatSync(filename);
|
||||
if (stat.isDirectory()) {
|
||||
mapFiles(filename, filter, mapFunc);
|
||||
} else if (filename.indexOf(filter) >= 0) {
|
||||
mapFunc(filename);
|
||||
}
|
||||
}
|
||||
};
|
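Example usage of the `mapFiles` helper above (the directory is hypothetical; the require path assumes a script sitting next to build-scripts/util.js):

const { mapFiles } = require("./util");

// Log every JSON file under the translations output directory, recursively.
mapFiles("./build-translations/output", ".json", (filename) => {
  console.log("found translation file:", filename);
});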
build-scripts/webpack.js (new file, 269 lines)
@ -0,0 +1,269 @@
|
||||
const webpack = require("webpack");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const TerserPlugin = require("terser-webpack-plugin");
|
||||
const WorkboxPlugin = require("workbox-webpack-plugin");
|
||||
const ManifestPlugin = require("webpack-manifest-plugin");
|
||||
const paths = require("./paths.js");
|
||||
const { babelLoaderConfig } = require("./babel.js");
|
||||
|
||||
let version = fs
|
||||
.readFileSync(path.resolve(paths.polymer_dir, "setup.py"), "utf8")
|
||||
.match(/\d{8}\.\d+/);
|
||||
if (!version) {
|
||||
throw Error("Version not found");
|
||||
}
|
||||
version = version[0];
|
||||
|
||||
const createWebpackConfig = ({
|
||||
entry,
|
||||
outputRoot,
|
||||
defineOverlay,
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
isStatsBuild,
|
||||
dontHash,
|
||||
}) => {
|
||||
if (!dontHash) {
|
||||
dontHash = new Set();
|
||||
}
|
||||
return {
|
||||
mode: isProdBuild ? "production" : "development",
|
||||
devtool: isProdBuild ? "source-map" : "inline-cheap-module-source-map",
|
||||
entry,
|
||||
module: {
|
||||
rules: [
|
||||
babelLoaderConfig({ latestBuild }),
|
||||
{
|
||||
test: /\.css$/,
|
||||
use: "raw-loader",
|
||||
},
|
||||
{
|
||||
test: /\.(html)$/,
|
||||
use: {
|
||||
loader: "html-loader",
|
||||
options: {
|
||||
exportAsEs6Default: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
optimization: {
|
||||
minimizer: [
|
||||
new TerserPlugin({
|
||||
cache: true,
|
||||
parallel: true,
|
||||
extractComments: true,
|
||||
sourceMap: true,
|
||||
terserOptions: {
|
||||
safari10: true,
|
||||
ecma: latestBuild ? undefined : 5,
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
plugins: [
|
||||
new ManifestPlugin(),
|
||||
new webpack.DefinePlugin({
|
||||
__DEV__: !isProdBuild,
|
||||
__BUILD__: JSON.stringify(latestBuild ? "latest" : "es5"),
|
||||
__VERSION__: JSON.stringify(version),
|
||||
__DEMO__: false,
|
||||
__STATIC_PATH__: "/static/",
|
||||
"process.env.NODE_ENV": JSON.stringify(
|
||||
isProdBuild ? "production" : "development"
|
||||
),
|
||||
...defineOverlay,
|
||||
}),
|
||||
// Ignore moment.js locales
|
||||
new webpack.IgnorePlugin(/^\.\/locale$/, /moment$/),
|
||||
// Color.js is bloated, it contains all color definitions for all material color sets.
|
||||
new webpack.NormalModuleReplacementPlugin(
|
||||
/@polymer\/paper-styles\/color\.js$/,
|
||||
path.resolve(paths.polymer_dir, "src/util/empty.js")
|
||||
),
|
||||
// Ignore roboto pointing at CDN. We use local font-roboto-local.
|
||||
new webpack.NormalModuleReplacementPlugin(
|
||||
/@polymer\/font-roboto\/roboto\.js$/,
|
||||
path.resolve(paths.polymer_dir, "src/util/empty.js")
|
||||
),
|
||||
// Ignore mwc icons pointing at CDN.
|
||||
new webpack.NormalModuleReplacementPlugin(
|
||||
/@material\/mwc-icon\/mwc-icon-font\.js$/,
|
||||
path.resolve(paths.polymer_dir, "src/util/empty.js")
|
||||
),
|
||||
].filter(Boolean),
|
||||
resolve: {
|
||||
extensions: [".ts", ".js", ".json"],
|
||||
alias: {
|
||||
react: "preact-compat",
|
||||
"react-dom": "preact-compat",
|
||||
// Not necessary unless you consume a module using `createClass`
|
||||
"create-react-class": "preact-compat/lib/create-react-class",
|
||||
// Not necessary unless you consume a module requiring `react-dom-factories`
|
||||
"react-dom-factories": "preact-compat/lib/react-dom-factories",
|
||||
},
|
||||
},
|
||||
output: {
|
||||
filename: ({ chunk }) => {
|
||||
if (!isProdBuild || dontHash.has(chunk.name)) {
|
||||
return `${chunk.name}.js`;
|
||||
}
|
||||
return `${chunk.name}.${chunk.hash.substr(0, 8)}.js`;
|
||||
},
|
||||
chunkFilename:
|
||||
isProdBuild && !isStatsBuild
|
||||
? "chunk.[chunkhash].js"
|
||||
: "[name].chunk.js",
|
||||
path: path.resolve(
|
||||
outputRoot,
|
||||
latestBuild ? "frontend_latest" : "frontend_es5"
|
||||
),
|
||||
publicPath: latestBuild ? "/frontend_latest/" : "/frontend_es5/",
|
||||
// For workerize loader
|
||||
globalObject: "self",
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const createAppConfig = ({ isProdBuild, latestBuild, isStatsBuild }) => {
|
||||
const config = createWebpackConfig({
|
||||
entry: {
|
||||
app: "./src/entrypoints/app.ts",
|
||||
authorize: "./src/entrypoints/authorize.ts",
|
||||
onboarding: "./src/entrypoints/onboarding.ts",
|
||||
core: "./src/entrypoints/core.ts",
|
||||
compatibility: "./src/entrypoints/compatibility.ts",
|
||||
"custom-panel": "./src/entrypoints/custom-panel.ts",
|
||||
"hass-icons": "./src/entrypoints/hass-icons.ts",
|
||||
},
|
||||
outputRoot: paths.root,
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
isStatsBuild,
|
||||
});
|
||||
|
||||
if (latestBuild) {
|
||||
// Create an object mapping browser urls to their paths during build
|
||||
const translationMetadata = require("../build-translations/translationMetadata.json");
|
||||
const workBoxTranslationsTemplatedURLs = {};
|
||||
const englishFilename = `en-${translationMetadata.translations.en.hash}.json`;
|
||||
|
||||
// core
|
||||
workBoxTranslationsTemplatedURLs[
|
||||
`/static/translations/${englishFilename}`
|
||||
] = `build-translations/output/${englishFilename}`;
|
||||
|
||||
translationMetadata.fragments.forEach((fragment) => {
|
||||
workBoxTranslationsTemplatedURLs[
|
||||
`/static/translations/${fragment}/${englishFilename}`
|
||||
] = `build-translations/output/${fragment}/${englishFilename}`;
|
||||
});
|
||||
|
||||
config.plugins.push(
|
||||
new WorkboxPlugin.InjectManifest({
|
||||
swSrc: "./src/entrypoints/service-worker-hass.js",
|
||||
swDest: "service_worker.js",
|
||||
importWorkboxFrom: "local",
|
||||
include: [/\.js$/],
|
||||
templatedURLs: {
|
||||
...workBoxTranslationsTemplatedURLs,
|
||||
"/static/icons/favicon-192x192.png":
|
||||
"public/icons/favicon-192x192.png",
|
||||
"/static/fonts/roboto/Roboto-Light.woff2":
|
||||
"node_modules/roboto-fontface/fonts/roboto/Roboto-Light.woff2",
|
||||
"/static/fonts/roboto/Roboto-Medium.woff2":
|
||||
"node_modules/roboto-fontface/fonts/roboto/Roboto-Medium.woff2",
|
||||
"/static/fonts/roboto/Roboto-Regular.woff2":
|
||||
"node_modules/roboto-fontface/fonts/roboto/Roboto-Regular.woff2",
|
||||
"/static/fonts/roboto/Roboto-Bold.woff2":
|
||||
"node_modules/roboto-fontface/fonts/roboto/Roboto-Bold.woff2",
|
||||
},
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return config;
|
||||
};
|
||||
|
||||
const createDemoConfig = ({ isProdBuild, latestBuild, isStatsBuild }) => {
|
||||
return createWebpackConfig({
|
||||
entry: {
|
||||
main: path.resolve(paths.demo_dir, "src/entrypoint.ts"),
|
||||
compatibility: path.resolve(
|
||||
paths.polymer_dir,
|
||||
"src/entrypoints/compatibility.ts"
|
||||
),
|
||||
},
|
||||
outputRoot: paths.demo_root,
|
||||
defineOverlay: {
|
||||
__VERSION__: JSON.stringify(`DEMO-${version}`),
|
||||
__DEMO__: true,
|
||||
},
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
isStatsBuild,
|
||||
});
|
||||
};
|
||||
|
||||
const createCastConfig = ({ isProdBuild, latestBuild }) => {
|
||||
const entry = {
|
||||
launcher: path.resolve(paths.cast_dir, "src/launcher/entrypoint.ts"),
|
||||
};
|
||||
|
||||
if (latestBuild) {
|
||||
entry.receiver = path.resolve(paths.cast_dir, "src/receiver/entrypoint.ts");
|
||||
}
|
||||
|
||||
return createWebpackConfig({
|
||||
entry,
|
||||
outputRoot: paths.cast_root,
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
});
|
||||
};
|
||||
|
||||
const createHassioConfig = ({ isProdBuild, latestBuild }) => {
|
||||
if (latestBuild) {
|
||||
throw new Error("Hass.io does not support latest build!");
|
||||
}
|
||||
const config = createWebpackConfig({
|
||||
entry: {
|
||||
entrypoint: path.resolve(paths.hassio_dir, "src/entrypoint.ts"),
|
||||
},
|
||||
outputRoot: "",
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
dontHash: new Set(["entrypoint"]),
|
||||
});
|
||||
|
||||
config.output.path = paths.hassio_root;
|
||||
config.output.publicPath = paths.hassio_publicPath;
|
||||
|
||||
return config;
|
||||
};
|
||||
|
||||
const createGalleryConfig = ({ isProdBuild, latestBuild }) => {
|
||||
if (!latestBuild) {
|
||||
throw new Error("Gallery only supports latest build!");
|
||||
}
|
||||
const config = createWebpackConfig({
|
||||
entry: {
|
||||
entrypoint: path.resolve(paths.gallery_dir, "src/entrypoint.js"),
|
||||
},
|
||||
outputRoot: paths.gallery_root,
|
||||
isProdBuild,
|
||||
latestBuild,
|
||||
});
|
||||
|
||||
return config;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
createAppConfig,
|
||||
createDemoConfig,
|
||||
createCastConfig,
|
||||
createHassioConfig,
|
||||
createGalleryConfig,
|
||||
};
|
@ -25,7 +25,7 @@ Home Assistant Cast is made up of two separate applications:
|
||||
|
||||
### Setting dev variables
|
||||
|
||||
Open `src/cast/dev_const.ts` and change `CAST_DEV_APP_ID` to the ID of the app you just created. And set the `CAST_DEV_HASS_URL` to the url of your development machine.
|
||||
Open `src/cast/dev_const.ts` and change `CAST_DEV_APP_ID` to the ID of the app you just created. And set the `CAST_DEV_HASS_URL` to the url of you development machine.
|
||||
|
||||
### Changing configuration
|
||||
|
||||
|
@ -1,9 +0,0 @@
|
||||
# These redirects are handled by Netlify
|
||||
#
|
||||
|
||||
# Some custom cards are not prefixing the instance URL when fetching data
|
||||
# and can end up fetching the data from the Cast domain instead of HA.
|
||||
# This will make sure that some common ones are replaced with a placeholder.
|
||||
/api/camera_proxy/* /images/google-nest-hub.png
|
||||
/api/camera_proxy_stream/* /images/google-nest-hub.png
|
||||
/api/media_player_proxy/* /images/google-nest-hub.png
|
Binary file not shown. (Before: 15 KiB; After: 18 KiB)
Some files were not shown because too many files have changed in this diff.