Compare commits


1 Commit

Author | SHA1 | Message | Date
Paulus Schoutsen | 368973b668 | Add app alias | 2020-06-21 12:37:12 -07:00
2653 changed files with 206113 additions and 298720 deletions

View File

@@ -1,42 +0,0 @@
[modern]
# Modern builds target recent browsers supporting the latest features to minimize transpilation, polyfills, etc.
# It is served to browsers meeting the following requirements:
# - released in the last year + current alpha/beta versions
# - Firefox extended support release (ESR)
# - with global utilization at or above 0.5%
# - must support dynamic import of ES modules
# - exclude browsers no longer being maintained
# - exclude KaiOS, QQ, and UC browsers due to lack of sufficient feature support data
unreleased versions
last 1 year
Firefox ESR
>= 0.5% and supports es6-module-dynamic-import
not dead
not KaiOS > 0
not QQAndroid > 0
not UCAndroid > 0
[legacy]
# Legacy builds are served when modern requirements are not met and support browsers:
# - released in the last 7 years + current alpha/beta versions
# - with global utilization at or above 0.05%
# The latter query ensures that support for popular old browsers is not dropped too early
# (e.g. IE 11, Android 4.4, or Samsung 4).
#
# In addition, legacy browsers must support some minimum features that cannot be polyfilled:
# - ES5 (strict mode)
# - web sockets to communicate with backend
# - inline SVG used widely in buttons, widgets, etc.
# - custom events used for most user interactions
# - CSS flexbox used in the majority of the layout
# Nearly all of these are redundant with the above rules.
# As of May 2023, only web sockets must be added to the query.
unreleased versions
last 7 years
>= 0.05% and supports websockets
[legacy-sw]
# Same as legacy plus supports service workers
unreleased versions
last 7 years
>= 0.05% and supports websockets and supports serviceworkers
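
A quick way to sanity-check what these queries resolve to is the browserslist CLI. A minimal sketch, assuming the [modern]/[legacy] sections above are picked up as browserslist environments (selected via BROWSERSLIST_ENV):

# Resolve the modern query from the repository root
BROWSERSLIST_ENV=modern npx browserslist

# Or pass a query directly
npx browserslist "last 1 year, Firefox ESR, >= 0.5% and supports es6-module-dynamic-import, not dead"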

View File

@@ -1,7 +0,0 @@
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.148.1/containers/python-3/.devcontainer/base.Dockerfile
FROM mcr.microsoft.com/devcontainers/python:3.12
ENV \
DEBIAN_FRONTEND=noninteractive \
DEVCONTAINER=true \
PATH=$PATH:./node_modules/.bin
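
Because PATH gains ./node_modules/.bin, project-local CLIs such as gulp, eslint and tsc can be invoked directly inside the devcontainer, without a yarn or npx prefix, whenever the working directory is the repository root.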

View File

@@ -1,45 +0,0 @@
{
"name": "Home Assistant Frontend",
"build": {
"dockerfile": "Dockerfile",
"context": ".."
},
"appPort": "8124:8123",
"postCreateCommand": "sudo apt update && sudo apt upgrade -y && sudo apt install -y libpcap-dev",
"postStartCommand": "script/bootstrap",
"containerEnv": {
"DEV_CONTAINER": "1",
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
},
"customizations": {
"vscode": {
"extensions": [
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"runem.lit-plugin",
"github.vscode-pull-request-github",
"eamodio.gitlens"
],
"settings": {
"files.eol": "\n",
"editor.tabSize": 2,
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"editor.renderWhitespace": "boundary",
"editor.rulers": [80],
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"files.trimTrailingWhitespace": true,
"terminal.integrated.shell.linux": "/usr/bin/zsh",
"gitlens.showWelcomeOnInstall": false,
"gitlens.showWhatsNewAfterUpgrades": false,
"workbench.startupEditor": "none"
}
}
}
}
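
Note that "appPort": "8124:8123" follows Docker's host:container convention: whatever serves on port 8123 inside the container (Home Assistant's default port) is reachable from the host at http://localhost:8124.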

View File

@@ -1,12 +1,11 @@
{
"extends": [
"airbnb-base",
"airbnb-typescript/base",
"plugin:@typescript-eslint/recommended",
"airbnb-typescript/base",
"plugin:wc/recommended",
"plugin:lit/all",
"plugin:lit-a11y/recommended",
"prettier"
"plugin:lit/recommended",
"prettier",
"prettier/@typescript-eslint"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
@@ -20,7 +19,7 @@
"settings": {
"import/resolver": {
"webpack": {
"config": "./webpack.config.cjs"
"config": "./webpack.config.js"
}
}
},
@@ -30,101 +29,56 @@
"__BUILD__": false,
"__VERSION__": false,
"__STATIC_PATH__": false,
"__SUPERVISOR__": false,
"Polymer": true
"Polymer": true,
"webkitSpeechRecognition": false,
"ResizeObserver": false
},
"env": {
"browser": true,
"es6": true
},
"rules": {
"class-methods-use-this": "off",
"new-cap": "off",
"prefer-template": "off",
"object-shorthand": "off",
"func-names": "off",
"no-underscore-dangle": "off",
"strict": "off",
"no-plusplus": "off",
"no-bitwise": "error",
"comma-dangle": "off",
"vars-on-top": "off",
"no-continue": "off",
"no-param-reassign": "off",
"no-multi-assign": "off",
"no-console": "error",
"radix": "off",
"no-alert": "off",
"no-nested-ternary": "off",
"prefer-destructuring": "off",
"class-methods-use-this": 0,
"new-cap": 0,
"prefer-template": 0,
"object-shorthand": 0,
"func-names": 0,
"prefer-arrow-callback": 0,
"no-underscore-dangle": 0,
"no-var": 0,
"strict": 0,
"prefer-spread": 0,
"no-plusplus": 0,
"no-bitwise": 0,
"comma-dangle": 0,
"vars-on-top": 0,
"no-continue": 0,
"no-param-reassign": 0,
"no-multi-assign": 0,
"radix": 0,
"no-alert": 0,
"no-return-await": 0,
"no-nested-ternary": 0,
"prefer-destructuring": 0,
"no-restricted-globals": [2, "event"],
"prefer-promise-reject-errors": "off",
"import/prefer-default-export": "off",
"import/no-default-export": "off",
"import/no-unresolved": "off",
"import/no-cycle": "off",
"import/extensions": [
"error",
"ignorePackages",
{
"ts": "never",
"js": "never"
}
],
"prefer-promise-reject-errors": 0,
"import/order": 0,
"import/prefer-default-export": 0,
"import/no-unresolved": 0,
"import/no-cycle": 0,
"import/extensions": 0,
"no-restricted-syntax": ["error", "LabeledStatement", "WithStatement"],
"object-curly-newline": "off",
"default-case": "off",
"wc/no-self-class": "off",
"no-shadow": "off",
"@typescript-eslint/camelcase": "off",
"@typescript-eslint/ban-ts-comment": "off",
"@typescript-eslint/no-use-before-define": "off",
"@typescript-eslint/no-non-null-assertion": "off",
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/explicit-function-return-type": "off",
"@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/no-shadow": ["error"],
"@typescript-eslint/naming-convention": [
"off",
{
"selector": "default",
"format": ["camelCase", "snake_case"],
"leadingUnderscore": "allow",
"trailingUnderscore": "allow"
},
{
"selector": ["variable"],
"format": ["camelCase", "snake_case", "UPPER_CASE"],
"leadingUnderscore": "allow",
"trailingUnderscore": "allow"
},
{
"selector": "typeLike",
"format": ["PascalCase"]
}
],
"@typescript-eslint/no-unused-vars": "off",
"unused-imports/no-unused-vars": [
"error",
{
"vars": "all",
"varsIgnorePattern": "^_",
"args": "after-used",
"argsIgnorePattern": "^_",
"ignoreRestSiblings": true
}
],
"unused-imports/no-unused-imports": "error",
"lit/attribute-names": "warn",
"lit/attribute-value-entities": "off",
"lit/no-template-map": "off",
"lit/no-native-attributes": "warn",
"lit/no-this-assign-in-render": "warn",
"lit-a11y/click-events-have-key-events": ["off"],
"lit-a11y/no-autofocus": "off",
"lit-a11y/alt-text": "warn",
"lit-a11y/anchor-is-valid": "warn",
"lit-a11y/role-has-required-aria-attrs": "warn"
"object-curly-newline": 0,
"default-case": 0,
"wc/no-self-class": 0,
"@typescript-eslint/camelcase": 0,
"@typescript-eslint/ban-ts-ignore": 0,
"@typescript-eslint/no-use-before-define": 0,
"@typescript-eslint/no-non-null-assertion": 0,
"@typescript-eslint/no-explicit-any": 0,
"@typescript-eslint/no-unused-vars": 0,
"@typescript-eslint/explicit-function-return-type": 0
},
"plugins": ["unused-imports"]
"plugins": ["disable", "import", "lit", "prettier", "@typescript-eslint"],
"processor": "disable/disable"
}
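
A note for reading the diff above: removed and added lines are interleaved, so both the string severities ("off", "error", "warn") and the numeric ones (0, 2) appear. ESLint treats them as equivalent, e.g. "new-cap": 0 and "new-cap": "off" configure exactly the same thing.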

View File

@@ -51,7 +51,7 @@ DO NOT DELETE ANY TEXT from this template! Otherwise, your issue may be closed w
<!--
Provide details about the versions you are using, which helps us reproduce
and find the issue quicker. Version information is found in the
Home Assistant frontend: Settings -> About.
Home Assistant frontend: Developer tools -> Info.
Browser version and operating system are important! Please try to replicate
your issue in a different browser and be sure to include your findings.
@@ -74,12 +74,12 @@ DO NOT DELETE ANY TEXT from this template! Otherwise, your issue may be closed w
```
## Problem-relevant frontend configuration
## Problem-relevant configuration
<!--
An example configuration that caused the problem for you, e.g. the YAML configuration
of the used cards. Fill this out even if it seems unimportant to you. Please be sure
to remove personal information like passwords, private URLs and other credentials.
An example configuration that caused the problem for you. Fill this out even
if it seems unimportant to you. Please be sure to remove personal information
like passwords, private URLs and other credentials.
-->
```yaml
@@ -89,7 +89,7 @@ DO NOT DELETE ANY TEXT from this template! Otherwise, your issue may be closed w
## Javascript errors shown in your browser console/inspector
<!--
If you come across any Javascript or other error logs, e.g. in your browser
If you come across any JavaScript or other error logs, e.g., in your browser
console/inspector please provide them.
-->

View File

@@ -0,0 +1,26 @@
---
name: Request a feature for the UI, Frontend or Lovelace
about: Request a new feature for the Home Assistant frontend.
labels: feature request
---
<!--
DO NOT DELETE ANY TEXT from this template!
Otherwise, your request may be closed without comment.
-->
## The request
<!--
Describe to our maintainers the feature you would like to see added.
Please be clear and concise and, if possible, provide a screenshot or mockup.
-->
## The alternatives
<!--
Are you currently using, or have you considered alternatives?
If so, could you please describe those?
-->
## Additional information

View File

@@ -1,122 +0,0 @@
name: Report a bug with the UI / Dashboards
description: Report an issue related to the Home Assistant frontend.
labels: bug
body:
- type: markdown
attributes:
value: |
Make sure you are running the [latest version of Home Assistant][releases] before reporting an issue.
If you have a feature or enhancement request for the frontend, please [start a discussion][fr] instead of creating an issue.
**Please do not report issues for custom cards.**
[fr]: https://github.com/home-assistant/frontend/discussions
[releases]: https://github.com/home-assistant/home-assistant/releases
- type: checkboxes
attributes:
label: Checklist
description: Please verify that you've followed these steps
options:
- label: I have updated to the latest available Home Assistant version.
required: true
- label: I have cleared the cache of my browser.
required: true
- label: I have tried a different browser to see if it is related to my browser.
required: true
- label: I have tried reproducing the issue in [safe mode](https://www.home-assistant.io/blog/2023/11/01/release-202311/#restarting-into-safe-mode) to rule out problems with unsupported custom resources.
- type: markdown
attributes:
value: |
## The problem
- type: textarea
validations:
required: true
attributes:
label: Describe the issue you are experiencing
description: Provide a clear and concise description of what the bug is.
- type: textarea
validations:
required: true
attributes:
label: Describe the behavior you expected
description: Describe what you expected to happen or how it should look/behave.
- type: textarea
validations:
required: true
attributes:
label: Steps to reproduce the issue
description: |
Please tell us exactly how to reproduce your issue.
Provide clear and concise step by step instructions and add code snippets if needed.
value: |
1.
2.
3.
...
- type: markdown
attributes:
value: |
## Environment
- type: input
validations:
required: true
attributes:
label: What version of Home Assistant Core has the issue?
placeholder: core-
description: >
Can be found in: [Settings -> About](https://my.home-assistant.io/redirect/info/).
- type: input
attributes:
label: What was the last working version of Home Assistant Core?
placeholder: core-
description: >
If known, otherwise leave blank.
- type: input
attributes:
label: Which browser are you experiencing the issue with?
placeholder: Google Chrome 88.0.4324.150
description: >
Provide the full name and don't forget to add the version!
- type: input
attributes:
label: Which operating system are you using to run this browser?
placeholder: macOS Big Sur (11.1)
description: >
Don't forget to add the version!
- type: markdown
attributes:
value: |
# Details
- type: textarea
attributes:
label: State of relevant entities
description: >
If your issue is about how an entity is shown in the UI, please add the
state and attributes for all situations. You can find this information
at Developer Tools -> States.
render: txt
- type: textarea
attributes:
label: Problem-relevant frontend configuration
description: >
An example configuration that caused the problem for you, e.g., the YAML
configuration of the used cards. Fill this out even if it seems
unimportant to you. Please be sure to remove personal information like
passwords, private URLs and other credentials.
render: yaml
- type: textarea
attributes:
label: Javascript errors shown in your browser console/inspector
description: >
If you come across any Javascript or other error logs, e.g., in your
browser console/inspector please provide them.
render: txt
- type: textarea
attributes:
label: Additional information
description: >
If you have any additional information for us, use the field below.
Please note, you can attach screenshots or screen recordings here, by
dragging and dropping files in the field below.

View File

@@ -1,17 +1,14 @@
blank_issues_enabled: false
contact_links:
- name: Request a feature for the UI / Dashboards
url: https://github.com/home-assistant/frontend/discussions/category_choices
about: Request a new feature for the Home Assistant frontend.
- name: Report a bug that is NOT related to the UI / Dashboards
- name: Report a bug that is NOT related to the UI, Frontend or Lovelace
url: https://github.com/home-assistant/core/issues
about: This is the issue tracker for our frontend. Please report other issues in the backend ("core") repository.
about: This is the issue tracker for our frontend. Please report other issues with the backend repository.
- name: Report incorrect or missing information on our website
url: https://github.com/home-assistant/home-assistant.io/issues
about: Our documentation has its own issue tracker. Please report issues with the website there.
- name: I have a question or need support
url: https://www.home-assistant.io/help
about: We use GitHub for tracking bugs. Check our website for resources on getting help.
about: We use GitHub for tracking bugs, check our website for resources on getting help.
- name: I'm unsure where to go
url: https://www.home-assistant.io/join-chat
about: If you are unsure where to go, then joining our chat is recommended; Just ask!

View File

@@ -2,7 +2,9 @@
You are amazing! Thanks for contributing to our project!
Please, DO NOT DELETE ANY TEXT from this template! (unless instructed).
-->
## Breaking change
<!--
If your PR contains a breaking change for existing users, it is important
to tell them what breaks, how to make it work again and why we did this.
@@ -11,17 +13,17 @@
Note: Remove this section if this PR is NOT a breaking change.
-->
## Proposed change
<!--
Describe the big picture of your changes here to communicate to the
maintainers why we should accept this pull request. If it fixes a bug
or resolves a feature request, be sure to link to that issue or discussion
in the additional information section.
or resolves a feature request, be sure to link to that issue in the
additional information section.
-->
## Type of change
<!--
What type of change does your PR introduce to the Home Assistant frontend?
NOTE: Please, check only 1! box!
@@ -36,6 +38,7 @@
- [ ] Code quality improvements to existing code or addition of tests
## Example configuration
<!--
Supplying a configuration snippet makes it easier for a maintainer to test
your PR.
@@ -46,16 +49,18 @@
```
## Additional information
<!--
Details are important, and help maintainers process your PR.
Please be sure to fill out additional details, if applicable.
-->
- This PR fixes or closes issue: fixes #
- This PR is related to issue or discussion:
- This PR is related to issue:
- Link to documentation pull request:
## Checklist
<!--
Put an `x` in the boxes that apply. You can also fill these out after
creating the PR. If you're unsure about any of them, don't hesitate to ask.

View File

@@ -1,11 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: weekly
time: "06:00"
open-pull-requests-limit: 10
labels:
- Dependencies
- GitHub Actions

.github/labeler.yml
View File

@@ -1,51 +0,0 @@
Build:
- changed-files:
- any-glob-to-any-file:
- build-scripts/**
- .browserslistrc
- gulpfile.js
Cast:
- changed-files:
- any-glob-to-any-file:
- cast/src/**
- src/cast/**
Demo:
- changed-files:
- any-glob-to-any-file:
- demo/src/**
- src/fake_data/**
Design:
- changed-files:
- any-glob-to-any-file:
- gallery/src/**
- src/fake_data/**
Dependencies:
- any:
- changed-files:
# Match when only these files are changed (i.e. don't match PRs that happen to add or remove packages)
- any-glob-to-all-files:
- package.json
- renovate.json
- yarn.lock
- .yarn/**
- .yarnrc.yml
- .nvmrc
# Dependabot and Renovate branches always match (i.e. compatibility tweaks by members considered minor)
- head-branch:
- "^renovate/"
- "^dependabot/"
GitHub Actions:
- changed-files:
- any-glob-to-any-file:
- .github/workflows/**
- .github/*.yml
Supervisor:
- changed-files:
- any-glob-to-any-file:
- hassio/src/**

.github/lock.yml
View File

@@ -0,0 +1,27 @@
# Configuration for Lock Threads - https://github.com/dessant/lock-threads
# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 1
# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: 2020-01-01
# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels: []
# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false
# Comment to post before locking. Set to `false` to disable
lockComment: false
# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false
# Limit to only `issues` or `pulls`
only: pulls
# Optionally, specify configuration settings just for `issues` or `pulls`
issues:
daysUntilLock: 30

View File

@@ -1,8 +1,3 @@
categories:
- title: "Dependency updates"
collapse-after: 3
labels:
- "Dependencies"
template: |
## What's Changed

.github/stale.yml
View File

@@ -0,0 +1,56 @@
# Configuration for probot-stale - https://github.com/probot/stale
# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 90
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
daysUntilClose: 7
# Only issues or pull requests with all of these labels are checked for staleness. Defaults to `[]` (disabled)
onlyLabels: []
# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
exemptLabels:
- feature request
- Help wanted
- to do
# Set to true to ignore issues in a project (defaults to false)
exemptProjects: true
# Set to true to ignore issues in a milestone (defaults to false)
exemptMilestones: true
# Set to true to ignore issues with an assignee (defaults to false)
exemptAssignees: false
# Label to use when marking as stale
staleLabel: stale
# Comment to post when marking as stale. Set to `false` to disable
markComment: >
There hasn't been any activity on this issue recently. Due to the high number
of incoming GitHub notifications, we have to clean some of the old issues,
as many of them have already been resolved with the latest updates.
Please make sure to update to the latest Home Assistant version and check
if that solves the issue. Let us know if that works for you by adding a
comment 👍
This issue now has been marked as stale and will be closed if no further
activity occurs. Thank you for your contributions.
# Comment to post when removing the stale label.
# unmarkComment: >
# Your comment here.
# Comment to post when closing a stale Issue or Pull Request.
# closeComment: >
# Your comment here.
# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 30
# Limit to only `issues` or `pulls`
only: issues

View File

@@ -1,85 +0,0 @@
name: Cast deployment
on:
workflow_dispatch:
schedule:
- cron: "0 0 * * *"
push:
branches:
- master
env:
NODE_OPTIONS: --max_old_space_size=6144
jobs:
deploy_dev:
runs-on: ubuntu-latest
name: Deploy Development
if: github.event_name != 'push'
environment:
name: Cast Development
url: ${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}
steps:
- name: Check out files from GitHub
uses: actions/checkout@v4.2.0
with:
ref: dev
- name: Setup Node
uses: actions/setup-node@v4.0.4
with:
node-version-file: ".nvmrc"
cache: yarn
- name: Install dependencies
run: yarn install --immutable
- name: Build Cast
run: ./node_modules/.bin/gulp build-cast
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Deploy to Netlify
id: deploy
uses: netlify/actions/cli@master
with:
args: deploy --dir=cast/dist --alias dev
env:
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_CAST_SITE_ID }}
deploy_master:
runs-on: ubuntu-latest
name: Deploy Production
if: github.event_name == 'push'
environment:
name: Cast Production
url: ${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}
steps:
- name: Check out files from GitHub
uses: actions/checkout@v4.2.0
with:
ref: master
- name: Setup Node
uses: actions/setup-node@v4.0.4
with:
node-version-file: ".nvmrc"
cache: yarn
- name: Install dependencies
run: yarn install --immutable
- name: Build Cast
run: ./node_modules/.bin/gulp build-cast
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Deploy to Netlify
id: deploy
uses: netlify/actions/cli@master
with:
args: deploy --dir=cast/dist --prod
env:
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_CAST_SITE_ID }}

View File

@@ -10,111 +10,116 @@ on:
- dev
- master
env:
NODE_OPTIONS: --max_old_space_size=6144
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
lint:
name: Lint and check format
runs-on: ubuntu-latest
steps:
- name: Check out files from GitHub
uses: actions/checkout@v4.2.0
- name: Setup Node
uses: actions/setup-node@v4.0.4
uses: actions/checkout@v2
- name: Setting up Node.js
uses: actions/setup-node@v1
with:
node-version-file: ".nvmrc"
cache: yarn
node-version: 12.x
- name: Get yarn cache path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Fetching Yarn cache
uses: actions/cache@v1
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install dependencies
run: yarn install --immutable
- name: Check for duplicate dependencies
run: yarn dedupe --check
- name: Build resources
run: ./node_modules/.bin/gulp gen-icons-json build-translations build-locale-data gather-gallery-pages
- name: Setup lint cache
uses: actions/cache@v4.1.0
with:
path: |
node_modules/.cache/prettier
node_modules/.cache/eslint
node_modules/.cache/typescript
key: lint-${{ github.sha }}
restore-keys: lint-
run: yarn install
env:
CI: true
- name: Build icons
run: ./node_modules/.bin/gulp gen-icons-json
- name: Build translations
run: ./node_modules/.bin/gulp build-translations
- name: Run eslint
run: yarn run lint:eslint --quiet
run: ./node_modules/.bin/eslint '{**/src,src}/**/*.{js,ts,html}' --ignore-path .gitignore
- name: Run tsc
run: yarn run lint:types
- name: Run lit-analyzer
run: yarn run lint:lit --quiet
- name: Run prettier
run: yarn run lint:prettier
run: ./node_modules/.bin/tsc
test:
name: Run tests
runs-on: ubuntu-latest
steps:
- name: Check out files from GitHub
uses: actions/checkout@v4.2.0
- name: Setup Node
uses: actions/setup-node@v4.0.4
uses: actions/checkout@v2
- name: Setting up Node.js
uses: actions/setup-node@v1
with:
node-version-file: ".nvmrc"
cache: yarn
node-version: 12.x
- name: Get yarn cache path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Fetching Yarn cache
uses: actions/cache@v1
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install dependencies
run: yarn install --immutable
- name: Build resources
run: ./node_modules/.bin/gulp gen-icons-json build-translations build-locale-data
- name: Run Tests
run: yarn run test
run: yarn install
env:
CI: true
- name: Run Mocha
run: npm run mocha
build:
name: Build frontend
needs: [lint, test]
runs-on: ubuntu-latest
needs: [lint, test]
steps:
- name: Check out files from GitHub
uses: actions/checkout@v4.2.0
- name: Setup Node
uses: actions/setup-node@v4.0.4
uses: actions/checkout@v2
- name: Setting up Node.js
uses: actions/setup-node@v1
with:
node-version-file: ".nvmrc"
cache: yarn
node-version: 12.x
- name: Get yarn cache path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Fetching Yarn cache
uses: actions/cache@v1
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install dependencies
run: yarn install --immutable
run: yarn install
env:
CI: true
- name: Build Application
run: ./node_modules/.bin/gulp build-app
env:
IS_TEST: "true"
- name: Upload bundle stats
uses: actions/upload-artifact@v4.4.0
with:
name: frontend-bundle-stats
path: build/stats/*.json
if-no-files-found: error
supervisor:
name: Build supervisor
needs: [lint, test]
runs-on: ubuntu-latest
needs: [lint, test]
steps:
- name: Check out files from GitHub
uses: actions/checkout@v4.2.0
- name: Setup Node
uses: actions/setup-node@v4.0.4
uses: actions/checkout@v2
- name: Setting up Node.js
uses: actions/setup-node@v1
with:
node-version-file: ".nvmrc"
cache: yarn
node-version: 12.x
- name: Get yarn cache path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Fetching Yarn cache
uses: actions/cache@v1
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install dependencies
run: yarn install --immutable
run: yarn install
env:
CI: true
- name: Build Application
run: ./node_modules/.bin/gulp build-hassio
env:
IS_TEST: "true"
- name: Upload bundle stats
uses: actions/upload-artifact@v4.4.0
with:
name: supervisor-bundle-stats
path: build/stats/*.json
if-no-files-found: error
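
For reference, the checks in the lint and test jobs above can be reproduced locally with the same commands the workflow runs (a sketch; it assumes dependencies are installed with the Node version pinned in .nvmrc):

# Install dependencies exactly as CI does
yarn install --immutable
yarn dedupe --check

# Generate the resources the linters and tests expect
./node_modules/.bin/gulp gen-icons-json build-translations build-locale-data gather-gallery-pages

# Lint, type-check and test
yarn run lint:eslint --quiet
yarn run lint:types
yarn run lint:lit --quiet
yarn run lint:prettier
yarn run test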

View File

@@ -1,60 +0,0 @@
name: "CodeQL"
on:
push:
branches: [dev, master]
pull_request:
# The branches below must be a subset of the branches above
branches: [dev]
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
# Override automatic language detection by changing the below list
# Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
language: ["javascript"]
# Learn more...
# https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
steps:
- name: Checkout repository
uses: actions/checkout@v4.2.0
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
fetch-depth: 2
# If this run was triggered by a pull request event, then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3

.github/workflows/demo.yaml
View File

@@ -0,0 +1,39 @@
name: Demo
on:
push:
branches:
- dev
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- name: Check out files from GitHub
uses: actions/checkout@v2
- name: Setting up Node.js
uses: actions/setup-node@v1
with:
node-version: 12.x
- name: Get yarn cache path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Fetching Yarn cache
uses: actions/cache@v1
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install dependencies
run: yarn install
env:
CI: true
- name: Build Demo
run: ./node_modules/.bin/gulp build-demo
- name: Deploy to Netlify
uses: netlify/actions/cli@master
env:
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_DEMO_DEV_SITE_ID }}
with:
args: deploy --dir=demo/dist --prod

View File

@@ -1,86 +0,0 @@
name: Demo deployment
on:
workflow_dispatch:
schedule:
- cron: "0 0 * * *"
push:
branches:
- dev
- master
env:
NODE_OPTIONS: --max_old_space_size=6144
jobs:
deploy_dev:
runs-on: ubuntu-latest
name: Demo Development
if: github.event_name != 'push' || github.ref_name != 'master'
environment:
name: Demo Development
url: ${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}
steps:
- name: Check out files from GitHub
uses: actions/checkout@v4.2.0
with:
ref: dev
- name: Setup Node
uses: actions/setup-node@v4.0.4
with:
node-version-file: ".nvmrc"
cache: yarn
- name: Install dependencies
run: yarn install --immutable
- name: Build Demo
run: ./node_modules/.bin/gulp build-demo
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Deploy to Netlify
id: deploy
uses: netlify/actions/cli@master
with:
args: deploy --dir=demo/dist --prod
env:
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_DEMO_DEV_SITE_ID }}
deploy_master:
runs-on: ubuntu-latest
name: Demo Production
if: github.event_name == 'push' && github.ref_name == 'master'
environment:
name: Demo Production
url: ${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}
steps:
- name: Check out files from GitHub
uses: actions/checkout@v4.2.0
with:
ref: master
- name: Setup Node
uses: actions/setup-node@v4.0.4
with:
node-version-file: ".nvmrc"
cache: yarn
- name: Install dependencies
run: yarn install --immutable
- name: Build Demo
run: ./node_modules/.bin/gulp build-demo
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Deploy to Netlify
id: deploy
uses: netlify/actions/cli@master
with:
args: deploy --dir=demo/dist --prod
env:
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_DEMO_SITE_ID }}

View File

@@ -1,42 +0,0 @@
name: Design deployment
on:
workflow_dispatch:
schedule:
- cron: "0 0 * * *"
env:
NODE_OPTIONS: --max_old_space_size=6144
jobs:
deploy:
runs-on: ubuntu-latest
environment:
name: Design
url: ${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}
steps:
- name: Check out files from GitHub
uses: actions/checkout@v4.2.0
- name: Setup Node
uses: actions/setup-node@v4.0.4
with:
node-version-file: ".nvmrc"
cache: yarn
- name: Install dependencies
run: yarn install --immutable
- name: Build Gallery
run: ./node_modules/.bin/gulp build-gallery
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Deploy to Netlify
id: deploy
uses: netlify/actions/cli@master
with:
args: deploy --dir=gallery/dist --prod
env:
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_GALLERY_SITE_ID }}

View File

@@ -1,51 +0,0 @@
name: Design preview
on:
pull_request:
types:
- opened
- synchronize
- reopened
- labeled
branches:
- dev
env:
NODE_OPTIONS: --max_old_space_size=6144
jobs:
preview:
runs-on: ubuntu-latest
# Skip running on forks since it won't have access to secrets
# Skip running PRs without 'needs design preview' label
if: github.repository == 'home-assistant/frontend' && contains(github.event.pull_request.labels.*.name, 'needs design preview')
steps:
- name: Check out files from GitHub
uses: actions/checkout@v4.2.0
- name: Setup Node
uses: actions/setup-node@v4.0.4
with:
node-version-file: ".nvmrc"
cache: yarn
- name: Install dependencies
run: yarn install --immutable
- name: Build Gallery
run: ./node_modules/.bin/gulp build-gallery
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Deploy preview to Netlify
id: deploy
uses: netlify/actions/cli@master
with:
args: deploy --dir=gallery/dist --alias "deploy-preview-${{ github.event.number }}"
env:
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_GALLERY_SITE_ID }}
- name: Generate summary
run: |
echo "${{ steps.deploy.outputs.NETLIFY_LIVE_URL || steps.deploy.outputs.NETLIFY_URL }}" >> "$GITHUB_STEP_SUMMARY"

View File

@@ -1,15 +0,0 @@
name: "Pull Request Labeler"
on: pull_request_target
jobs:
triage:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
- name: Apply labels
uses: actions/labeler@v5.0.0
with:
sync-labels: true

View File

@@ -1,21 +0,0 @@
name: Lock
# yamllint disable-line rule:truthy
on:
schedule:
- cron: "0 * * * *"
jobs:
lock:
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v5.0.1
with:
github-token: ${{ github.token }}
process-only: "issues, prs"
issue-lock-inactive-days: "30"
issue-exclude-created-before: "2020-10-01T00:00:00Z"
issue-lock-reason: ""
pr-lock-inactive-days: "1"
pr-exclude-created-before: "2020-11-01T00:00:00Z"
pr-lock-reason: ""

View File

@@ -1,71 +0,0 @@
name: Nightly
on:
workflow_dispatch:
schedule:
- cron: "0 1 * * *"
env:
PYTHON_VERSION: "3.12"
NODE_OPTIONS: --max_old_space_size=6144
permissions:
actions: none
jobs:
nightly:
name: Nightly
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Setup Node
uses: actions/setup-node@v4.0.4
with:
node-version-file: ".nvmrc"
cache: yarn
- name: Install dependencies
run: yarn install
- name: Download translations
run: ./script/translations_download
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
- name: Bump version
run: script/version_bump.js nightly
- name: Build nightly Python wheels
run: |
pip install build
yarn install
export SKIP_FETCH_NIGHTLY_TRANSLATIONS=1
script/build_frontend
rm -rf dist home_assistant_frontend.egg-info
python3 -m build
- name: Archive translations
run: tar -czvf translations.tar.gz translations
- name: Upload build artifacts
uses: actions/upload-artifact@v4.4.0
with:
name: wheels
path: dist/home_assistant_frontend*.whl
if-no-files-found: error
- name: Upload translations
uses: actions/upload-artifact@v4.4.0
with:
name: translations
path: translations.tar.gz
if-no-files-found: error

View File

@@ -1,25 +0,0 @@
name: RelativeCI
on:
workflow_run:
workflows: [CI]
types:
- completed
jobs:
upload:
name: Upload stats
if: ${{ github.event.workflow_run.conclusion == 'success' }}
strategy:
matrix:
bundle: [frontend, supervisor]
build: [modern, legacy]
runs-on: ubuntu-latest
steps:
- name: Send bundle stats and build information to RelativeCI
uses: relative-ci/agent-action@v2.1.12
with:
key: ${{ secrets[format('RELATIVE_CI_KEY_{0}_{1}', matrix.bundle, matrix.build)] }}
token: ${{ github.token }}
artifactName: ${{ format('{0}-bundle-stats', matrix.bundle) }}
webpackStatsFile: ${{ format('{0}-{1}.json', matrix.bundle, matrix.build) }}

View File

@@ -5,19 +5,10 @@ on:
branches:
- dev
permissions:
contents: read
jobs:
update_release_draft:
permissions:
# write permission for contents is required to create a github release
contents: write
# write permission for pull-requests is required for autolabeler
# otherwise, read permission is required at least
pull-requests: read
runs-on: ubuntu-latest
steps:
- uses: release-drafter/release-drafter@v6.0.0
- uses: release-drafter/release-drafter@v5
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -1,83 +0,0 @@
name: Release
on:
release:
types:
- published
env:
PYTHON_VERSION: "3.12"
NODE_OPTIONS: --max_old_space_size=6144
# Set default workflow permissions
# All scopes not mentioned here are set to no access
# https://docs.github.com/en/actions/security-guides/automatic-token-authentication#permissions-for-the-github_token
permissions:
actions: none
jobs:
release:
name: Release
runs-on: ubuntu-latest
permissions:
contents: write # Required to upload release assets
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.0
- name: Verify version
uses: home-assistant/actions/helpers/verify-version@master
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Setup Node
uses: actions/setup-node@v4.0.4
with:
node-version-file: ".nvmrc"
cache: yarn
- name: Install dependencies
run: yarn install
- name: Download Translations
run: ./script/translations_download
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
- name: Build and release package
run: |
python3 -m pip install twine build
export TWINE_USERNAME="__token__"
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
export SKIP_FETCH_NIGHTLY_TRANSLATIONS=1
script/release
- name: Upload release assets
uses: softprops/action-gh-release@v2.0.8
with:
files: |
dist/*.whl
dist/*.tar.gz
wheels-init:
name: Init wheels build
needs: release
runs-on: ubuntu-latest
steps:
- name: Generate requirements.txt
run: |
# Sleep to give pypi time to populate the new version across mirrors
sleep 240
version=$(echo "${{ github.ref }}" | awk -F"/" '{print $NF}' )
echo "home-assistant-frontend==$version" > ./requirements.txt
- name: Build wheels
uses: home-assistant/wheels@2024.07.1
with:
abi: cp312
tag: musllinux_1_2
arch: amd64
wheels-key: ${{ secrets.WHEELS_KEY }}
requirements: "requirements.txt"

View File

@@ -1,42 +0,0 @@
name: Stale
# yamllint disable-line rule:truthy
on:
schedule:
- cron: "0 * * * *"
jobs:
stale:
runs-on: ubuntu-latest
steps:
- name: 90 days stale policy
uses: actions/stale@v9.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 90
days-before-close: 7
operations-per-run: 25
remove-stale-when-updated: true
stale-issue-label: "stale"
exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,feature-request,feature%20request"
stale-issue-message: >
There hasn't been any activity on this issue recently. Due to the
high number of incoming GitHub notifications, we have to clean some
of the old issues, as many of them have already been resolved with
the latest updates.
Please make sure to update to the latest Home Assistant version and
check if that solves the issue. Let us know if that works for you by
adding a comment 👍
This issue has now been marked as stale and will be closed if no
further activity occurs. Thank you for your contributions.
stale-pr-label: "stale"
exempt-pr-labels: "no-stale"
stale-pr-message: >
There hasn't been any activity on this pull request recently. This
pull request has been automatically marked as stale because of that
and will be closed if no further activity occurs within 7 days.
Thank you for your contributions.

View File

@@ -1,21 +0,0 @@
name: Translations
on:
push:
branches:
- dev
paths:
- src/translations/en.json
jobs:
upload:
name: Upload
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.0
- name: Upload Translations
run: |
export LOKALISE_TOKEN="${{ secrets.LOKALISE_TOKEN }}"
./script/translations_upload_base

.gitignore
View File

@@ -1,23 +1,10 @@
.DS_Store
.reify-cache
# build
build/
dist/
/hass_frontend/
/translations/
# yarn
.yarn/*
!.yarn/patches
!.yarn/releases
!.yarn/plugins
!.yarn/sdks
!.yarn/versions
.pnp.*
/node_modules/
yarn-error.log
build
build-translations/*
node_modules/*
npm-debug.log
.DS_Store
hass_frontend/*
.reify-cache
# Python stuff
*.py[cod]
@@ -27,26 +14,22 @@ npm-debug.log
# venv stuff
pyvenv.cfg
pip-selfcheck.json
/venv/
venv
.venv
lib
bin
dist
# vscode
.vscode/*
!.vscode/extensions.json
!.vscode/launch.json
!.vscode/tasks.json
# Cast dev settings
# Cast dev settings
src/cast/dev_const.ts
# Secrets
.lokalise_token
yarn-error.log
# asdf
#asdf
.tool-versions
# Home Assistant config
/config/
# Jetbrains
/.idea/

.hound.yml
View File

@@ -0,0 +1,6 @@
jshint:
enabled: false
eslint:
enabled: true
config_file: .eslintrc-hound.json

View File

@@ -1 +0,0 @@
yarn run lint-staged --relative --shell "/bin/bash"

.nvmrc
View File

@@ -1 +1 @@
lts/iron
12.1

View File

@@ -1,4 +1,10 @@
CLA.md
CODE_OF_CONDUCT.md
LICENSE.md
PULL_REQUEST_TEMPLATE.md
build
build-translations/*
translations/*
node_modules/*
hass_frontend/*
pip-selfcheck.json
# vscode
.vscode/*
!.vscode/extensions.json

View File

@@ -2,8 +2,7 @@
"recommendations": [
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"runem.lit-plugin",
"github.vscode-pull-request-github",
"eamodio.gitlens"
"bierner.lit-html",
"runem.lit-plugin"
]
}

.vscode/launch.json
View File

@@ -1,42 +0,0 @@
{
// https://github.com/microsoft/vscode-js-debug/blob/master/OPTIONS.md
"configurations": [
{
"name": "Debug Frontend",
"request": "launch",
"type": "pwa-chrome",
"url": "http://localhost:8123/",
"webRoot": "${workspaceFolder}/hass_frontend",
"disableNetworkCache": true,
"preLaunchTask": "Develop Frontend",
"outFiles": ["${workspaceFolder}/hass_frontend/frontend_latest/*.js"]
},
{
"name": "Debug Gallery",
"request": "launch",
"type": "pwa-chrome",
"url": "http://localhost:8100/",
"webRoot": "${workspaceFolder}/gallery/dist",
"disableNetworkCache": true,
"preLaunchTask": "Develop Gallery"
},
{
"name": "Debug Demo",
"request": "launch",
"type": "pwa-chrome",
"url": "http://localhost:8090/",
"webRoot": "${workspaceFolder}/demo/dist",
"disableNetworkCache": true,
"preLaunchTask": "Develop Demo"
},
{
"name": "Debug Cast",
"request": "launch",
"type": "pwa-chrome",
"url": "http://localhost:8080/",
"webRoot": "${workspaceFolder}/cast/dist",
"disableNetworkCache": true,
"preLaunchTask": "Develop Cast"
}
]
}

.vscode/tasks.json
View File

@@ -1,214 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "Develop Frontend",
"type": "gulp",
"task": "develop-app",
// Sync changes here to other tasks until issue resolved
// https://github.com/Microsoft/vscode/issues/61497
"problemMatcher": {
"owner": "ha-build",
"source": "ha-build",
"fileLocation": "absolute",
"severity": "error",
"pattern": [
{
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
"severity": 1,
"file": 2,
"message": 3,
"line": 4,
"column": 5
}
],
"background": {
"activeOnStart": true,
"beginsPattern": "Changes detected. Starting compilation",
"endsPattern": "Build done @"
}
},
"isBackground": true,
"group": {
"kind": "build",
"isDefault": true
},
"runOptions": {
"instanceLimit": 1
}
},
{
"label": "Develop Supervisor panel",
"type": "gulp",
"task": "develop-hassio",
"problemMatcher": {
"owner": "ha-build",
"source": "ha-build",
"fileLocation": "absolute",
"severity": "error",
"pattern": [
{
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
"severity": 1,
"file": 2,
"message": 3,
"line": 4,
"column": 5
}
],
"background": {
"activeOnStart": true,
"beginsPattern": "Changes detected. Starting compilation",
"endsPattern": "Build done @"
}
},
"isBackground": true,
"group": "build",
"runOptions": {
"instanceLimit": 1
}
},
{
"label": "Develop Gallery",
"type": "gulp",
"task": "develop-gallery",
"problemMatcher": {
"owner": "ha-build",
"source": "ha-build",
"fileLocation": "absolute",
"severity": "error",
"pattern": [
{
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
"severity": 1,
"file": 2,
"message": 3,
"line": 4,
"column": 5
}
],
"background": {
"activeOnStart": true,
"beginsPattern": "Changes detected. Starting compilation",
"endsPattern": "Build done @"
}
},
"isBackground": true,
"group": "build",
"runOptions": {
"instanceLimit": 1
}
},
{
"label": "Develop Demo",
"type": "gulp",
"task": "develop-demo",
"problemMatcher": {
"owner": "ha-build",
"source": "ha-build",
"fileLocation": "absolute",
"severity": "error",
"pattern": [
{
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
"severity": 1,
"file": 2,
"message": 3,
"line": 4,
"column": 5
}
],
"background": {
"activeOnStart": true,
"beginsPattern": "Changes detected. Starting compilation",
"endsPattern": "Build done @"
}
},
"isBackground": true,
"group": "build",
"runOptions": {
"instanceLimit": 1
}
},
{
"label": "Develop Cast",
"type": "gulp",
"task": "develop-cast",
"problemMatcher": {
"owner": "ha-build",
"source": "ha-build",
"fileLocation": "absolute",
"severity": "error",
"pattern": [
{
"regexp": "(SyntaxError): (.+): (.+) \\((\\d+):(\\d+)\\)",
"severity": 1,
"file": 2,
"message": 3,
"line": 4,
"column": 5
}
],
"background": {
"activeOnStart": true,
"beginsPattern": "Changes detected. Starting compilation",
"endsPattern": "Build done @"
}
},
"isBackground": true,
"group": "build",
"runOptions": {
"instanceLimit": 1
}
},
{
"label": "Run HA Core in devcontainer",
"type": "shell",
"command": "script/core",
"isBackground": true,
"group": {
"kind": "build",
"isDefault": true
},
"problemMatcher": [],
"runOptions": {
"instanceLimit": 1
}
},
{
"label": "Run HA Core for Supervisor in devcontainer",
"type": "shell",
"command": "SUPERVISOR=${input:supervisorHost} SUPERVISOR_TOKEN=${input:supervisorToken} script/core",
"isBackground": true,
"group": {
"kind": "build",
"isDefault": true
},
"problemMatcher": [],
"runOptions": {
"instanceLimit": 1
}
},
{
"label": "Setup and fetch nightly translations",
"type": "gulp",
"task": "setup-and-fetch-nightly-translations",
"problemMatcher": []
}
],
"inputs": [
{
"id": "supervisorHost",
"type": "promptString",
"description": "The IP of the Supervisor host running the Remote API proxy add-on"
},
{
"id": "supervisorToken",
"type": "promptString",
"description": "The token for the Remote API proxy add-on"
}
]
}
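
The problemMatcher shared by the build tasks above is tuned to Babel-style syntax errors: a compiler line such as SyntaxError: /workspaces/frontend/src/ha-app.ts: Unexpected token (42:10) would be captured with the file, message, line and column groups filled in (the path and message here are purely illustrative).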

View File

@@ -1,34 +0,0 @@
diff --git a/lib/legacy/class.js b/lib/legacy/class.js
index aee2511be1cd9bf900ee552bc98190c1631c57c0..f2f499d68bf52034cac9c28307c99e8ce6b8417d 100644
--- a/lib/legacy/class.js
+++ b/lib/legacy/class.js
@@ -304,17 +304,23 @@ function GenerateClassFromInfo(info, Base, behaviors) {
// only proceed if the generated class' prototype has not been registered.
const generatedProto = PolymerGenerated.prototype;
if (!generatedProto.hasOwnProperty(JSCompiler_renameProperty('__hasRegisterFinished', generatedProto))) {
- generatedProto.__hasRegisterFinished = true;
+ // make sure legacy lifecycle is called on the *element*'s prototype
+ // and not the generated class prototype; if the element has been
+ // extended, these are *not* the same.
+ const proto = Object.getPrototypeOf(this);
+ // Only set flag when generated prototype itself is registered,
+ // as this element may be extended from, and needs to run `registered`
+ // on all behaviors on the subclass as well.
+ if (proto === generatedProto) {
+ generatedProto.__hasRegisterFinished = true;
+ }
// ensure superclass is registered first.
super._registered();
// copy properties onto the generated class lazily if we're optimizing,
- if (legacyOptimizations) {
+ if (legacyOptimizations && !Object.hasOwnProperty(generatedProto, '__hasCopiedProperties')) {
+ generatedProto.__hasCopiedProperties = true;
copyPropertiesToProto(generatedProto);
}
- // make sure legacy lifecycle is called on the *element*'s prototype
- // and not the generated class prototype; if the element has been
- // extended, these are *not* the same.
- const proto = Object.getPrototypeOf(this);
let list = lifecycle.beforeRegister;
if (list) {
for (let i=0; i < list.length; i++) {
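
The diff above, like the similar package diffs that follow, looks like a vendored patch file of the kind Yarn Berry keeps under .yarn/patches (which the .gitignore earlier in this changeset explicitly un-ignores) and applies through its patch: protocol. A rough sketch of how such a file is typically produced, assuming a Yarn 3+ workflow:

# Check out an editable copy of the dependency
yarn patch @polymer/polymer

# ...edit the files in the temporary folder Yarn prints, then save the patch;
# -s writes the .patch file and rewires the package resolution to use it
yarn patch-commit -s /tmp/<folder-printed-by-yarn-patch>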

View File

@@ -1,18 +0,0 @@
diff --git a/dist/hls.light.mjs b/dist/hls.light.mjs
index eed9d788fafdb159975e1a2eb08ac88ba9c9ac33..ace881935e6665946f1c8110ebd2f739cde4427e 100644
--- a/dist/hls.light.mjs
+++ b/dist/hls.light.mjs
@@ -20523,9 +20523,9 @@ class Hls {
}
Hls.defaultConfig = void 0;
-var KeySystemFormats = empty.KeySystemFormats;
-var KeySystems = empty.KeySystems;
-var SubtitleStreamController = empty.SubtitleStreamController;
-var TimelineController = empty.TimelineController;
+var KeySystemFormats = empty;
+var KeySystems = empty;
+var SubtitleStreamController = empty;
+var TimelineController = empty;
export { AbrController, AttrList, Cues as AudioStreamController, Cues as AudioTrackController, BasePlaylistController, BaseSegment, BaseStreamController, BufferController, Cues as CMCDController, CapLevelController, ChunkMetadata, ContentSteeringController, DateRange, Cues as EMEController, ErrorActionFlags, ErrorController, ErrorDetails, ErrorTypes, Events, FPSController, Fragment, Hls, HlsSkip, HlsUrlParameters, KeySystemFormats, KeySystems, Level, LevelDetails, LevelKey, LoadStats, MetadataSchema, NetworkErrorAction, Part, PlaylistLevelType, SubtitleStreamController, Cues as SubtitleTrackController, TimelineController, Hls as default, getMediaSource, isMSESupported, isSupported };
//# sourceMappingURL=hls.light.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -1,60 +0,0 @@
diff --git a/modular/sortable.core.esm.js b/modular/sortable.core.esm.js
index 8b5e49b011713c8859c669069fbe85ce53974e1d..6a0afc92787157b8a31c38cc5f67dfa526090a00 100644
--- a/modular/sortable.core.esm.js
+++ b/modular/sortable.core.esm.js
@@ -1781,11 +1781,16 @@ Sortable.prototype = /** @lends Sortable.prototype */{
}
if (_onMove(rootEl, el, dragEl, dragRect, target, targetRect, evt, !!target) !== false) {
capture();
- if (elLastChild && elLastChild.nextSibling) {
- // the last draggable element is not the last node
- el.insertBefore(dragEl, elLastChild.nextSibling);
- } else {
- el.appendChild(dragEl);
+ try {
+ if (elLastChild && elLastChild.nextSibling) {
+ // the last draggable element is not the last node
+ el.insertBefore(dragEl, elLastChild.nextSibling);
+ } else {
+ el.appendChild(dragEl);
+ }
+ }
+ catch(err) {
+ return completed(false);
}
parentEl = el; // actualization
@@ -1802,7 +1807,12 @@ Sortable.prototype = /** @lends Sortable.prototype */{
targetRect = getRect(target);
if (_onMove(rootEl, el, dragEl, dragRect, target, targetRect, evt, false) !== false) {
capture();
- el.insertBefore(dragEl, firstChild);
+ try {
+ el.insertBefore(dragEl, firstChild);
+ }
+ catch(err) {
+ return completed(false);
+ }
parentEl = el; // actualization
changed();
@@ -1849,10 +1859,15 @@ Sortable.prototype = /** @lends Sortable.prototype */{
_silent = true;
setTimeout(_unsilent, 30);
capture();
- if (after && !nextSibling) {
- el.appendChild(dragEl);
- } else {
- target.parentNode.insertBefore(dragEl, after ? nextSibling : target);
+ try {
+ if (after && !nextSibling) {
+ el.appendChild(dragEl);
+ } else {
+ target.parentNode.insertBefore(dragEl, after ? nextSibling : target);
+ }
+ }
+ catch(err) {
+ return completed(false);
}
// Undo chrome's scroll adjustment (has no effect on other browsers)

View File

@@ -1,55 +0,0 @@
diff --git a/build/inject-manifest.js b/build/inject-manifest.js
index 60e3d2bb51c11a19fbbedbad65e101082ec41c36..fed6026630f43f86e25446383982cf6fb694313b 100644
--- a/build/inject-manifest.js
+++ b/build/inject-manifest.js
@@ -104,7 +104,7 @@ async function injectManifest(config) {
replaceString: manifestString,
searchString: options.injectionPoint,
});
- filesToWrite[options.swDest] = source;
+ filesToWrite[options.swDest] = source.replace(url, encodeURI(upath_1.default.basename(destPath)));
filesToWrite[destPath] = map;
}
else {
diff --git a/build/lib/translate-url-to-sourcemap-paths.js b/build/lib/translate-url-to-sourcemap-paths.js
index 3220c5474eeac6e8a56ca9b2ac2bd9be48529e43..5f003879a904d4840529a42dd056d288fd213771 100644
--- a/build/lib/translate-url-to-sourcemap-paths.js
+++ b/build/lib/translate-url-to-sourcemap-paths.js
@@ -22,7 +22,7 @@ function translateURLToSourcemapPaths(url, swSrc, swDest) {
const possibleSrcPath = upath_1.default.resolve(upath_1.default.dirname(swSrc), url);
if (fs_extra_1.default.existsSync(possibleSrcPath)) {
srcPath = possibleSrcPath;
- destPath = upath_1.default.resolve(upath_1.default.dirname(swDest), url);
+ destPath = `${swDest}.map`;
}
else {
warning = `${errors_1.errors['cant-find-sourcemap']} ${possibleSrcPath}`;
diff --git a/src/inject-manifest.ts b/src/inject-manifest.ts
index 8795ddcaa77aea7b0356417e4bc4b19e2b3f860c..fcdc68342d9ac53936c9ed40a9ccfc2f5070cad3 100644
--- a/src/inject-manifest.ts
+++ b/src/inject-manifest.ts
@@ -129,7 +129,10 @@ export async function injectManifest(
searchString: options.injectionPoint!,
});
- filesToWrite[options.swDest] = source;
+ filesToWrite[options.swDest] = source.replace(
+ url!,
+ encodeURI(upath.basename(destPath)),
+ );
filesToWrite[destPath] = map;
} else {
// If there's no sourcemap associated with swSrc, a simple string
diff --git a/src/lib/translate-url-to-sourcemap-paths.ts b/src/lib/translate-url-to-sourcemap-paths.ts
index 072eac40d4ef5d095a01cb7f7e392a9e034853bd..f0bbe69e88ef3a415de18a7e9cb264daea273d71 100644
--- a/src/lib/translate-url-to-sourcemap-paths.ts
+++ b/src/lib/translate-url-to-sourcemap-paths.ts
@@ -28,7 +28,7 @@ export function translateURLToSourcemapPaths(
const possibleSrcPath = upath.resolve(upath.dirname(swSrc), url);
if (fse.existsSync(possibleSrcPath)) {
srcPath = possibleSrcPath;
- destPath = upath.resolve(upath.dirname(swDest), url);
+ destPath = `${swDest}.map`;
} else {
warning = `${errors['cant-find-sourcemap']} ${possibleSrcPath}`;
}

File diff suppressed because one or more lines are too long

View File

@@ -1,9 +0,0 @@
compressionLevel: mixed
defaultSemverRangePrefix: ""
enableGlobalCache: false
nodeLinker: node-modules
yarnPath: .yarn/releases/yarn-4.5.0.cjs

View File

@@ -1,4 +1,5 @@
include README.md
include LICENSE.md
graft hass_frontend
graft hass_frontend_es5
recursive-exclude * *.py[co]

View File

@@ -2,7 +2,7 @@
This is the repository for the official [Home Assistant](https://home-assistant.io) frontend.
[![Screenshot of the frontend](https://raw.githubusercontent.com/home-assistant/frontend/master/docs/screenshot.png)](https://demo.home-assistant.io/)
[![Screenshot of the frontend](https://raw.githubusercontent.com/home-assistant/home-assistant-polymer/master/docs/screenshot.png)](https://demo.home-assistant.io/)
- [View demo of Home Assistant](https://demo.home-assistant.io/)
- [More information about Home Assistant](https://home-assistant.io)
@@ -14,7 +14,7 @@ This is the repository for the official [Home Assistant](https://home-assistant.
- Development: [Instructions](https://developers.home-assistant.io/docs/frontend/development/)
- Production build: `script/build_frontend`
- Gallery: `cd gallery && script/develop_gallery`
- Supervisor: [Instructions](https://developers.home-assistant.io/docs/supervisor/developing)
- Hass.io: [Instructions](https://developers.home-assistant.io/docs/en/hassio_hass.html)
## Frontend development
@@ -26,6 +26,4 @@ A complete guide can be found at the following [link](https://www.home-assistant
Home Assistant is open-source and Apache 2 licensed. Feel free to browse the repository, learn and reuse parts in your own projects.
We use [BrowserStack](https://www.browserstack.com) to test Home Assistant on a large variety of devices.
[![Home Assistant - A project from the Open Home Foundation](https://www.openhomefoundation.org/badges/home-assistant.png)](https://www.openhomefoundation.org/)
We use [BrowserStack](https://www.browserstack.com) to test Home Assistant on a large variation of devices.

View File

@@ -0,0 +1,30 @@
# https://dev.azure.com/home-assistant
trigger: none
pr: none
schedules:
- cron: "0 0 * * *"
displayName: "build preview"
branches:
include:
- dev
always: true
variables:
- group: netlify
jobs:
- job: 'Netlify_preview'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: |
# Cast
curl -X POST -d {} https://api.netlify.com/build_hooks/${NETLIFY_CAST}
# Demo
curl -X POST -d {} https://api.netlify.com/build_hooks/${NETLIFY_DEMO}
# Gallery
curl -X POST -d {} https://api.netlify.com/build_hooks/${NETLIFY_GALLERY}
displayName: 'Trigger netlify build preview'


@@ -0,0 +1,59 @@
# https://dev.azure.com/home-assistant
trigger:
batch: true
tags:
include:
- "*"
pr: none
variables:
- name: versionWheels
value: '1.10.1-3.7-alpine3.11'
- name: versionNode
value: '12.1'
- group: twine
resources:
repositories:
- repository: azure
type: github
name: 'home-assistant/ci-azure'
endpoint: 'home-assistant'
stages:
- stage: "Validate"
jobs:
- template: templates/azp-job-version.yaml@azure
- stage: "Build"
jobs:
- job: "ReleasePython"
pool:
vmImage: "ubuntu-latest"
steps:
- task: UsePythonVersion@0
displayName: "Use Python 3.7"
inputs:
versionSpec: "3.7"
- task: NodeTool@0
displayName: "Use Node $(versionNode)"
inputs:
versionSpec: "$(versionNode)"
- script: pip install twine wheel
displayName: "Install tools"
- script: |
export TWINE_USERNAME="$(twineUser)"
export TWINE_PASSWORD="$(twinePassword)"
script/release
displayName: "Build and release package"
- stage: "Wheels"
jobs:
- template: templates/azp-job-wheels.yaml@azure
parameters:
builderVersion: '$(versionWheels)'
wheelsRequirement: 'requirement.txt'
preBuild:
- script: |
sleep 240
echo "home-assistant-frontend==$(Build.SourceBranchName)" > requirement.txt


@@ -0,0 +1,70 @@
# https://dev.azure.com/home-assistant
trigger:
batch: true
branches:
include:
- dev
paths:
include:
- translations/en.json
pr: none
schedules:
- cron: "30 0 * * *"
displayName: "frontend translation update"
branches:
include:
- dev
always: true
variables:
- group: translation
resources:
repositories:
- repository: azure
type: github
name: 'home-assistant/ci-azure'
endpoint: 'home-assistant'
jobs:
- job: 'Upload'
pool:
vmImage: 'ubuntu-latest'
steps:
- task: NodeTool@0
displayName: 'Use Node 12.x'
inputs:
versionSpec: '12.x'
- script: |
export LOKALISE_TOKEN="$(lokaliseToken)"
export AZURE_BRANCH="$(Build.SourceBranchName)"
./script/translations_upload_base
displayName: 'Upload Translation'
- job: 'Download'
dependsOn:
- 'Upload'
condition: or(eq(variables['Build.Reason'], 'Schedule'), eq(variables['Build.Reason'], 'Manual'))
pool:
vmImage: 'ubuntu-latest'
steps:
- task: NodeTool@0
displayName: 'Use Node 12.x'
inputs:
versionSpec: '12.x'
- template: templates/azp-step-git-init.yaml@azure
- script: |
export LOKALISE_TOKEN="$(lokaliseToken)"
export AZURE_BRANCH="$(Build.SourceBranchName)"
npm install
./script/translations_download
displayName: 'Download Translation'
- script: |
git checkout dev
git add translation
git commit -am "[ci skip] Translation update"
git push
displayName: 'Update translation'

build-scripts/.eslintrc Normal file

@@ -0,0 +1,7 @@
{
"rules": {
"import/no-extraneous-dependencies": 0,
"no-restricted-syntax": 0,
"no-console": 0
}
}


@@ -1,12 +1,7 @@
{
"extends": "../.eslintrc.json",
"rules": {
"no-console": "off",
"import/no-extraneous-dependencies": "off",
"import/extensions": "off",
"import/no-dynamic-require": "off",
"global-require": "off",
"@typescript-eslint/no-var-requires": "off",
"prefer-arrow-callback": "off"
"import/no-extraneous-dependencies": 0,
"global-require": 0
}
}


@@ -1,39 +0,0 @@
# Bundling Home Assistant Frontend
The Home Assistant build pipeline contains various steps to prepare a build.
- Generating icon files to be included
- Generating translation files to be included
- Converting TypeScript, CSS and JSON files to JavaScript
- Bundling
- Minifying the files
- Generating the HTML entrypoint files
- Generating the service worker
- Compressing the files
## Converting files
Currently in Home Assistant we use a bundler to convert TypeScript, CSS and JSON files to JavaScript files that the browser understands.
We currently rely on Webpack but also have experimental Rollup support. Both of these programs bundle the converted files in both production and development.
For development, bundling is optional. We just want to get the right files in the browser.
Responsibilities of the converter during development:
- Convert TypeScript to JavaScript
- Convert CSS to JavaScript that sets the content as the default export
- Convert JSON to JavaScript that sets the content as the default export
- Make sure import, dynamic import and web worker references work
- Add extensions where missing
- Resolve absolute package imports
- Filter out specific imports/packages
- Replace constants with values
In production, the following responsibilities are added:
- Minify HTML
- Bundle multiple imports so that the browser can fetch fewer files
- Generate a second version that is ES5 compatible
Configuration for all these steps are specified in [bundle.js](bundle.js).
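As an illustration of the converter responsibilities listed above, here is a minimal sketch (not the project's actual configuration) of a Rollup-style transform hook that exposes CSS and JSON files as JavaScript modules with a default export; the plugin name is made up.
// Sketch only: wraps .css and .json sources as JS default exports.
const contentAsDefaultExport = {
  name: "content-as-default-export",
  transform(code, id) {
    if (id.endsWith(".css")) {
      // The stylesheet text becomes the module's default export.
      return { code: `export default ${JSON.stringify(code)};`, map: null };
    }
    if (id.endsWith(".json")) {
      // Re-emit the parsed JSON as a default-exported object.
      return { code: `export default ${JSON.stringify(JSON.parse(code))};`, map: null };
    }
    return null; // leave every other file to the remaining plugins
  },
};
export default contentAsDefaultExport;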


@@ -1,182 +0,0 @@
import defineProvider from "@babel/helper-define-polyfill-provider";
import { join } from "node:path";
import paths from "../paths.cjs";
const POLYFILL_DIR = join(paths.polymer_dir, "src/resources/polyfills");
// List of polyfill keys with supported browser targets for the functionality
const PolyfillSupport = {
// Note states and shadowRoot properties should be supported.
"element-internals": {
android: 90,
chrome: 90,
edge: 90,
firefox: 126,
ios: 17.4,
opera: 76,
opera_mobile: 64,
safari: 17.4,
samsung: 15.0,
},
"element-append": {
android: 54,
chrome: 54,
edge: 17,
firefox: 49,
ios: 10.0,
opera: 41,
opera_mobile: 41,
safari: 10.0,
samsung: 6.0,
},
"element-getattributenames": {
android: 61,
chrome: 61,
edge: 18,
firefox: 45,
ios: 10.3,
opera: 48,
opera_mobile: 45,
safari: 10.1,
samsung: 8.0,
},
"element-toggleattribute": {
android: 69,
chrome: 69,
edge: 18,
firefox: 63,
ios: 12.0,
opera: 56,
opera_mobile: 48,
safari: 12.0,
samsung: 10.0,
},
fetch: {
android: 42,
chrome: 42,
edge: 14,
firefox: 39,
ios: 10.3,
opera: 29,
opera_mobile: 29,
safari: 10.1,
samsung: 4.0,
},
"intl-getcanonicallocales": {
android: 54,
chrome: 54,
edge: 16,
firefox: 48,
ios: 10.3,
opera: 41,
opera_mobile: 41,
safari: 10.1,
samsung: 6.0,
},
"intl-locale": {
android: 74,
chrome: 74,
edge: 79,
firefox: 75,
ios: 14.0,
opera: 62,
opera_mobile: 53,
safari: 14.0,
samsung: 11.0,
},
"intl-other": {
// Not specified (i.e. always try polyfill) since compatibility depends on supported locales
},
proxy: {
android: 49,
chrome: 49,
edge: 12,
firefox: 18,
ios: 10.0,
opera: 36,
opera_mobile: 36,
safari: 10.0,
samsung: 5.0,
},
"resize-observer": {
android: 64,
chrome: 64,
edge: 79,
firefox: 69,
ios: 13.4,
opera: 51,
opera_mobile: 47,
safari: 13.1,
samsung: 9.0,
},
};
// Map of global variables and/or instance and static properties to the
// corresponding polyfill key and actual module to import
const polyfillMap = {
global: {
fetch: { key: "fetch", module: "unfetch/polyfill" },
Proxy: { key: "proxy", module: "proxy-polyfill" },
ResizeObserver: {
key: "resize-observer",
module: join(POLYFILL_DIR, "resize-observer.ts"),
},
},
instance: {
attachInternals: {
key: "element-internals",
module: "element-internals-polyfill",
},
...Object.fromEntries(
["append", "getAttributeNames", "toggleAttribute"].map((prop) => {
const key = `element-${prop.toLowerCase()}`;
return [prop, { key, module: join(POLYFILL_DIR, `${key}.ts`) }];
})
),
},
static: {
Intl: {
getCanonicalLocales: {
key: "intl-getcanonicallocales",
module: join(POLYFILL_DIR, "intl-polyfill.ts"),
},
Locale: {
key: "intl-locale",
module: join(POLYFILL_DIR, "intl-polyfill.ts"),
},
...Object.fromEntries(
[
"DateTimeFormat",
"DisplayNames",
"ListFormat",
"NumberFormat",
"PluralRules",
"RelativeTimeFormat",
].map((obj) => [
obj,
{ key: "intl-other", module: join(POLYFILL_DIR, "intl-polyfill.ts") },
])
),
},
},
};
// Create plugin using the same factory as for CoreJS
export default defineProvider(
({ createMetaResolver, debug, shouldInjectPolyfill }) => {
const resolvePolyfill = createMetaResolver(polyfillMap);
return {
name: "custom-polyfill",
polyfills: PolyfillSupport,
usageGlobal(meta, utils) {
const polyfill = resolvePolyfill(meta);
if (polyfill && shouldInjectPolyfill(polyfill.desc.key)) {
debug(polyfill.desc.key);
utils.injectGlobalImport(polyfill.desc.module);
return true;
}
return false;
},
};
}
);
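For context, a polyfill provider like the one above is registered as an ordinary Babel plugin. The following babel.config sketch mirrors the { method: "usage-global" } usage that appears in bundle.cjs further down; the targets shown here are illustrative only.
// Sketch only; the plugin path matches this repo layout, targets are made up.
module.exports = {
  targets: { safari: "13" },
  plugins: [
    [
      "./build-scripts/babel-plugins/custom-polyfill-plugin.js",
      { method: "usage-global" },
    ],
  ],
};
// With these targets, a file referencing ResizeObserver would roughly get the
// resize-observer polyfill import injected (native support starts at Safari 13.1),
// while a reference to fetch (supported since Safari 10.1) is left untouched.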


@@ -1,168 +0,0 @@
const path = require("path");
// Currently only supports CommonJS modules, as require is synchronous. `import` would need babel running asynchronous.
module.exports = function inlineConstants(babel, options, cwd) {
const t = babel.types;
if (!Array.isArray(options.modules)) {
throw new TypeError(
"babel-plugin-inline-constants: expected a `modules` array to be passed"
);
}
if (options.resolveExtensions && !Array.isArray(options.resolveExtensions)) {
throw new TypeError(
"babel-plugin-inline-constants: expected `resolveExtensions` to be an array"
);
}
const ignoreModuleNotFound = options.ignoreModuleNotFound;
const resolveExtensions = options.resolveExtensions;
const hasRelativeModules = options.modules.some(
(module) => module.startsWith(".") || module.startsWith("/")
);
const modules = Object.fromEntries(
options.modules.map((module) => {
const absolute = module.startsWith(".")
? require.resolve(module, { paths: [cwd] })
: module;
return [absolute, require(absolute)];
})
);
const toLiteral = (value) => {
if (typeof value === "string") {
return t.stringLiteral(value);
}
if (typeof value === "number") {
return t.numericLiteral(value);
}
if (typeof value === "boolean") {
return t.booleanLiteral(value);
}
if (value === null) {
return t.nullLiteral();
}
throw new Error(
"babel-plugin-inline-constants: cannot handle non-literal `" + value + "`"
);
};
const resolveAbsolute = (value, state, resolveExtensionIndex) => {
if (!state.filename) {
throw new TypeError(
"babel-plugin-inline-constants: expected a `filename` to be set for files"
);
}
if (resolveExtensions && resolveExtensionIndex !== undefined) {
value += resolveExtensions[resolveExtensionIndex];
}
try {
return require.resolve(value, { paths: [path.dirname(state.filename)] });
} catch (error) {
if (
error.code === "MODULE_NOT_FOUND" &&
resolveExtensions &&
(resolveExtensionIndex === undefined ||
resolveExtensionIndex < resolveExtensions.length - 1)
) {
const resolveExtensionIdx = (resolveExtensionIndex || -1) + 1;
return resolveAbsolute(value, state, resolveExtensionIdx);
}
if (error.code === "MODULE_NOT_FOUND" && ignoreModuleNotFound) {
return undefined;
}
throw error;
}
};
const importDeclaration = (p, state) => {
if (p.node.type !== "ImportDeclaration") {
return;
}
const absolute =
hasRelativeModules && p.node.source.value.startsWith(".")
? resolveAbsolute(p.node.source.value, state)
: p.node.source.value;
if (!absolute || !(absolute in modules)) {
return;
}
const module = modules[absolute];
for (const specifier of p.node.specifiers) {
if (
specifier.type === "ImportDefaultSpecifier" &&
specifier.local &&
specifier.local.type === "Identifier"
) {
if (!("default" in module)) {
throw new Error(
"babel-plugin-inline-constants: cannot access default export from `" +
p.node.source.value +
"`"
);
}
const variableValue = toLiteral(module.default);
const variable = t.variableDeclarator(
t.identifier(specifier.local.name),
variableValue
);
p.insertBefore({
type: "VariableDeclaration",
kind: "const",
declarations: [variable],
});
} else if (
specifier.type === "ImportSpecifier" &&
specifier.imported &&
specifier.imported.type === "Identifier" &&
specifier.local &&
specifier.local.type === "Identifier"
) {
if (!(specifier.imported.name in module)) {
throw new Error(
"babel-plugin-inline-constants: cannot access `" +
specifier.imported.name +
"` from `" +
p.node.source.value +
"`"
);
}
const variableValue = toLiteral(module[specifier.imported.name]);
const variable = t.variableDeclarator(
t.identifier(specifier.local.name),
variableValue
);
p.insertBefore({
type: "VariableDeclaration",
kind: "const",
declarations: [variable],
});
} else {
throw new Error("Cannot handle specifier `" + specifier.type + "`");
}
}
p.remove();
};
return {
visitor: {
ImportDeclaration: importDeclaration,
},
};
};
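A usage sketch for the plugin above: with options mirroring those in bundle.cjs below, a named import from @mdi/js is replaced by a const holding the literal value (the icon path in the comment is shortened for illustration).
// babel.config sketch; options mirror bundle.cjs.
module.exports = {
  plugins: [
    [
      "./build-scripts/babel-plugins/inline-constants-plugin.cjs",
      { modules: ["@mdi/js"], ignoreModuleNotFound: true },
    ],
  ],
};
// Input:   import { mdiAccount } from "@mdi/js";
// Output:  const mdiAccount = "M12,4A4,4 0 0,1 ...";   (import declaration removed)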


@@ -1,331 +0,0 @@
const path = require("path");
const env = require("./env.cjs");
const paths = require("./paths.cjs");
const { dependencies } = require("../package.json");
const BABEL_PLUGINS = path.join(__dirname, "babel-plugins");
// GitHub base URL to use for production source maps
// Nightly builds use the commit SHA, otherwise assumes there is a tag that matches the version
module.exports.sourceMapURL = () => {
const ref = env.version().endsWith("dev")
? process.env.GITHUB_SHA || "dev"
: env.version();
return `https://raw.githubusercontent.com/home-assistant/frontend/${ref}/`;
};
// Files from NPM Packages that should not be imported
module.exports.ignorePackages = () => [];
// Files from NPM packages that we should replace with empty file
module.exports.emptyPackages = ({ latestBuild, isHassioBuild }) =>
[
// Contains all color definitions for all material color sets.
// We don't use it
require.resolve("@polymer/paper-styles/color.js"),
require.resolve("@polymer/paper-styles/default-theme.js"),
// Loads stuff from a CDN
require.resolve("@polymer/font-roboto/roboto.js"),
require.resolve("@vaadin/vaadin-material-styles/typography.js"),
require.resolve("@vaadin/vaadin-material-styles/font-icons.js"),
// Compatibility not needed for latest builds
latestBuild &&
// wrapped in require.resolve so it blows up if file no longer exists
require.resolve(
path.resolve(paths.polymer_dir, "src/resources/compatibility.ts")
),
// Icons in supervisor conflict with icons in HA so we don't load.
isHassioBuild &&
require.resolve(
path.resolve(paths.polymer_dir, "src/components/ha-icon.ts")
),
isHassioBuild &&
require.resolve(
path.resolve(paths.polymer_dir, "src/components/ha-icon-picker.ts")
),
].filter(Boolean);
module.exports.definedVars = ({ isProdBuild, latestBuild, defineOverlay }) => ({
__DEV__: !isProdBuild,
__BUILD__: JSON.stringify(latestBuild ? "modern" : "legacy"),
__VERSION__: JSON.stringify(env.version()),
__DEMO__: false,
__SUPERVISOR__: false,
__BACKWARDS_COMPAT__: false,
__STATIC_PATH__: "/static/",
"process.env.NODE_ENV": JSON.stringify(
isProdBuild ? "production" : "development"
),
...defineOverlay,
});
module.exports.htmlMinifierOptions = {
caseSensitive: true,
collapseWhitespace: true,
conservativeCollapse: true,
decodeEntities: true,
removeComments: true,
removeRedundantAttributes: true,
minifyCSS: {
compatibility: "*,-properties.zeroUnits",
},
};
module.exports.terserOptions = ({ latestBuild, isTestBuild }) => ({
safari10: !latestBuild,
ecma: latestBuild ? 2015 : 5,
module: latestBuild,
format: { comments: false },
sourceMap: !isTestBuild,
});
module.exports.babelOptions = ({
latestBuild,
isProdBuild,
isTestBuild,
sw,
}) => ({
babelrc: false,
compact: false,
assumptions: {
privateFieldsAsProperties: true,
setPublicClassFields: true,
setSpreadProperties: true,
},
browserslistEnv: latestBuild ? "modern" : `legacy${sw ? "-sw" : ""}`,
presets: [
[
"@babel/preset-env",
{
useBuiltIns: "usage",
corejs: dependencies["core-js"],
bugfixes: true,
shippedProposals: true,
},
],
"@babel/preset-typescript",
],
plugins: [
[
path.join(BABEL_PLUGINS, "inline-constants-plugin.cjs"),
{
modules: ["@mdi/js"],
ignoreModuleNotFound: true,
},
],
// Minify template literals for production
isProdBuild && [
"template-html-minifier",
{
modules: {
...Object.fromEntries(
["lit", "lit-element", "lit-html"].map((m) => [
m,
[
"html",
{ name: "svg", encapsulation: "svg" },
{ name: "css", encapsulation: "style" },
],
])
),
"@polymer/polymer/lib/utils/html-tag.js": ["html"],
},
strictCSS: true,
htmlMinifier: module.exports.htmlMinifierOptions,
failOnError: false, // we can turn this off in case of false positives
},
],
// Import helpers and regenerator from runtime package
[
"@babel/plugin-transform-runtime",
{ version: dependencies["@babel/runtime"] },
],
// Transpile decorators (still in TC39 process)
// Modern browsers support class fields and private methods, but transform is required with the older decorator version dictated by Lit
[
"@babel/plugin-proposal-decorators",
{ version: "2018-09", decoratorsBeforeExport: true },
],
"@babel/plugin-transform-class-properties",
"@babel/plugin-transform-private-methods",
].filter(Boolean),
exclude: [
// \\ for Windows, / for Mac OS and Linux
/node_modules[\\/]core-js/,
/node_modules[\\/]webpack[\\/]buildin/,
],
sourceMaps: !isTestBuild,
overrides: [
{
// Add plugin to inject various polyfills, excluding the polyfills
// themselves to prevent self-injection.
plugins: [
[
path.join(BABEL_PLUGINS, "custom-polyfill-plugin.js"),
{ method: "usage-global" },
],
],
exclude: [
path.join(paths.polymer_dir, "src/resources/polyfills"),
...[
"@formatjs/(?:ecma402-abstract|intl-\\w+)",
"@lit-labs/virtualizer/polyfills",
"@webcomponents/scoped-custom-element-registry",
"element-internals-polyfill",
"proxy-polyfill",
"unfetch",
].map((p) => new RegExp(`/node_modules/${p}/`)),
],
},
{
// Use unambiguous for dependencies so that require() is correctly injected into CommonJS files
// Exclusions are needed in some cases where ES modules have no static imports or exports, such as polyfills
sourceType: "unambiguous",
include: /\/node_modules\//,
exclude: [
"element-internals-polyfill",
"@?lit(?:-labs|-element|-html)?",
].map((p) => new RegExp(`/node_modules/${p}/`)),
},
],
});
const nameSuffix = (latestBuild) => (latestBuild ? "-modern" : "-legacy");
const outputPath = (outputRoot, latestBuild) =>
path.resolve(outputRoot, latestBuild ? "frontend_latest" : "frontend_es5");
const publicPath = (latestBuild, root = "") =>
latestBuild ? `${root}/frontend_latest/` : `${root}/frontend_es5/`;
/*
BundleConfig {
// Object with entrypoints that need to be bundled
entry: { [name: string]: pathToFile },
// Folder where bundled files need to be written
outputPath: string,
// absolute url-path where bundled files can be found
publicPath: string,
// extra definitions that we need to replace in source
defineOverlay: {[name: string]: value },
// if this is a production build
isProdBuild: boolean,
// If we're targeting latest browsers
latestBuild: boolean,
// If we're doing a stats build (create nice chunk names)
isStatsBuild: boolean,
// If it's just a test build in CI, skip time on source map generation
isTestBuild: boolean,
// Names of entrypoints that should not be hashed
dontHash: Set<string>
}
*/
module.exports.config = {
app({ isProdBuild, latestBuild, isStatsBuild, isTestBuild, isWDS }) {
return {
name: "frontend" + nameSuffix(latestBuild),
entry: {
"service-worker":
!env.useRollup() && !latestBuild
? {
import: "./src/entrypoints/service-worker.ts",
layer: "sw",
}
: "./src/entrypoints/service-worker.ts",
app: "./src/entrypoints/app.ts",
authorize: "./src/entrypoints/authorize.ts",
onboarding: "./src/entrypoints/onboarding.ts",
core: "./src/entrypoints/core.ts",
"custom-panel": "./src/entrypoints/custom-panel.ts",
},
outputPath: outputPath(paths.app_output_root, latestBuild),
publicPath: publicPath(latestBuild),
isProdBuild,
latestBuild,
isStatsBuild,
isTestBuild,
isWDS,
};
},
demo({ isProdBuild, latestBuild, isStatsBuild }) {
return {
name: "demo" + nameSuffix(latestBuild),
entry: {
main: path.resolve(paths.demo_dir, "src/entrypoint.ts"),
},
outputPath: outputPath(paths.demo_output_root, latestBuild),
publicPath: publicPath(latestBuild),
defineOverlay: {
__VERSION__: JSON.stringify(`DEMO-${env.version()}`),
__DEMO__: true,
},
isProdBuild,
latestBuild,
isStatsBuild,
};
},
cast({ isProdBuild, latestBuild }) {
const entry = {
launcher: path.resolve(paths.cast_dir, "src/launcher/entrypoint.ts"),
media: path.resolve(paths.cast_dir, "src/media/entrypoint.ts"),
};
if (latestBuild) {
entry.receiver = path.resolve(
paths.cast_dir,
"src/receiver/entrypoint.ts"
);
}
return {
name: "cast" + nameSuffix(latestBuild),
entry,
outputPath: outputPath(paths.cast_output_root, latestBuild),
publicPath: publicPath(latestBuild),
isProdBuild,
latestBuild,
defineOverlay: {
__BACKWARDS_COMPAT__: true,
},
};
},
hassio({ isProdBuild, latestBuild, isStatsBuild, isTestBuild }) {
return {
name: "supervisor" + nameSuffix(latestBuild),
entry: {
entrypoint: path.resolve(paths.hassio_dir, "src/entrypoint.ts"),
},
outputPath: outputPath(paths.hassio_output_root, latestBuild),
publicPath: publicPath(latestBuild, paths.hassio_publicPath),
isProdBuild,
latestBuild,
isStatsBuild,
isTestBuild,
isHassioBuild: true,
defineOverlay: {
__SUPERVISOR__: true,
__STATIC_PATH__: `"${paths.hassio_publicPath}/static/"`,
},
};
},
gallery({ isProdBuild, latestBuild }) {
return {
name: "gallery" + nameSuffix(latestBuild),
entry: {
entrypoint: path.resolve(paths.gallery_dir, "src/entrypoint.js"),
},
outputPath: outputPath(paths.gallery_output_root, latestBuild),
publicPath: publicPath(latestBuild),
isProdBuild,
latestBuild,
defineOverlay: {
__DEMO__: true,
},
};
},
};
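To make the BundleConfig shape documented above concrete, here is a small sketch of how a bundler-specific factory might consume it; buildWebpackConfig is a hypothetical helper, not part of this repository.
// Sketch only; buildWebpackConfig is hypothetical.
const bundle = require("./build-scripts/bundle.cjs");

const appConfig = bundle.config.app({
  isProdBuild: true,
  latestBuild: true,
  isStatsBuild: false,
  isTestBuild: false,
  isWDS: false,
});

// appConfig.entry, appConfig.outputPath and appConfig.publicPath feed the
// bundler factory, e.g. module.exports = buildWebpackConfig(appConfig);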

build-scripts/bundle.js Normal file

@@ -0,0 +1,196 @@
const path = require("path");
const env = require("./env.js");
const paths = require("./paths.js");
// Files from NPM Packages that should not be imported
module.exports.ignorePackages = ({ latestBuild }) => [
// Bloats bundle and it's not used.
path.resolve(require.resolve("moment"), "../locale"),
// Part of yaml.js and only used for !!js functions that we don't use
require.resolve("esprima"),
];
// Files from NPM packages that we should replace with empty file
module.exports.emptyPackages = ({ latestBuild }) =>
[
// Contains all color definitions for all material color sets.
// We don't use it
require.resolve("@polymer/paper-styles/color.js"),
require.resolve("@polymer/paper-styles/default-theme.js"),
// Loads stuff from a CDN
require.resolve("@polymer/font-roboto/roboto.js"),
require.resolve("@vaadin/vaadin-material-styles/font-roboto.js"),
// Compatibility not needed for latest builds
latestBuild &&
// wrapped in require.resolve so it blows up if file no longer exists
require.resolve(
path.resolve(paths.polymer_dir, "src/resources/compatibility.ts")
),
// This polyfill is loaded in workers to support ES5, filter it out.
latestBuild && require.resolve("proxy-polyfill/src/index.js"),
].filter(Boolean);
module.exports.definedVars = ({ isProdBuild, latestBuild, defineOverlay }) => ({
__DEV__: !isProdBuild,
__BUILD__: JSON.stringify(latestBuild ? "latest" : "es5"),
__VERSION__: JSON.stringify(env.version()),
__DEMO__: false,
__BACKWARDS_COMPAT__: false,
__STATIC_PATH__: "/static/",
"process.env.NODE_ENV": JSON.stringify(
isProdBuild ? "production" : "development"
),
...defineOverlay,
});
module.exports.terserOptions = (latestBuild) => ({
safari10: true,
ecma: latestBuild ? undefined : 5,
output: { comments: false },
});
module.exports.babelOptions = ({ latestBuild }) => ({
babelrc: false,
presets: [
!latestBuild && [require("@babel/preset-env").default, { modules: false }],
require("@babel/preset-typescript").default,
].filter(Boolean),
plugins: [
// Part of ES2018. Converts {...a, b: 2} to Object.assign({}, a, {b: 2})
[
"@babel/plugin-proposal-object-rest-spread",
{ loose: true, useBuiltIns: true },
],
// Only support the syntax, Webpack will handle it.
"@babel/syntax-dynamic-import",
"@babel/plugin-proposal-optional-chaining",
"@babel/plugin-proposal-nullish-coalescing-operator",
[
require("@babel/plugin-proposal-decorators").default,
{ decoratorsBeforeExport: true },
],
[
require("@babel/plugin-proposal-class-properties").default,
{ loose: true },
],
],
});
// Are already ES5, cause warnings when babelified.
module.exports.babelExclude = () => [
require.resolve("@mdi/js/mdi.js"),
require.resolve("hls.js"),
];
const outputPath = (outputRoot, latestBuild) =>
path.resolve(outputRoot, latestBuild ? "frontend_latest" : "frontend_es5");
const publicPath = (latestBuild, root = "") =>
latestBuild ? `${root}/frontend_latest/` : `${root}/frontend_es5/`;
/*
BundleConfig {
// Object with entrypoints that need to be bundled
entry: { [name: string]: pathToFile },
// Folder where bundled files need to be written
outputPath: string,
// absolute url-path where bundled files can be found
publicPath: string,
// extra definitions that we need to replace in source
defineOverlay: {[name: string]: value },
// if this is a production build
isProdBuild: boolean,
// If we're targeting latest browsers
latestBuild: boolean,
// If we're doing a stats build (create nice chunk names)
isStatsBuild: boolean,
// Names of entrypoints that should not be hashed
dontHash: Set<string>
}
*/
module.exports.config = {
app({ isProdBuild, latestBuild, isStatsBuild }) {
return {
entry: {
service_worker: "./src/entrypoints/service_worker.ts",
app: "./src/entrypoints/app.ts",
authorize: "./src/entrypoints/authorize.ts",
onboarding: "./src/entrypoints/onboarding.ts",
core: "./src/entrypoints/core.ts",
"custom-panel": "./src/entrypoints/custom-panel.ts",
},
outputPath: outputPath(paths.app_output_root, latestBuild),
publicPath: publicPath(latestBuild),
isProdBuild,
latestBuild,
isStatsBuild,
};
},
demo({ isProdBuild, latestBuild, isStatsBuild }) {
return {
entry: {
main: path.resolve(paths.demo_dir, "src/entrypoint.ts"),
},
outputPath: outputPath(paths.demo_output_root, latestBuild),
publicPath: publicPath(latestBuild),
defineOverlay: {
__VERSION__: JSON.stringify(`DEMO-${env.version()}`),
__DEMO__: true,
},
isProdBuild,
latestBuild,
isStatsBuild,
};
},
cast({ isProdBuild, latestBuild }) {
const entry = {
launcher: path.resolve(paths.cast_dir, "src/launcher/entrypoint.ts"),
};
if (latestBuild) {
entry.receiver = path.resolve(
paths.cast_dir,
"src/receiver/entrypoint.ts"
);
}
return {
entry,
outputPath: outputPath(paths.cast_output_root, latestBuild),
publicPath: publicPath(latestBuild),
isProdBuild,
latestBuild,
defineOverlay: {
__BACKWARDS_COMPAT__: true,
},
};
},
hassio({ isProdBuild, latestBuild }) {
return {
entry: {
entrypoint: path.resolve(paths.hassio_dir, "src/entrypoint.ts"),
},
outputPath: outputPath(paths.hassio_output_root, latestBuild),
publicPath: publicPath(latestBuild, paths.hassio_publicPath),
isProdBuild,
latestBuild,
dontHash: new Set(["entrypoint"]),
};
},
gallery({ isProdBuild, latestBuild }) {
return {
entry: {
entrypoint: path.resolve(paths.gallery_dir, "src/entrypoint.js"),
},
outputPath: outputPath(paths.gallery_output_root, latestBuild),
publicPath: publicPath(latestBuild),
isProdBuild,
latestBuild,
};
},
};


@@ -1,14 +1,11 @@
const fs = require("fs");
const path = require("path");
const paths = require("./paths.cjs");
const paths = require("./paths.js");
module.exports = {
useRollup() {
return process.env.ROLLUP === "1";
},
useWDS() {
return process.env.WDS === "1";
},
isProdBuild() {
return (
process.env.NODE_ENV === "production" || module.exports.isStatsBuild()
@@ -17,7 +14,7 @@ module.exports = {
isStatsBuild() {
return process.env.STATS === "1";
},
isTestBuild() {
isTest() {
return process.env.IS_TEST === "true";
},
isNetlify() {
@@ -25,14 +22,11 @@ module.exports = {
},
version() {
const version = fs
.readFileSync(path.resolve(paths.polymer_dir, "pyproject.toml"), "utf8")
.match(/version\W+=\W"(\d{8}\.\d(?:\.dev)?)"/);
.readFileSync(path.resolve(paths.polymer_dir, "setup.py"), "utf8")
.match(/\d{8}\.\d+/);
if (!version) {
throw Error("Version not found");
}
return version[1];
},
isDevContainer() {
return process.env.DEV_CONTAINER === "1";
return version[0];
},
};
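The two version() variants above differ only in where they read the version from. A small illustration of the pyproject.toml regex (the version string is made up):
// Sketch: what the pyproject.toml variant extracts.
const line = 'version = "20240605.0"';
const match = line.match(/version\W+=\W"(\d{8}\.\d(?:\.dev)?)"/);
console.log(match[1]); // -> "20240605.0"
// The setup.py variant instead matches /\d{8}\.\d+/ anywhere in the file.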


@@ -1,16 +1,17 @@
import gulp from "gulp";
import env from "../env.cjs";
import "./clean.js";
import "./compress.js";
import "./entry-html.js";
import "./gather-static.js";
import "./gen-icons-json.js";
import "./locale-data.js";
import "./rollup.js";
import "./service-worker.js";
import "./translations.js";
import "./wds.js";
import "./webpack.js";
// Run HA develop mode
const gulp = require("gulp");
const env = require("../env");
require("./clean.js");
require("./translations.js");
require("./gen-icons-json.js");
require("./gather-static.js");
require("./compress.js");
require("./webpack.js");
require("./service-worker.js");
require("./entry-html.js");
require("./rollup.js");
gulp.task(
"develop-app",
@@ -22,16 +23,12 @@ gulp.task(
gulp.parallel(
"gen-service-worker-app-dev",
"gen-icons-json",
"gen-pages-app-dev",
"build-translations",
"build-locale-data"
"gen-pages-dev",
"gen-index-app-dev",
"build-translations"
),
"copy-static-app",
env.useWDS()
? "wds-watch-app"
: env.useRollup()
? "rollup-watch-app"
: "webpack-watch-app"
env.useRollup() ? "rollup-watch-app" : "webpack-watch-app"
)
);
@@ -42,11 +39,15 @@ gulp.task(
process.env.NODE_ENV = "production";
},
"clean",
gulp.parallel("gen-icons-json", "build-translations", "build-locale-data"),
gulp.parallel("gen-icons-json", "build-translations"),
"copy-static-app",
env.useRollup() ? "rollup-prod-app" : "webpack-prod-app",
gulp.parallel("gen-pages-app-prod", "gen-service-worker-app-prod"),
// Don't compress running tests
...(env.isTestBuild() ? [] : ["compress-app"])
...// Don't compress running tests
(env.isTest() ? [] : ["compress-app"]),
gulp.parallel(
"gen-pages-prod",
"gen-index-app-prod",
"gen-service-worker-app-prod"
)
)
);
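One variant of the develop-app task above picks its watch task from environment flags defined in env.cjs; a minimal sketch of that selection:
// Sketch of the watch-task selection used by develop-app.
const env = require("./build-scripts/env.cjs");
const watchTask = env.useWDS()
  ? "wds-watch-app" // WDS=1
  : env.useRollup()
    ? "rollup-watch-app" // ROLLUP=1
    : "webpack-watch-app"; // default
console.log(watchTask);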


@@ -1,12 +1,14 @@
import gulp from "gulp";
import env from "../env.cjs";
import "./clean.js";
import "./entry-html.js";
import "./gather-static.js";
import "./rollup.js";
import "./service-worker.js";
import "./translations.js";
import "./webpack.js";
const gulp = require("gulp");
const env = require("../env");
require("./clean.js");
require("./translations.js");
require("./gather-static.js");
require("./webpack.js");
require("./service-worker.js");
require("./entry-html.js");
require("./rollup.js");
gulp.task(
"develop-cast",
@@ -16,9 +18,9 @@ gulp.task(
},
"clean-cast",
"translations-enable-merge-backend",
gulp.parallel("gen-icons-json", "build-translations", "build-locale-data"),
gulp.parallel("gen-icons-json", "build-translations"),
"copy-static-cast",
"gen-pages-cast-dev",
"gen-index-cast-dev",
env.useRollup() ? "rollup-dev-server-cast" : "webpack-dev-server-cast"
)
);
@@ -31,9 +33,9 @@ gulp.task(
},
"clean-cast",
"translations-enable-merge-backend",
gulp.parallel("gen-icons-json", "build-translations", "build-locale-data"),
gulp.parallel("gen-icons-json", "build-translations"),
"copy-static-cast",
env.useRollup() ? "rollup-prod-cast" : "webpack-prod-cast",
"gen-pages-cast-prod"
"gen-index-cast-prod"
)
);


@@ -1,40 +1,36 @@
import { deleteSync } from "del";
import gulp from "gulp";
import paths from "../paths.cjs";
import "./translations.js";
const del = require("del");
const gulp = require("gulp");
const paths = require("../paths");
require("./translations");
gulp.task(
"clean",
gulp.parallel("clean-translations", async () =>
deleteSync([paths.app_output_root, paths.build_dir])
)
gulp.parallel("clean-translations", function cleanOutputAndBuildDir() {
return del([paths.app_output_root, paths.build_dir]);
})
);
gulp.task(
"clean-demo",
gulp.parallel("clean-translations", async () =>
deleteSync([paths.demo_output_root, paths.build_dir])
)
gulp.parallel("clean-translations", function cleanOutputAndBuildDir() {
return del([paths.demo_output_root, paths.build_dir]);
})
);
gulp.task(
"clean-cast",
gulp.parallel("clean-translations", async () =>
deleteSync([paths.cast_output_root, paths.build_dir])
)
gulp.parallel("clean-translations", function cleanOutputAndBuildDir() {
return del([paths.cast_output_root, paths.build_dir]);
})
);
gulp.task("clean-hassio", async () =>
deleteSync([paths.hassio_output_root, paths.build_dir])
);
gulp.task("clean-hassio", function cleanOutputAndBuildDir() {
return del([paths.hassio_output_root, paths.build_dir]);
});
gulp.task(
"clean-gallery",
gulp.parallel("clean-translations", async () =>
deleteSync([
paths.gallery_output_root,
paths.gallery_build,
paths.build_dir,
])
)
gulp.parallel("clean-translations", function cleanOutputAndBuildDir() {
return del([paths.gallery_output_root, paths.build_dir]);
})
);


@@ -1,68 +1,45 @@
// Tasks to compress
import { constants } from "node:zlib";
import gulp from "gulp";
import brotli from "gulp-brotli";
import zopfli from "gulp-zopfli-green";
import paths from "../paths.cjs";
const gulp = require("gulp");
const zopfli = require("gulp-zopfli-green");
const merge = require("merge-stream");
const path = require("path");
const paths = require("../paths");
const filesGlob = "*.{js,json,css,svg,xml}";
const brotliOptions = {
skipLarger: true,
params: {
[constants.BROTLI_PARAM_QUALITY]: constants.BROTLI_MAX_QUALITY,
},
};
const zopfliOptions = { threshold: 150 };
const compressDistBrotli = (rootDir, modernDir, compressServiceWorker = true) =>
gulp
.src(
[
`${modernDir}/**/${filesGlob}`,
compressServiceWorker ? `${rootDir}/sw-modern.js` : undefined,
].filter(Boolean),
{
base: rootDir,
}
)
.pipe(brotli(brotliOptions))
.pipe(gulp.dest(rootDir));
const compressDistZopfli = (rootDir, modernDir, compressModern = false) =>
gulp
.src(
[
`${rootDir}/**/${filesGlob}`,
compressModern ? undefined : `!${modernDir}/**/${filesGlob}`,
`!${rootDir}/{sw-modern,service_worker}.js`,
`${rootDir}/{authorize,onboarding}.html`,
].filter(Boolean),
{ base: rootDir }
)
gulp.task("compress-app", function compressApp() {
const jsLatest = gulp
.src(path.resolve(paths.app_output_latest, "**/*.js"))
.pipe(zopfli(zopfliOptions))
.pipe(gulp.dest(rootDir));
.pipe(gulp.dest(paths.app_output_latest));
const compressAppBrotli = () =>
compressDistBrotli(paths.app_output_root, paths.app_output_latest);
const compressHassioBrotli = () =>
compressDistBrotli(
paths.hassio_output_root,
paths.hassio_output_latest,
false
);
const jsEs5 = gulp
.src(path.resolve(paths.app_output_es5, "**/*.js"))
.pipe(zopfli(zopfliOptions))
.pipe(gulp.dest(paths.app_output_es5));
const compressAppZopfli = () =>
compressDistZopfli(paths.app_output_root, paths.app_output_latest);
const compressHassioZopfli = () =>
compressDistZopfli(
paths.hassio_output_root,
paths.hassio_output_latest,
true
);
const polyfills = gulp
.src(path.resolve(paths.app_output_static, "polyfills/*.js"))
.pipe(zopfli(zopfliOptions))
.pipe(gulp.dest(path.resolve(paths.app_output_static, "polyfills")));
gulp.task("compress-app", gulp.parallel(compressAppBrotli, compressAppZopfli));
gulp.task(
"compress-hassio",
gulp.parallel(compressHassioBrotli, compressHassioZopfli)
);
const translations = gulp
.src(path.resolve(paths.app_output_static, "translations/**/*.json"))
.pipe(zopfli(zopfliOptions))
.pipe(gulp.dest(path.resolve(paths.app_output_static, "translations")));
const icons = gulp
.src(path.resolve(paths.app_output_static, "mdi/*.json"))
.pipe(zopfli(zopfliOptions))
.pipe(gulp.dest(path.resolve(paths.app_output_static, "mdi")));
return merge(jsLatest, jsEs5, polyfills, translations, icons);
});
gulp.task("compress-hassio", function compressApp() {
return gulp
.src(path.resolve(paths.hassio_output_root, "**/*.js"))
.pipe(zopfli(zopfliOptions))
.pipe(gulp.dest(paths.hassio_output_root));
});


@@ -1,13 +1,16 @@
import gulp from "gulp";
import env from "../env.cjs";
import "./clean.js";
import "./entry-html.js";
import "./gather-static.js";
import "./gen-icons-json.js";
import "./rollup.js";
import "./service-worker.js";
import "./translations.js";
import "./webpack.js";
// Run demo develop mode
const gulp = require("gulp");
const env = require("../env");
require("./clean.js");
require("./translations.js");
require("./gen-icons-json.js");
require("./gather-static.js");
require("./webpack.js");
require("./service-worker.js");
require("./entry-html.js");
require("./rollup.js");
gulp.task(
"develop-demo",
@@ -17,12 +20,7 @@ gulp.task(
},
"clean-demo",
"translations-enable-merge-backend",
gulp.parallel(
"gen-icons-json",
"gen-pages-demo-dev",
"build-translations",
"build-locale-data"
),
gulp.parallel("gen-icons-json", "gen-index-demo-dev", "build-translations"),
"copy-static-demo",
env.useRollup() ? "rollup-dev-server-demo" : "webpack-dev-server-demo"
)
@@ -37,9 +35,9 @@ gulp.task(
"clean-demo",
// Cast needs to be backwards compatible and older HA has no translations
"translations-enable-merge-backend",
gulp.parallel("gen-icons-json", "build-translations", "build-locale-data"),
gulp.parallel("gen-icons-json", "build-translations"),
"copy-static-demo",
env.useRollup() ? "rollup-prod-demo" : "webpack-prod-demo",
"gen-pages-demo-prod"
"gen-index-demo-prod"
)
);


@@ -1,180 +0,0 @@
import fs from "fs/promises";
import gulp from "gulp";
import path from "path";
import mapStream from "map-stream";
import transform from "gulp-json-transform";
import { LokaliseApi } from "@lokalise/node-api";
import JSZip from "jszip";
const inDir = "translations";
const inDirFrontend = `${inDir}/frontend`;
const inDirBackend = `${inDir}/backend`;
const srcMeta = "src/translations/translationMetadata.json";
const encoding = "utf8";
function hasHtml(data) {
return /<[a-z][\s\S]*>/i.test(data);
}
function recursiveCheckHasHtml(file, data, errors, recKey) {
Object.keys(data).forEach(function (key) {
if (typeof data[key] === "object") {
const nextRecKey = recKey ? `${recKey}.${key}` : key;
recursiveCheckHasHtml(file, data[key], errors, nextRecKey);
} else if (hasHtml(data[key])) {
errors.push(`HTML found in ${file.path} at key ${recKey}.${key}`);
}
});
}
function checkHtml() {
const errors = [];
return mapStream(function (file, cb) {
const content = file.contents;
let error;
if (content) {
if (hasHtml(String(content))) {
const data = JSON.parse(String(content));
recursiveCheckHasHtml(file, data, errors);
if (errors.length > 0) {
error = errors.join("\r\n");
}
}
}
cb(error, file);
});
}
function convertBackendTranslations(data, _file) {
const output = { component: {} };
if (!data.component) {
return output;
}
Object.keys(data.component).forEach((domain) => {
if (!("entity_component" in data.component[domain])) {
return;
}
output.component[domain] = { entity_component: {} };
Object.keys(data.component[domain].entity_component).forEach((key) => {
output.component[domain].entity_component[key] =
data.component[domain].entity_component[key];
});
});
return output;
}
gulp.task("convert-backend-translations", function () {
return gulp
.src([`${inDirBackend}/*.json`])
.pipe(transform((data, file) => convertBackendTranslations(data, file)))
.pipe(gulp.dest(inDirBackend));
});
gulp.task("check-translations-html", function () {
return gulp
.src([`${inDirFrontend}/*.json`, `${inDirBackend}/*.json`])
.pipe(checkHtml());
});
gulp.task("check-all-files-exist", async function () {
const file = await fs.readFile(srcMeta, { encoding });
const meta = JSON.parse(file);
const writings = [];
Object.keys(meta).forEach((lang) => {
writings.push(
fs.writeFile(`${inDirFrontend}/${lang}.json`, JSON.stringify({}), {
flag: "wx",
}),
fs.writeFile(`${inDirBackend}/${lang}.json`, JSON.stringify({}), {
flag: "wx",
})
);
});
await Promise.allSettled(writings);
});
const lokaliseProjects = {
backend: "130246255a974bd3b5e8a1.51616605",
frontend: "3420425759f6d6d241f598.13594006",
};
gulp.task("fetch-lokalise", async function () {
let apiKey;
try {
apiKey =
process.env.LOKALISE_TOKEN ||
(await fs.readFile(".lokalise_token", { encoding }));
} catch {
throw new Error(
"An Administrator Lokalise API token is required to download the latest set of translations. Place your token in a new file `.lokalise_token` in the repo root directory."
);
}
const lokaliseApi = new LokaliseApi({ apiKey });
const mkdirPromise = Promise.all([
fs.mkdir(inDirFrontend, { recursive: true }),
fs.mkdir(inDirBackend, { recursive: true }),
]);
await Promise.all(
Object.entries(lokaliseProjects).map(([project, projectId]) =>
lokaliseApi
.files()
.download(projectId, {
format: "json",
original_filenames: false,
replace_breaks: false,
json_unescaped_slashes: true,
export_empty_as: "skip",
})
.then((download) => fetch(download.bundle_url))
.then((response) => {
if (response.status === 200 || response.status === 0) {
return response.arrayBuffer();
}
throw new Error(response.statusText);
})
.then(JSZip.loadAsync)
.then(async (contents) => {
await mkdirPromise;
return Promise.all(
Object.keys(contents.files).map(async (filename) => {
const file = contents.file(filename);
if (!file) {
// no file, probably a directory
return Promise.resolve();
}
return file
.async("nodebuffer")
.then((content) =>
fs.writeFile(
path.join(
inDir,
project,
filename.split("/").splice(-1)[0]
),
content,
{ flag: "w", encoding }
)
);
})
);
})
.catch((err) => {
console.error(err);
throw err;
})
)
);
});
gulp.task(
"download-translations",
gulp.series(
"fetch-lokalise",
"convert-backend-translations",
"check-translations-html",
"check-all-files-exist"
)
);


@@ -0,0 +1,95 @@
const del = require("del");
const gulp = require("gulp");
const fs = require("fs");
const mapStream = require("map-stream");
const inDirFrontend = "translations/frontend";
const inDirBackend = "translations/backend";
const downloadDir = "translations/downloads";
const srcMeta = "src/translations/translationMetadata.json";
const encoding = "utf8";
const tasks = [];
function hasHtml(data) {
return /<[a-z][\s\S]*>/i.test(data);
}
function recursiveCheckHasHtml(file, data, errors, recKey) {
Object.keys(data).forEach(function (key) {
if (typeof data[key] === "object") {
const nextRecKey = recKey ? `${recKey}.${key}` : key;
recursiveCheckHasHtml(file, data[key], errors, nextRecKey);
} else if (hasHtml(data[key])) {
errors.push(`HTML found in ${file.path} at key ${recKey}.${key}`);
}
});
}
function checkHtml() {
const errors = [];
return mapStream(function (file, cb) {
const content = file.contents;
let error;
if (content) {
if (hasHtml(String(content))) {
const data = JSON.parse(String(content));
recursiveCheckHasHtml(file, data, errors);
if (errors.length > 0) {
error = errors.join("\r\n");
}
}
}
cb(error, file);
});
}
let taskName = "clean-downloaded-translations";
gulp.task(taskName, function () {
return del([`${downloadDir}/**`]);
});
tasks.push(taskName);
taskName = "check-translations-html";
gulp.task(taskName, function () {
return gulp.src(`${downloadDir}/*.json`).pipe(checkHtml());
});
tasks.push(taskName);
taskName = "check-all-files-exist";
gulp.task(taskName, function () {
const file = fs.readFileSync(srcMeta, { encoding });
const meta = JSON.parse(file);
Object.keys(meta).forEach((lang) => {
if (!fs.existsSync(`${inDirFrontend}/${lang}.json`)) {
fs.writeFileSync(`${inDirFrontend}/${lang}.json`, JSON.stringify({}));
}
if (!fs.existsSync(`${inDirBackend}/${lang}.json`)) {
fs.writeFileSync(`${inDirBackend}/${lang}.json`, JSON.stringify({}));
}
});
return Promise.resolve();
});
tasks.push(taskName);
taskName = "move-downloaded-translations";
gulp.task(taskName, function () {
return gulp.src(`${downloadDir}/*.json`).pipe(gulp.dest(inDirFrontend));
});
tasks.push(taskName);
taskName = "check-downloaded-translations";
gulp.task(
taskName,
gulp.series(
"check-translations-html",
"move-downloaded-translations",
"check-all-files-exist",
"clean-downloaded-translations"
)
);
tasks.push(taskName);
module.exports = tasks;


@@ -1,285 +1,271 @@
// Tasks to generate entry HTML
/* eslint-disable import/no-dynamic-require */
/* eslint-disable global-require */
const gulp = require("gulp");
const fs = require("fs-extra");
const path = require("path");
const template = require("lodash.template");
const minify = require("html-minifier").minify;
const paths = require("../paths.js");
const env = require("../env.js");
import {
applyVersionsToRegexes,
compileRegex,
getPreUserAgentRegexes,
} from "browserslist-useragent-regexp";
import fs from "fs-extra";
import gulp from "gulp";
import { minify } from "html-minifier-terser";
import template from "lodash.template";
import { dirname, extname, resolve } from "node:path";
import { htmlMinifierOptions, terserOptions } from "../bundle.cjs";
import env from "../env.cjs";
import paths from "../paths.cjs";
const templatePath = (tpl) =>
path.resolve(paths.polymer_dir, "src/html/", `${tpl}.html.template`);
// macOS companion app has no way to obtain the Safari version used by WKWebView,
// and it is not in the default user agent string. So we add an additional regex
// to serve modern based on a minimum macOS version. We take the minimum Safari
// major version from browserslist and manually map that to a supported macOS
// version. Note this assumes the user has kept Safari updated.
const HA_MACOS_REGEX =
/Home Assistant\/[\d.]+ \(.+; macOS (\d+)\.(\d+)(?:\.(\d+))?\)/;
const SAFARI_TO_MACOS = {
15: [10, 15, 0],
16: [11, 0, 0],
17: [12, 0, 0],
18: [13, 0, 0],
};
const readFile = (pth) => fs.readFileSync(pth).toString();
const getCommonTemplateVars = () => {
const browserRegexes = getPreUserAgentRegexes({
env: "modern",
allowHigherVersions: true,
mobileToDesktop: true,
throwOnMissing: true,
});
const minSafariVersion = browserRegexes.find(
(regex) => regex.family === "safari"
)?.matchedVersions[0][0];
const minMacOSVersion = SAFARI_TO_MACOS[minSafariVersion];
if (!minMacOSVersion) {
throw Error(
`Could not find minimum MacOS version for Safari ${minSafariVersion}.`
);
}
const haMacOSRegex = applyVersionsToRegexes(
[
{
family: "ha_macos",
regex: HA_MACOS_REGEX,
matchedVersions: [minMacOSVersion],
requestVersions: [minMacOSVersion],
},
],
{ ignorePatch: true, allowHigherVersions: true }
);
return {
useRollup: env.useRollup(),
useWDS: env.useWDS(),
modernRegex: compileRegex(browserRegexes.concat(haMacOSRegex)).toString(),
};
};
const renderTemplate = (templateFile, data = {}) => {
const compiled = template(
fs.readFileSync(templateFile, { encoding: "utf-8" })
);
const renderTemplate = (pth, data = {}, pathFunc = templatePath) => {
const compiled = template(readFile(pathFunc(pth)));
return compiled({
...data,
// Resolve any child/nested templates relative to the parent and pass the same data
renderTemplate: (childTemplate) =>
renderTemplate(resolve(dirname(templateFile), childTemplate), data),
useRollup: env.useRollup(),
renderTemplate,
});
};
const WRAP_TAGS = { ".js": "script", ".css": "style" };
const minifyHtml = (content, ext) => {
const wrapTag = WRAP_TAGS[ext] || "";
const begTag = wrapTag && `<${wrapTag}>`;
const endTag = wrapTag && `</${wrapTag}>`;
return minify(begTag + content + endTag, {
...htmlMinifierOptions,
conservativeCollapse: false,
minifyJS: terserOptions({
latestBuild: false, // Shared scripts should be ES5
isTestBuild: true, // Don't need source maps
}),
}).then((wrapped) =>
wrapTag ? wrapped.slice(begTag.length, -endTag.length) : wrapped
const renderDemoTemplate = (pth, data = {}) =>
renderTemplate(pth, data, (tpl) =>
path.resolve(paths.demo_dir, "src/html/", `${tpl}.html.template`)
);
};
// Function to generate a dev task for each project's configuration
// Note Currently WDS paths are hard-coded to only work for app
const genPagesDevTask =
(
pageEntries,
inputRoot,
outputRoot,
useWDS = false,
inputSub = "src/html",
publicRoot = ""
) =>
async () => {
const commonVars = getCommonTemplateVars();
for (const [page, entries] of Object.entries(pageEntries)) {
const content = renderTemplate(
resolve(inputRoot, inputSub, `${page}.template`),
{
...commonVars,
latestEntryJS: entries.map((entry) =>
useWDS
? `http://localhost:8000/src/entrypoints/${entry}.ts`
: `${publicRoot}/frontend_latest/${entry}.js`
),
es5EntryJS: entries.map(
(entry) => `${publicRoot}/frontend_es5/${entry}.js`
),
latestCustomPanelJS: useWDS
? "http://localhost:8000/src/entrypoints/custom-panel.ts"
: `${publicRoot}/frontend_latest/custom-panel.js`,
es5CustomPanelJS: `${publicRoot}/frontend_es5/custom-panel.js`,
}
);
fs.outputFileSync(resolve(outputRoot, page), content);
}
};
const renderCastTemplate = (pth, data = {}) =>
renderTemplate(pth, data, (tpl) =>
path.resolve(paths.cast_dir, "src/html/", `${tpl}.html.template`)
);
// Same as previous but for production builds
// (includes minification and hashed file names from manifest)
const genPagesProdTask =
(
pageEntries,
inputRoot,
outputRoot,
outputLatest,
outputES5,
inputSub = "src/html"
) =>
async () => {
const latestManifest = fs.readJsonSync(
resolve(outputLatest, "manifest.json")
const renderGalleryTemplate = (pth, data = {}) =>
renderTemplate(pth, data, (tpl) =>
path.resolve(paths.gallery_dir, "src/html/", `${tpl}.html.template`)
);
const minifyHtml = (content) =>
minify(content, {
collapseWhitespace: true,
minifyJS: true,
minifyCSS: true,
removeComments: true,
});
const PAGES = ["onboarding", "authorize"];
gulp.task("gen-pages-dev", (done) => {
for (const page of PAGES) {
const content = renderTemplate(page, {
latestPageJS: `/frontend_latest/${page}.js`,
es5PageJS: `/frontend_es5/${page}.js`,
});
fs.outputFileSync(
path.resolve(paths.app_output_root, `${page}.html`),
content
);
const es5Manifest = outputES5
? fs.readJsonSync(resolve(outputES5, "manifest.json"))
: {};
const commonVars = getCommonTemplateVars();
const minifiedHTML = [];
for (const [page, entries] of Object.entries(pageEntries)) {
const content = renderTemplate(
resolve(inputRoot, inputSub, `${page}.template`),
{
...commonVars,
latestEntryJS: entries.map((entry) => latestManifest[`${entry}.js`]),
es5EntryJS: entries.map((entry) => es5Manifest[`${entry}.js`]),
latestCustomPanelJS: latestManifest["custom-panel.js"],
es5CustomPanelJS: es5Manifest["custom-panel.js"],
}
);
minifiedHTML.push(
minifyHtml(content, extname(page)).then((minified) =>
fs.outputFileSync(resolve(outputRoot, page), minified)
)
);
}
await Promise.all(minifiedHTML);
};
}
done();
});
// Map HTML pages to their required entrypoints
const APP_PAGE_ENTRIES = {
"authorize.html": ["authorize"],
"onboarding.html": ["onboarding"],
"index.html": ["core", "app"],
};
gulp.task(
"gen-pages-app-dev",
genPagesDevTask(
APP_PAGE_ENTRIES,
paths.polymer_dir,
paths.app_output_root,
env.useWDS()
)
);
gulp.task(
"gen-pages-app-prod",
genPagesProdTask(
APP_PAGE_ENTRIES,
paths.polymer_dir,
paths.app_output_root,
gulp.task("gen-pages-prod", (done) => {
const latestManifest = require(path.resolve(
paths.app_output_latest,
paths.app_output_es5
)
);
"manifest.json"
));
const es5Manifest = require(path.resolve(
paths.app_output_es5,
"manifest.json"
));
const CAST_PAGE_ENTRIES = {
"faq.html": ["launcher"],
"index.html": ["launcher"],
"media.html": ["media"],
"receiver.html": ["receiver"],
};
for (const page of PAGES) {
const content = renderTemplate(page, {
latestPageJS: latestManifest[`${page}.js`],
gulp.task(
"gen-pages-cast-dev",
genPagesDevTask(CAST_PAGE_ENTRIES, paths.cast_dir, paths.cast_output_root)
);
es5PageJS: es5Manifest[`${page}.js`],
});
gulp.task(
"gen-pages-cast-prod",
genPagesProdTask(
CAST_PAGE_ENTRIES,
paths.cast_dir,
paths.cast_output_root,
fs.outputFileSync(
path.resolve(paths.app_output_root, `${page}.html`),
minifyHtml(content)
);
}
done();
});
gulp.task("gen-index-app-dev", (done) => {
// In dev mode we don't mangle names, so we hardcode urls. That way we can
// run webpack as last in watch mode, which blocks output.
const content = renderTemplate("index", {
latestAppJS: "/frontend_latest/app.js",
latestCoreJS: "/frontend_latest/core.js",
latestCustomPanelJS: "/frontend_latest/custom-panel.js",
es5AppJS: "/frontend_es5/app.js",
es5CoreJS: "/frontend_es5/core.js",
es5CustomPanelJS: "/frontend_es5/custom-panel.js",
}).replace(/#THEMEC/g, "{{ theme_color }}");
fs.outputFileSync(path.resolve(paths.app_output_root, "index.html"), content);
done();
});
gulp.task("gen-index-app-prod", (done) => {
const latestManifest = require(path.resolve(
paths.app_output_latest,
"manifest.json"
));
const es5Manifest = require(path.resolve(
paths.app_output_es5,
"manifest.json"
));
const content = renderTemplate("index", {
latestAppJS: latestManifest["app.js"],
latestCoreJS: latestManifest["core.js"],
latestCustomPanelJS: latestManifest["custom-panel.js"],
es5AppJS: es5Manifest["app.js"],
es5CoreJS: es5Manifest["core.js"],
es5CustomPanelJS: es5Manifest["custom-panel.js"],
});
const minified = minifyHtml(content).replace(/#THEMEC/g, "{{ theme_color }}");
fs.outputFileSync(
path.resolve(paths.app_output_root, "index.html"),
minified
);
done();
});
gulp.task("gen-index-cast-dev", (done) => {
const contentReceiver = renderCastTemplate("receiver", {
latestReceiverJS: "/frontend_latest/receiver.js",
});
fs.outputFileSync(
path.resolve(paths.cast_output_root, "receiver.html"),
contentReceiver
);
const contentFAQ = renderCastTemplate("launcher-faq", {
latestLauncherJS: "/frontend_latest/launcher.js",
es5LauncherJS: "/frontend_es5/launcher.js",
});
fs.outputFileSync(
path.resolve(paths.cast_output_root, "faq.html"),
contentFAQ
);
const contentLauncher = renderCastTemplate("launcher", {
latestLauncherJS: "/frontend_latest/launcher.js",
es5LauncherJS: "/frontend_es5/launcher.js",
});
fs.outputFileSync(
path.resolve(paths.cast_output_root, "index.html"),
contentLauncher
);
done();
});
gulp.task("gen-index-cast-prod", (done) => {
const latestManifest = require(path.resolve(
paths.cast_output_latest,
paths.cast_output_es5
)
);
"manifest.json"
));
const es5Manifest = require(path.resolve(
paths.cast_output_es5,
"manifest.json"
));
const DEMO_PAGE_ENTRIES = { "index.html": ["main"] };
const contentReceiver = renderCastTemplate("receiver", {
latestReceiverJS: latestManifest["receiver.js"],
});
fs.outputFileSync(
path.resolve(paths.cast_output_root, "receiver.html"),
contentReceiver
);
gulp.task(
"gen-pages-demo-dev",
genPagesDevTask(DEMO_PAGE_ENTRIES, paths.demo_dir, paths.demo_output_root)
);
const contentFAQ = renderCastTemplate("launcher-faq", {
latestLauncherJS: latestManifest["launcher.js"],
es5LauncherJS: es5Manifest["launcher.js"],
});
fs.outputFileSync(
path.resolve(paths.cast_output_root, "faq.html"),
contentFAQ
);
gulp.task(
"gen-pages-demo-prod",
genPagesProdTask(
DEMO_PAGE_ENTRIES,
paths.demo_dir,
paths.demo_output_root,
const contentLauncher = renderCastTemplate("launcher", {
latestLauncherJS: latestManifest["launcher.js"],
es5LauncherJS: es5Manifest["launcher.js"],
});
fs.outputFileSync(
path.resolve(paths.cast_output_root, "index.html"),
contentLauncher
);
done();
});
gulp.task("gen-index-demo-dev", (done) => {
// In dev mode we don't mangle names, so we hardcode urls. That way we can
// run webpack as last in watch mode, which blocks output.
const content = renderDemoTemplate("index", {
latestDemoJS: "/frontend_latest/main.js",
es5DemoJS: "/frontend_es5/main.js",
});
fs.outputFileSync(
path.resolve(paths.demo_output_root, "index.html"),
content
);
done();
});
gulp.task("gen-index-demo-prod", (done) => {
const latestManifest = require(path.resolve(
paths.demo_output_latest,
paths.demo_output_es5
)
);
"manifest.json"
));
const es5Manifest = require(path.resolve(
paths.demo_output_es5,
"manifest.json"
));
const content = renderDemoTemplate("index", {
latestDemoJS: latestManifest["main.js"],
const GALLERY_PAGE_ENTRIES = { "index.html": ["entrypoint"] };
es5DemoJS: es5Manifest["main.js"],
});
const minified = minifyHtml(content);
gulp.task(
"gen-pages-gallery-dev",
genPagesDevTask(
GALLERY_PAGE_ENTRIES,
paths.gallery_dir,
paths.gallery_output_root
)
);
fs.outputFileSync(
path.resolve(paths.demo_output_root, "index.html"),
minified
);
done();
});
gulp.task(
"gen-pages-gallery-prod",
genPagesProdTask(
GALLERY_PAGE_ENTRIES,
paths.gallery_dir,
paths.gallery_output_root,
paths.gallery_output_latest
)
);
gulp.task("gen-index-gallery-dev", (done) => {
// In dev mode we don't mangle names, so we hardcode urls. That way we can
// run webpack as last in watch mode, which blocks output.
const content = renderGalleryTemplate("index", {
latestGalleryJS: "./frontend_latest/entrypoint.js",
});
const HASSIO_PAGE_ENTRIES = { "entrypoint.js": ["entrypoint"] };
fs.outputFileSync(
path.resolve(paths.gallery_output_root, "index.html"),
content
);
done();
});
gulp.task(
"gen-pages-hassio-dev",
genPagesDevTask(
HASSIO_PAGE_ENTRIES,
paths.hassio_dir,
paths.hassio_output_root,
undefined,
"src",
paths.hassio_publicPath
)
);
gulp.task("gen-index-gallery-prod", (done) => {
const latestManifest = require(path.resolve(
paths.gallery_output_latest,
"manifest.json"
));
const content = renderGalleryTemplate("index", {
latestGalleryJS: latestManifest["entrypoint.js"],
});
const minified = minifyHtml(content);
gulp.task(
"gen-pages-hassio-prod",
genPagesProdTask(
HASSIO_PAGE_ENTRIES,
paths.hassio_dir,
paths.hassio_output_root,
paths.hassio_output_latest,
paths.hassio_output_es5,
"src"
)
);
fs.outputFileSync(
path.resolve(paths.gallery_output_root, "index.html"),
minified
);
done();
});
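A tiny sketch of the lodash.template pattern used by renderTemplate above; the template string and data are illustrative, not the actual entry templates.
// Sketch only.
const template = require("lodash.template");
const compiled = template('<script src="<%= latestPageJS %>"></script>');
console.log(compiled({ latestPageJS: "/frontend_latest/onboarding.js" }));
// -> <script src="/frontend_latest/onboarding.js"></script>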


@@ -1,171 +0,0 @@
// Task to download the latest Lokalise translations from the nightly workflow artifacts
import { createOAuthDeviceAuth } from "@octokit/auth-oauth-device";
import { retry } from "@octokit/plugin-retry";
import { Octokit } from "@octokit/rest";
import { deleteAsync } from "del";
import { mkdir, readFile, writeFile } from "fs/promises";
import gulp from "gulp";
import jszip from "jszip";
import path from "path";
import process from "process";
import { extract } from "tar";
const MAX_AGE = 24; // hours
const OWNER = "home-assistant";
const REPO = "frontend";
const WORKFLOW_NAME = "nightly.yaml";
const ARTIFACT_NAME = "translations";
const CLIENT_ID = "Iv1.3914e28cb27834d1";
const EXTRACT_DIR = "translations";
const TOKEN_FILE = path.posix.join(EXTRACT_DIR, "token.json");
const ARTIFACT_FILE = path.posix.join(EXTRACT_DIR, "artifact.json");
let allowTokenSetup = false;
gulp.task("allow-setup-fetch-nightly-translations", (done) => {
allowTokenSetup = true;
done();
});
gulp.task("fetch-nightly-translations", async function () {
// Skip all when environment flag is set (assumes translations are already in place)
if (process.env?.SKIP_FETCH_NIGHTLY_TRANSLATIONS) {
console.log("Skipping fetch due to environment signal");
return;
}
// Read current translations artifact info if it exists,
// and stop if they are not old enough
let currentArtifact;
try {
currentArtifact = JSON.parse(await readFile(ARTIFACT_FILE, "utf-8"));
const currentAge =
(Date.now() - Date.parse(currentArtifact.created_at)) / 3600000;
if (currentAge < MAX_AGE) {
console.log(
"Keeping current translations (only %s hours old)",
currentAge.toFixed(1)
);
return;
}
} catch {
currentArtifact = null;
}
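  // Worked example of the freshness check above: an artifact created 30 hours
  // ago gives (Date.now() - created) / 3600000 = 30, which is not < MAX_AGE (24),
  // so new translations are fetched; at, say, 6 hours the current set is kept.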
// To store file writing promises
const createExtractDir = mkdir(EXTRACT_DIR, { recursive: true });
const writings = [];
// Authenticate to GitHub using GitHub action token if it exists,
// otherwise look for a saved user token or generate a new one if none
let tokenAuth;
if (process.env.GITHUB_TOKEN) {
tokenAuth = { token: process.env.GITHUB_TOKEN };
} else {
try {
tokenAuth = JSON.parse(await readFile(TOKEN_FILE, "utf-8"));
} catch {
if (!allowTokenSetup) {
console.log("No token found so build wil continue with English only");
return;
}
const auth = createOAuthDeviceAuth({
clientType: "github-app",
clientId: CLIENT_ID,
onVerification: (verification) => {
console.log(
"Task needs to authenticate to GitHub to fetch the translations from nightly workflow\n" +
"Please go to %s to authorize this task\n" +
"\nEnter user code: %s\n\n" +
"This code will expire in %s minutes\n" +
"Task will automatically continue after authorization and token will be saved for future use",
verification.verification_uri,
verification.user_code,
(verification.expires_in / 60).toFixed(0)
);
},
});
tokenAuth = await auth({ type: "oauth" });
writings.push(
createExtractDir.then(
writeFile(TOKEN_FILE, JSON.stringify(tokenAuth, null, 2))
)
);
}
}
// Authenticate with token and request workflow runs from GitHub
console.log("Fetching new translations...");
const octokit = new (Octokit.plugin(retry))({
userAgent: "Fetch Nightly Translations",
auth: tokenAuth.token,
});
const workflowRunsResponse = await octokit.rest.actions.listWorkflowRuns({
owner: OWNER,
repo: REPO,
workflow_id: WORKFLOW_NAME,
status: "success",
event: "schedule",
per_page: 1,
exclude_pull_requests: true,
});
if (workflowRunsResponse.data.total_count === 0) {
throw Error("No successful nightly workflow runs found");
}
const latestNightlyRun = workflowRunsResponse.data.workflow_runs[0];
  // Stop if current is already the latest, otherwise find the translations artifact
if (currentArtifact?.workflow_run.id === latestNightlyRun.id) {
console.log("Stopping because current translations are still the latest");
return;
}
const latestArtifact = (
await octokit.actions.listWorkflowRunArtifacts({
owner: OWNER,
repo: REPO,
run_id: latestNightlyRun.id,
})
).data.artifacts.find((artifact) => artifact.name === ARTIFACT_NAME);
if (!latestArtifact) {
throw Error("Latest nightly workflow run has no translations artifact");
}
writings.push(
createExtractDir.then(
writeFile(ARTIFACT_FILE, JSON.stringify(latestArtifact, null, 2))
)
);
// Remove the current translations
const deleteCurrent = Promise.all(writings).then(
deleteAsync([`${EXTRACT_DIR}/*`, `!${ARTIFACT_FILE}`, `!${TOKEN_FILE}`])
);
// Get the download URL and follow the redirect to download (stored as ArrayBuffer)
const downloadResponse = await octokit.actions.downloadArtifact({
owner: OWNER,
repo: REPO,
artifact_id: latestArtifact.id,
archive_format: "zip",
});
if (downloadResponse.status !== 200) {
throw Error("Failure downloading translations artifact");
}
  // The artifact is a tarball, but GitHub wraps it in a zip file
console.log("Unpacking downloaded translations...");
const zip = await jszip.loadAsync(downloadResponse.data);
await deleteCurrent;
const extractStream = zip.file(/.*/)[0].nodeStream().pipe(extract());
await new Promise((resolve, reject) => {
extractStream.on("close", resolve).on("error", reject);
});
});
gulp.task(
"setup-and-fetch-nightly-translations",
gulp.series(
"allow-setup-fetch-nightly-translations",
"fetch-nightly-translations"
)
);

View File

@@ -1,145 +1,16 @@
import fs from "fs";
import { glob } from "glob";
import gulp from "gulp";
import yaml from "js-yaml";
import { marked } from "marked";
import path from "path";
import env from "../env.cjs";
import paths from "../paths.cjs";
import "./clean.js";
import "./entry-html.js";
import "./gather-static.js";
import "./gen-icons-json.js";
import "./rollup.js";
import "./service-worker.js";
import "./translations.js";
import "./webpack.js";
// Run demo develop mode
const gulp = require("gulp");
gulp.task("gather-gallery-pages", async function gatherPages() {
const pageDir = path.resolve(paths.gallery_dir, "src/pages");
const files = await glob(path.resolve(pageDir, "**/*"));
const env = require("../env");
const galleryBuild = path.resolve(paths.gallery_dir, "build");
fs.mkdirSync(galleryBuild, { recursive: true });
let content = "export const PAGES = {\n";
const processed = new Set();
for (const file of files) {
if (fs.lstatSync(file).isDirectory()) {
continue;
}
const pageId = file.substring(pageDir.length + 1, file.lastIndexOf("."));
if (processed.has(pageId)) {
continue;
}
processed.add(pageId);
const [category] = pageId.split("/", 2);
const demoFile = path.resolve(pageDir, `${pageId}.ts`);
const descriptionFile = path.resolve(pageDir, `${pageId}.markdown`);
const hasDemo = fs.existsSync(demoFile);
let hasDescription = fs.existsSync(descriptionFile);
let metadata = {};
if (hasDescription) {
let descriptionContent = fs.readFileSync(descriptionFile, "utf-8");
if (descriptionContent.startsWith("---")) {
const metadataEnd = descriptionContent.indexOf("---", 3);
metadata = yaml.load(descriptionContent.substring(3, metadataEnd));
descriptionContent = descriptionContent
.substring(metadataEnd + 3)
.trim();
}
// If description is just metadata
if (descriptionContent === "") {
hasDescription = false;
} else {
descriptionContent = marked(descriptionContent).replace(/`/g, "\\`");
fs.mkdirSync(path.resolve(galleryBuild, category), { recursive: true });
fs.writeFileSync(
path.resolve(galleryBuild, `${pageId}-description.ts`),
`
import {html} from "lit";
export default html\`${descriptionContent}\`
`
);
}
}
content += ` "${pageId}": {
metadata: ${JSON.stringify(metadata)},
${
hasDescription
? `description: () => import("./${pageId}-description").then(m => m.default),`
: ""
}
${hasDemo ? `demo: () => import("../src/pages/${pageId}")` : ""}
},\n`;
}
content += "};\n";
// Generate sidebar
const sidebarPath = path.resolve(paths.gallery_dir, "sidebar.js");
const sidebar = (await import(sidebarPath)).default;
const pagesToProcess = {};
for (const key of processed) {
const [category, page] = key.split("/", 2);
if (!(category in pagesToProcess)) {
pagesToProcess[category] = new Set();
}
pagesToProcess[category].add(page);
}
for (const group of Object.values(sidebar)) {
const toProcess = pagesToProcess[group.category];
delete pagesToProcess[group.category];
if (!toProcess) {
console.error("Unknown category", group.category);
if (!group.pages) {
group.pages = [];
}
continue;
}
// Any pre-defined groups will not be sorted.
if (group.pages) {
for (const page of group.pages) {
if (!toProcess.delete(page)) {
console.error("Found unreferenced demo", page);
}
}
} else {
group.pages = [];
}
for (const page of Array.from(toProcess).sort()) {
group.pages.push(page);
}
}
for (const [category, pages] of Object.entries(pagesToProcess)) {
sidebar.push({
category,
header: category,
pages: Array.from(pages).sort(),
});
}
content += `export const SIDEBAR = ${JSON.stringify(sidebar, null, 2)};\n`;
fs.writeFileSync(
path.resolve(galleryBuild, "import-pages.ts"),
content,
"utf-8"
);
});
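// Shape of a gallery page description file consumed above (keys illustrative):
//   ---
//   title: Some demo
//   ---
//   Free-form **markdown** describing the page.
// The YAML between the leading "---" markers becomes `metadata`; the remainder
// is rendered with marked() and written out as a lit `html` template module.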
require("./clean.js");
require("./translations.js");
require("./gen-icons-json.js");
require("./gather-static.js");
require("./webpack.js");
require("./service-worker.js");
require("./entry-html.js");
require("./rollup.js");
gulp.task(
"develop-gallery",
@@ -149,28 +20,10 @@ gulp.task(
},
"clean-gallery",
"translations-enable-merge-backend",
gulp.parallel(
"gen-icons-json",
"build-translations",
"build-locale-data",
"gather-gallery-pages"
),
gulp.parallel("gen-icons-json", "build-translations"),
"copy-static-gallery",
"gen-pages-gallery-dev",
gulp.parallel(
env.useRollup()
? "rollup-dev-server-gallery"
: "webpack-dev-server-gallery",
async function watchMarkdownFiles() {
gulp.watch(
[
path.resolve(paths.gallery_dir, "src/pages/**/*.markdown"),
path.resolve(paths.gallery_dir, "sidebar.js"),
],
gulp.series("gather-gallery-pages")
);
}
)
"gen-index-gallery-dev",
env.useRollup() ? "rollup-dev-server-gallery" : "webpack-dev-server-gallery"
)
);
@@ -182,14 +35,9 @@ gulp.task(
},
"clean-gallery",
"translations-enable-merge-backend",
gulp.parallel(
"gen-icons-json",
"build-translations",
"build-locale-data",
"gather-gallery-pages"
),
gulp.parallel("gen-icons-json", "build-translations"),
"copy-static-gallery",
env.useRollup() ? "rollup-prod-gallery" : "webpack-prod-gallery",
"gen-pages-gallery-prod"
"gen-index-gallery-prod"
)
);

View File

@@ -1,10 +1,10 @@
// Gulp task to gather all static files.
import fs from "fs-extra";
import gulp from "gulp";
import path from "path";
import paths from "../paths.cjs";
import env from "../env.cjs";
const gulp = require("gulp");
const path = require("path");
const cpx = require("cpx");
const fs = require("fs-extra");
const paths = require("../paths");
const npmPath = (...parts) =>
path.resolve(paths.polymer_dir, "node_modules", ...parts);
@@ -13,28 +13,19 @@ const polyPath = (...parts) => path.resolve(paths.polymer_dir, ...parts);
const copyFileDir = (fromFile, toDir) =>
fs.copySync(fromFile, path.join(toDir, path.basename(fromFile)));
const genStaticPath =
(staticDir) =>
(...parts) =>
path.resolve(staticDir, ...parts);
const genStaticPath = (staticDir) => (...parts) =>
path.resolve(staticDir, ...parts);
function copyTranslations(staticDir) {
const staticPath = genStaticPath(staticDir);
// Translation output
fs.copySync(
polyPath("build/translations/output"),
polyPath("build-translations/output"),
staticPath("translations")
);
}
function copyLocaleData(staticDir) {
const staticPath = genStaticPath(staticDir);
// Locale data output
fs.copySync(polyPath("build/locale-data"), staticPath("locale-data"));
}
function copyMdiIcons(staticDir) {
const staticPath = genStaticPath(staticDir);
@@ -60,18 +51,9 @@ function copyPolyfills(staticDir) {
npmPath("@webcomponents/webcomponentsjs/webcomponents-bundle.js.map"),
staticPath("polyfills/")
);
// dialog-polyfill css
copyFileDir(
npmPath("dialog-polyfill/dialog-polyfill.css"),
staticPath("polyfills/")
);
}
function copyLoaderJS(staticDir) {
if (!env.useRollup()) {
return;
}
const staticPath = genStaticPath(staticDir);
copyFileDir(npmPath("systemjs/dist/s.min.js"), staticPath("js"));
copyFileDir(npmPath("systemjs/dist/s.min.js.map"), staticPath("js"));
@@ -80,20 +62,12 @@ function copyLoaderJS(staticDir) {
function copyFonts(staticDir) {
const staticPath = genStaticPath(staticDir);
// Local fonts
fs.copySync(
npmPath("roboto-fontface/fonts/roboto/"),
staticPath("fonts/roboto/"),
{
filter: (src) => !src.includes(".") || src.endsWith(".woff2"),
}
cpx.copySync(
npmPath("roboto-fontface/fonts/roboto/*.woff2"),
staticPath("fonts/roboto")
);
}
function copyQrScannerWorker(staticDir) {
const staticPath = genStaticPath(staticDir);
copyFileDir(npmPath("qr-scanner/qr-scanner-worker.min.js"), staticPath("js"));
}
function copyMapPanel(staticDir) {
const staticPath = genStaticPath(staticDir);
copyFileDir(
@@ -106,27 +80,11 @@ function copyMapPanel(staticDir) {
);
}
gulp.task("copy-locale-data", async () => {
const staticDir = paths.app_output_static;
copyLocaleData(staticDir);
});
gulp.task("copy-translations-app", async () => {
const staticDir = paths.app_output_static;
copyTranslations(staticDir);
});
gulp.task("copy-translations-supervisor", async () => {
const staticDir = paths.hassio_output_static;
copyTranslations(staticDir);
});
gulp.task("copy-static-supervisor", async () => {
const staticDir = paths.hassio_output_static;
copyLocaleData(staticDir);
copyFonts(staticDir);
});
gulp.task("copy-static-app", async () => {
const staticDir = paths.app_output_static;
// Basic static files
@@ -136,14 +94,10 @@ gulp.task("copy-static-app", async () => {
copyPolyfills(staticDir);
copyFonts(staticDir);
copyTranslations(staticDir);
copyLocaleData(staticDir);
copyMdiIcons(staticDir);
// Panel assets
copyMapPanel(staticDir);
// Qr Scanner assets
copyQrScannerWorker(staticDir);
});
gulp.task("copy-static-demo", async () => {
@@ -160,7 +114,6 @@ gulp.task("copy-static-demo", async () => {
copyMapPanel(paths.demo_output_static);
copyFonts(paths.demo_output_static);
copyTranslations(paths.demo_output_static);
copyLocaleData(paths.demo_output_static);
copyMdiIcons(paths.demo_output_static);
});
@@ -175,7 +128,6 @@ gulp.task("copy-static-cast", async () => {
copyMapPanel(paths.cast_output_static);
copyFonts(paths.cast_output_static);
copyTranslations(paths.cast_output_static);
copyLocaleData(paths.cast_output_static);
copyMdiIcons(paths.cast_output_static);
});
@@ -191,6 +143,5 @@ gulp.task("copy-static-gallery", async () => {
copyMapPanel(paths.gallery_output_static);
copyFonts(paths.gallery_output_static);
copyTranslations(paths.gallery_output_static);
copyLocaleData(paths.gallery_output_static);
copyMdiIcons(paths.gallery_output_static);
});

View File

@@ -1,15 +1,16 @@
import fs from "fs";
import gulp from "gulp";
import hash from "object-hash";
import path from "path";
import paths from "../paths.cjs";
const gulp = require("gulp");
const path = require("path");
const fs = require("fs");
const hash = require("object-hash");
const ICON_PACKAGE_PATH = path.resolve("node_modules/@mdi/svg/");
const ICON_PACKAGE_PATH = path.resolve(
__dirname,
"../../node_modules/@mdi/svg/"
);
const META_PATH = path.resolve(ICON_PACKAGE_PATH, "meta.json");
const PACKAGE_PATH = path.resolve(ICON_PACKAGE_PATH, "package.json");
const ICON_PATH = path.resolve(ICON_PACKAGE_PATH, "svg");
const OUTPUT_DIR = path.resolve(paths.build_dir, "mdi");
const REMOVED_ICONS_PATH = new URL("../removedIcons.json", import.meta.url);
const OUTPUT_DIR = path.resolve(__dirname, "../../build/mdi");
const encoding = "utf8";
@@ -20,40 +21,10 @@ const getMeta = () => {
const svg = fs.readFileSync(`${ICON_PATH}/${icon.name}.svg`, {
encoding,
});
return {
path: svg.match(/ d="([^"]+)"/)[1],
name: icon.name,
tags: icon.tags,
aliases: icon.aliases,
};
return { path: svg.match(/ d="([^"]+)"/)[1], name: icon.name };
});
};
const addRemovedMeta = (meta) => {
const file = fs.readFileSync(REMOVED_ICONS_PATH, { encoding });
const removed = JSON.parse(file);
const removedMeta = removed.map((removeIcon) => ({
path: removeIcon.path,
name: removeIcon.name,
tags: [],
aliases: [],
}));
const combinedMeta = [...meta, ...removedMeta];
return combinedMeta.sort((a, b) => a.name.localeCompare(b.name));
};
const homeAutomationTag = "Home Automation";
const orderMeta = (meta) => {
const homeAutomationMeta = meta.filter((icon) =>
icon.tags.includes(homeAutomationTag)
);
const otherMeta = meta.filter(
(icon) => !icon.tags.includes(homeAutomationTag)
);
return [...homeAutomationMeta, ...otherMeta];
};
const splitBySize = (meta) => {
const chunks = [];
const CHUNK_SIZE = 50000;
@@ -99,9 +70,7 @@ const findDifferentiator = (curString, prevString) => {
gulp.task("gen-icons-json", (done) => {
const meta = getMeta();
const metaAndRemoved = addRemovedMeta(meta);
const split = splitBySize(metaAndRemoved);
const split = splitBySize(meta);
if (!fs.existsSync(OUTPUT_DIR)) {
fs.mkdirSync(OUTPUT_DIR, { recursive: true });
@@ -132,34 +101,12 @@ gulp.task("gen-icons-json", (done) => {
});
const file = fs.readFileSync(PACKAGE_PATH, { encoding });
const packageMeta = JSON.parse(file);
const package = JSON.parse(file);
fs.writeFileSync(
path.resolve(OUTPUT_DIR, "iconMetadata.json"),
JSON.stringify({ version: packageMeta.version, parts })
);
fs.writeFileSync(
path.resolve(OUTPUT_DIR, "iconList.json"),
JSON.stringify(
orderMeta(meta).map((icon) => ({
name: icon.name,
keywords: [
...icon.tags.map((t) => t.toLowerCase().replace(/\s\/\s/g, " ")),
...icon.aliases,
],
}))
)
JSON.stringify({ version: package.version, parts })
);
done();
});
gulp.task("gen-dummy-icons-json", (done) => {
if (!fs.existsSync(OUTPUT_DIR)) {
fs.mkdirSync(OUTPUT_DIR, { recursive: true });
}
fs.writeFileSync(path.resolve(OUTPUT_DIR, "iconList.json"), "[]");
done();
});
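// A minimal sketch of the size-based chunking performed by splitBySize above,
// assuming icons are accumulated until the serialized chunk would exceed
// CHUNK_SIZE (threshold and icon shape taken from the task; logic is a sketch):
const exampleSplitBySize = (icons, chunkSize = 50000) => {
  const chunks = [[]];
  let size = 0;
  for (const icon of icons) {
    const iconSize = JSON.stringify(icon).length;
    if (size + iconSize > chunkSize && chunks[chunks.length - 1].length > 0) {
      chunks.push([]);
      size = 0;
    }
    chunks[chunks.length - 1].push(icon);
    size += iconSize;
  }
  return chunks;
};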

View File

@@ -1,13 +1,33 @@
import gulp from "gulp";
import env from "../env.cjs";
import "./clean.js";
import "./compress.js";
import "./entry-html.js";
import "./gather-static.js";
import "./gen-icons-json.js";
import "./rollup.js";
import "./translations.js";
import "./webpack.js";
const gulp = require("gulp");
const fs = require("fs");
const path = require("path");
const env = require("../env");
const paths = require("../paths");
require("./clean.js");
require("./gen-icons-json.js");
require("./webpack.js");
require("./compress.js");
require("./rollup.js");
async function writeEntrypointJS() {
// We ship two builds and we need to do feature detection on what version to load.
fs.mkdirSync(paths.hassio_output_root, { recursive: true });
fs.writeFileSync(
path.resolve(paths.hassio_output_root, "entrypoint.js"),
`
try {
new Function("import('${paths.hassio_publicPath}/frontend_latest/entrypoint.js')")();
} catch (err) {
var el = document.createElement('script');
el.src = '${paths.hassio_publicPath}/frontend_es5/entrypoint.js';
document.body.appendChild(el);
}
`,
{ encoding: "utf-8" }
);
}
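// Why the generated snippet uses new Function(): a literal import() in this
// classic script would be a parse error on ES5-only browsers, while wrapping it
// in new Function() defers parsing to runtime, so unsupported browsers throw,
// hit the catch branch, and load the frontend_es5 entrypoint via a script tag.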
gulp.task(
"develop-hassio",
@@ -16,12 +36,8 @@ gulp.task(
process.env.NODE_ENV = "development";
},
"clean-hassio",
"gen-dummy-icons-json",
"gen-pages-hassio-dev",
"build-supervisor-translations",
"copy-translations-supervisor",
"build-locale-data",
"copy-static-supervisor",
"gen-icons-json",
writeEntrypointJS,
env.useRollup() ? "rollup-watch-hassio" : "webpack-watch-hassio"
)
);
@@ -33,14 +49,10 @@ gulp.task(
process.env.NODE_ENV = "production";
},
"clean-hassio",
"gen-dummy-icons-json",
"build-supervisor-translations",
"copy-translations-supervisor",
"build-locale-data",
"copy-static-supervisor",
"gen-icons-json",
env.useRollup() ? "rollup-prod-hassio" : "webpack-prod-hassio",
"gen-pages-hassio-prod",
writeEntrypointJS,
    ...// Don't compress when running tests
(env.isTestBuild() ? [] : ["compress-hassio"])
(env.isTest() ? [] : ["compress-hassio"])
)
);

View File

@@ -1,86 +0,0 @@
import { deleteSync } from "del";
import { mkdir, readFile, writeFile } from "fs/promises";
import gulp from "gulp";
import { join, resolve } from "node:path";
import paths from "../paths.cjs";
const formatjsDir = join(paths.polymer_dir, "node_modules", "@formatjs");
const outDir = join(paths.build_dir, "locale-data");
const INTL_POLYFILLS = {
"intl-datetimeformat": "DateTimeFormat",
"intl-displaynames": "DisplayNames",
"intl-listformat": "ListFormat",
"intl-numberformat": "NumberFormat",
"intl-relativetimeformat": "RelativeTimeFormat",
};
const convertToJSON = async (
pkg,
lang,
subDir = "locale-data",
addFunc = "__addLocaleData",
skipMissing = true
) => {
let localeData;
try {
localeData = await readFile(
join(formatjsDir, pkg, subDir, `${lang}.js`),
"utf-8"
);
} catch (e) {
// Ignore if language is missing (i.e. not supported by @formatjs)
if (e.code === "ENOENT" && skipMissing) {
console.warn(`Skipped missing data for language ${lang} from ${pkg}`);
return;
}
throw e;
}
// Convert to JSON
const obj = INTL_POLYFILLS[pkg];
const dataRegex = new RegExp(
`Intl\\.${obj}\\.${addFunc}\\((?<data>.*)\\)`,
"s"
);
localeData = localeData.match(dataRegex)?.groups?.data;
if (!localeData) {
throw Error(`Failed to extract data for language ${lang} from ${pkg}`);
}
// Parse to validate JSON, then stringify to minify
localeData = JSON.stringify(JSON.parse(localeData));
await writeFile(join(outDir, `${pkg}/${lang}.json`), localeData);
};
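// A sketch of what convertToJSON extracts, assuming the @formatjs locale-data
// files wrap a JSON-compatible object in an Intl.<Obj>.__addLocaleData(...) call
// (the sample content below is illustrative, not real locale data):
const exampleSource =
  'Intl.NumberFormat.__addLocaleData({"data":{"de":{}},"availableLocales":["de"]})';
const exampleData = exampleSource.match(
  /Intl\.NumberFormat\.__addLocaleData\((?<data>.*)\)/s
).groups.data;
const exampleJSON = JSON.stringify(JSON.parse(exampleData));
// exampleJSON === '{"data":{"de":{}},"availableLocales":["de"]}'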
gulp.task("clean-locale-data", async () => deleteSync([outDir]));
gulp.task("create-locale-data", async () => {
const translationMeta = JSON.parse(
await readFile(
resolve(paths.translations_src, "translationMetadata.json"),
"utf-8"
)
);
const conversions = [];
for (const pkg of Object.keys(INTL_POLYFILLS)) {
// eslint-disable-next-line no-await-in-loop
await mkdir(join(outDir, pkg), { recursive: true });
for (const lang of Object.keys(translationMeta)) {
conversions.push(convertToJSON(pkg, lang));
}
}
conversions.push(
convertToJSON(
"intl-datetimeformat",
"add-all-tz",
".",
"__addTZData",
false
)
);
await Promise.all(conversions);
});
gulp.task(
"build-locale-data",
gulp.series("clean-locale-data", "create-locale-data")
);

View File

@@ -1,14 +1,13 @@
// Tasks to run Rollup
import log from "fancy-log";
import gulp from "gulp";
import http from "http";
import open from "open";
import path from "path";
import { rollup } from "rollup";
import handler from "serve-handler";
import paths from "../paths.cjs";
import rollupConfig from "../rollup.cjs";
const path = require("path");
const gulp = require("gulp");
const rollup = require("rollup");
const handler = require("serve-handler");
const http = require("http");
const log = require("fancy-log");
const rollupConfig = require("../rollup");
const paths = require("../paths");
const open = require("open");
const bothBuilds = (createConfigFunc, params) =>
gulp.series(
@@ -31,11 +30,11 @@ const bothBuilds = (createConfigFunc, params) =>
);
function createServer(serveOptions) {
const server = http.createServer((request, response) =>
handler(request, response, {
const server = http.createServer((request, response) => {
return handler(request, response, {
public: serveOptions.root,
})
);
});
});
server.listen(
serveOptions.port,
@@ -47,7 +46,7 @@ function createServer(serveOptions) {
);
}
function watchRollup(createConfig, extraWatchSrc = [], serveOptions = null) {
function watchRollup(createConfig, extraWatchSrc = [], serveOptions) {
const { inputOptions, outputOptions } = createConfig({
isProdBuild: false,
latestBuild: true,

View File

@@ -1,19 +1,21 @@
// Generate service workers
// Generate service worker.
// Based on manifest, create a file with the content as service_worker.js
/* eslint-disable import/no-dynamic-require */
/* eslint-disable global-require */
const gulp = require("gulp");
const path = require("path");
const fs = require("fs-extra");
const workboxBuild = require("workbox-build");
const sourceMapUrl = require("source-map-url");
const paths = require("../paths.js");
import { deleteAsync } from "del";
import gulp from "gulp";
import { mkdir, readFile, symlink, writeFile } from "node:fs/promises";
import { basename, join, relative } from "node:path";
import { injectManifest } from "workbox-build";
import paths from "../paths.cjs";
const swDest = path.resolve(paths.app_output_root, "service_worker.js");
const SW_MAP = {
[paths.app_output_latest]: "modern",
[paths.app_output_es5]: "legacy",
};
const writeSW = (content) => fs.outputFileSync(swDest, content.trim() + "\n");
const SW_DEV =
`
gulp.task("gen-service-worker-app-dev", (done) => {
writeSW(
`
console.debug('Service worker disabled in development');
self.addEventListener('install', (event) => {
@@ -21,67 +23,74 @@ self.addEventListener('install', (event) => {
// removing any prod service worker the dev might have running
self.skipWaiting();
});
`.trim() + "\n";
gulp.task("gen-service-worker-app-dev", async () => {
await mkdir(paths.app_output_root, { recursive: true });
await Promise.all(
Object.values(SW_MAP).map((build) =>
writeFile(join(paths.app_output_root, `sw-${build}.js`), SW_DEV, {
encoding: "utf-8",
})
)
`
);
done();
});
gulp.task("gen-service-worker-app-prod", () =>
Promise.all(
Object.entries(SW_MAP).map(async ([outPath, build]) => {
const manifest = JSON.parse(
await readFile(join(outPath, "manifest.json"), "utf-8")
);
const swSrc = join(paths.app_output_root, manifest["service-worker.js"]);
const swDest = join(paths.app_output_root, `sw-${build}.js`);
const buildDir = relative(paths.app_output_root, outPath);
const { warnings } = await injectManifest({
swSrc,
swDest,
injectionPoint: "__WB_MANIFEST__",
        // Files that match this pattern will be considered unique and skip the revision check
// ignore JS files + translation files
dontCacheBustURLsMatching: new RegExp(
`(?:${buildDir}/.+|static/translations/.+)`
),
globDirectory: paths.app_output_root,
globPatterns: [
`${buildDir}/*.js`,
// Cache all English translations because we catch them as fallback
// Using pattern to match hash instead of * to avoid caching en-GB
// 'v' added as valid hash letter because in dev we hash with 'dev'
"static/translations/**/en-+([a-fv0-9]).json",
// Icon shown on splash screen
"static/icons/favicon-192x192.png",
"static/icons/favicon.ico",
// Common fonts
"static/fonts/roboto/Roboto-Light.woff2",
"static/fonts/roboto/Roboto-Medium.woff2",
"static/fonts/roboto/Roboto-Regular.woff2",
"static/fonts/roboto/Roboto-Bold.woff2",
],
globIgnores: [`${buildDir}/service-worker*`],
});
if (warnings.length > 0) {
console.warn(
`Problems while injecting ${build} service worker:\n`,
warnings.join("\n")
);
}
await deleteAsync(`${swSrc}?(.map)`);
// Needed to install new SW from a cached HTML
if (build === "modern") {
const swOld = join(paths.app_output_root, "service_worker.js");
await symlink(basename(swDest), swOld);
}
})
)
);
gulp.task("gen-service-worker-app-prod", async () => {
// Read bundled source file
const bundleManifestLatest = require(path.resolve(
paths.app_output_latest,
"manifest.json"
));
let serviceWorkerContent = fs.readFileSync(
paths.app_output_root + bundleManifestLatest["service_worker.js"],
"utf-8"
);
// Delete old file from frontend_latest so manifest won't pick it up
fs.removeSync(
paths.app_output_root + bundleManifestLatest["service_worker.js"]
);
fs.removeSync(
paths.app_output_root + bundleManifestLatest["service_worker.js.map"]
);
// Remove ES5
const bundleManifestES5 = require(path.resolve(
paths.app_output_es5,
"manifest.json"
));
fs.removeSync(paths.app_output_root + bundleManifestES5["service_worker.js"]);
fs.removeSync(
paths.app_output_root + bundleManifestES5["service_worker.js.map"]
);
const workboxManifest = await workboxBuild.getManifest({
    // Files that match this pattern will be considered unique and skip the revision check
// ignore JS files + translation files
dontCacheBustURLsMatching: /(frontend_latest\/.+|static\/translations\/.+)/,
globDirectory: paths.app_output_root,
globPatterns: [
"frontend_latest/*.js",
// Cache all English translations because we catch them as fallback
// Using pattern to match hash instead of * to avoid caching en-GB
// 'v' added as valid hash letter because in dev we hash with 'dev'
"static/translations/**/en-+([a-fv0-9]).json",
// Icon shown on splash screen
"static/icons/favicon-192x192.png",
"static/icons/favicon.ico",
// Common fonts
"static/fonts/roboto/Roboto-Light.woff2",
"static/fonts/roboto/Roboto-Medium.woff2",
"static/fonts/roboto/Roboto-Regular.woff2",
"static/fonts/roboto/Roboto-Bold.woff2",
],
});
for (const warning of workboxManifest.warnings) {
console.warn(warning);
}
// remove source map and add WB manifest
serviceWorkerContent = sourceMapUrl.removeFrom(serviceWorkerContent);
serviceWorkerContent = serviceWorkerContent.replace(
"WB_MANIFEST",
JSON.stringify(workboxManifest.manifestEntries)
);
// Write new file to root
fs.writeFileSync(swDest, serviceWorkerContent);
});
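// For the injectManifest call in the new task above, the service worker source
// is assumed to reference the injection point and hand the generated precache
// list to Workbox, roughly like this (sketch of the sw source, not this file):
//   import { precacheAndRoute } from "workbox-precaching";
//   precacheAndRoute(__WB_MANIFEST__); // replaced at build time with [{ url, revision }, ...]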

View File

@@ -1,112 +1,101 @@
/* eslint-disable max-classes-per-file */
import { deleteAsync } from "del";
import { glob } from "glob";
import gulp from "gulp";
import rename from "gulp-rename";
import merge from "lodash.merge";
import { createHash } from "node:crypto";
import { mkdir, readFile } from "node:fs/promises";
import { basename, join } from "node:path";
import { PassThrough, Transform } from "node:stream";
import { finished } from "node:stream/promises";
import env from "../env.cjs";
import paths from "../paths.cjs";
import "./fetch-nightly-translations.js";
const crypto = require("crypto");
const del = require("del");
const path = require("path");
const source = require("vinyl-source-stream");
const vinylBuffer = require("vinyl-buffer");
const gulp = require("gulp");
const fs = require("fs");
const foreach = require("gulp-foreach");
const merge = require("gulp-merge-json");
const minify = require("gulp-jsonminify");
const rename = require("gulp-rename");
const transform = require("gulp-json-transform");
const { mapFiles } = require("../util");
const env = require("../env");
const paths = require("../paths");
const inFrontendDir = "translations/frontend";
const inBackendDir = "translations/backend";
const workDir = "build/translations";
const outDir = join(workDir, "output");
const EN_SRC = join(paths.translations_src, "en.json");
const TEST_LOCALE = "en-x-test";
const workDir = "build-translations";
const fullDir = workDir + "/full";
const coreDir = workDir + "/core";
const outDir = workDir + "/output";
let mergeBackend = false;
gulp.task(
"translations-enable-merge-backend",
gulp.parallel(async () => {
mergeBackend = true;
}, "allow-setup-fetch-nightly-translations")
);
gulp.task("translations-enable-merge-backend", (done) => {
mergeBackend = true;
done();
});
// Transform stream to apply a function on Vinyl JSON files (buffer mode only).
// The provided function can either return a new object, or an array of
// [object, subdirectory] pairs for fragmentizing the JSON.
class CustomJSON extends Transform {
constructor(func, reviver = null) {
super({ objectMode: true });
this._func = func;
this._reviver = reviver;
}
async _transform(file, _, callback) {
try {
let obj = JSON.parse(file.contents.toString(), this._reviver);
if (this._func) obj = this._func(obj, file.path);
for (const [outObj, dir] of Array.isArray(obj) ? obj : [[obj, ""]]) {
const outFile = file.clone({ contents: false });
outFile.contents = Buffer.from(JSON.stringify(outObj));
outFile.dirname += `/${dir}`;
this.push(outFile);
}
callback(null);
} catch (err) {
callback(err);
}
}
}
// Transform stream to merge Vinyl JSON files (buffer mode only).
class MergeJSON extends Transform {
_objects = [];
constructor(stem, startObj = {}, reviver = null) {
super({ objectMode: true, allowHalfOpen: false });
this._stem = stem;
this._startObj = structuredClone(startObj);
this._reviver = reviver;
}
async _transform(file, _, callback) {
try {
this._objects.push(JSON.parse(file.contents.toString(), this._reviver));
if (!this._outFile) this._outFile = file.clone({ contents: false });
callback(null);
} catch (err) {
callback(err);
}
}
async _flush(callback) {
try {
const mergedObj = merge(this._startObj, ...this._objects);
this._outFile.contents = Buffer.from(JSON.stringify(mergedObj));
this._outFile.stem = this._stem;
callback(null, this._outFile);
} catch (err) {
callback(err);
}
}
}
// Utility to flatten object keys to single level using separator
const flatten = (data, prefix = "", sep = ".") => {
const output = {};
for (const [key, value] of Object.entries(data)) {
if (typeof value === "object") {
Object.assign(output, flatten(value, prefix + key + sep, sep));
} else {
output[prefix + key] = value;
}
}
return output;
String.prototype.rsplit = function (sep, maxsplit) {
var split = this.split(sep);
return maxsplit
? [split.slice(0, -maxsplit).join(sep)].concat(split.slice(-maxsplit))
: split;
};
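// Worked example for flatten above: nested translation keys collapse into
// dot-separated keys, e.g.
//   flatten({ a: { b: "x", c: { d: "y" } } })  ->  { "a.b": "x", "a.c.d": "y" }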
// Filter functions that can be passed directly to JSON.parse()
const emptyReviver = (_key, value) => value || undefined;
const testReviver = (_key, value) =>
value && typeof value === "string" ? "TRANSLATED" : value;
// Panel translations which should be split from the core translations. These
// should mirror the fragment definitions in polymer.json, so that we load
// additional resources at equivalent points.
const TRANSLATION_FRAGMENTS = [
"config",
"history",
"logbook",
"mailbox",
"profile",
"shopping-list",
"page-authorize",
"page-demo",
"page-onboarding",
"developer-tools",
];
function recursiveFlatten(prefix, data) {
let output = {};
Object.keys(data).forEach(function (key) {
if (typeof data[key] === "object") {
output = {
...output,
...recursiveFlatten(prefix + key + ".", data[key]),
};
} else {
output[prefix + key] = data[key];
}
});
return output;
}
function flatten(data) {
return recursiveFlatten("", data);
}
function emptyFilter(data) {
const newData = {};
Object.keys(data).forEach((key) => {
if (data[key]) {
if (typeof data[key] === "object") {
newData[key] = emptyFilter(data[key]);
} else {
newData[key] = data[key];
}
}
});
return newData;
}
function recursiveEmpty(data) {
const newData = {};
Object.keys(data).forEach((key) => {
if (data[key]) {
if (typeof data[key] === "object") {
newData[key] = recursiveEmpty(data[key]);
} else {
newData[key] = "TRANSLATED";
}
}
});
return newData;
}
/**
* Replace Lokalise key placeholders with their actual values.
@@ -115,44 +104,69 @@ const testReviver = (_key, value) =>
* be included in src/translations/en.json, but still be usable while
* developing locally.
*
* @link https://docs.lokalise.com/en/articles/1400528-key-referencing
* @link https://docs.lokalise.co/article/KO5SZWLLsy-key-referencing
*/
const KEY_REFERENCE = /\[%key:([^%]+)%\]/;
const lokaliseTransform = (data, path, original = data) => {
const re_key_reference = /\[%key:([^%]+)%\]/;
function lokaliseTransform(data, original, file) {
const output = {};
for (const [key, value] of Object.entries(data)) {
if (typeof value === "object") {
output[key] = lokaliseTransform(value, path, original);
Object.entries(data).forEach(([key, value]) => {
if (value instanceof Object) {
output[key] = lokaliseTransform(value, original, file);
} else {
output[key] = value.replace(KEY_REFERENCE, (_match, lokalise_key) => {
const replace = lokalise_key.split("::").reduce((tr, k) => {
output[key] = value.replace(re_key_reference, (match, key) => {
const replace = key.split("::").reduce((tr, k) => {
if (!tr) {
throw Error(`Invalid key placeholder ${lokalise_key} in ${path}`);
throw Error(`Invalid key placeholder ${key} in ${file.path}`);
}
return tr[k];
}, original);
if (typeof replace !== "string") {
throw Error(`Invalid key placeholder ${lokalise_key} in ${path}`);
throw Error(`Invalid key placeholder ${key} in ${file.path}`);
}
return replace;
});
}
}
});
return output;
};
}
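// Example of the key referencing resolved above (keys and values illustrative):
//   { "state": { "on": "On", "active": "[%key:state::on%]" } }
// resolves "active" to "On" by splitting the placeholder on "::" and walking
// that path through the original, untransformed object.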
gulp.task("clean-translations", () => deleteAsync([workDir]));
gulp.task("clean-translations", function () {
return del([workDir]);
});
const makeWorkDir = () => mkdir(workDir, { recursive: true });
gulp.task("ensure-translations-build-dir", (done) => {
if (!fs.existsSync(workDir)) {
fs.mkdirSync(workDir);
}
done();
});
const createTestTranslation = () =>
env.isProdBuild()
? Promise.resolve()
: gulp
.src(EN_SRC)
.pipe(new CustomJSON(null, testReviver))
.pipe(rename(`${TEST_LOCALE}.json`))
.pipe(gulp.dest(workDir));
gulp.task("create-test-metadata", function (cb) {
fs.writeFile(
workDir + "/testMetadata.json",
JSON.stringify({
test: {
nativeName: "Test",
},
}),
cb
);
});
gulp.task(
"create-test-translation",
gulp.series("create-test-metadata", function createTestTranslation() {
return gulp
.src(path.join(paths.translations_src, "en.json"))
.pipe(
transform(function (data, file) {
return recursiveEmpty(data);
})
)
.pipe(rename("test.json"))
.pipe(gulp.dest(workDir));
})
);
/**
* This task will build a master translation file, to be used as the base for
@@ -163,164 +177,231 @@ const createTestTranslation = () =>
* project is buildable immediately after merging new translation keys, since
* the Lokalise update to translations/en.json will not happen immediately.
*/
const createMasterTranslation = () =>
gulp
.src([EN_SRC, ...(mergeBackend ? [`${inBackendDir}/en.json`] : [])])
.pipe(new CustomJSON(lokaliseTransform))
.pipe(new MergeJSON("en"))
.pipe(gulp.dest(workDir));
gulp.task("build-master-translation", function () {
const src = [path.join(paths.translations_src, "en.json")];
const FRAGMENTS = ["base"];
const toggleSupervisorFragment = async () => {
FRAGMENTS[0] = "supervisor";
};
const panelFragment = (fragment) =>
fragment !== "base" && fragment !== "supervisor";
const HASHES = new Map();
const createTranslations = async () => {
// Parse and store the master to avoid repeating this for each locale, then
// add the panel fragments when processing the app.
const enMaster = JSON.parse(await readFile(`${workDir}/en.json`, "utf-8"));
if (FRAGMENTS[0] === "base") {
FRAGMENTS.push(...Object.keys(enMaster.ui.panel));
if (mergeBackend) {
src.push(path.join(inBackendDir, "en.json"));
}
// The downstream pipeline is setup first. It hashes the merged data for
// each locale, then fragmentizes and flattens the data for final output.
const translationFiles = await glob([
`${inFrontendDir}/!(en).json`,
...(env.isProdBuild() ? [] : [`${workDir}/${TEST_LOCALE}.json`]),
]);
const hashStream = new Transform({
objectMode: true,
transform: async (file, _, callback) => {
const hash = env.isProdBuild()
? createHash("md5").update(file.contents).digest("hex")
: "dev";
HASHES.set(file.stem, hash);
file.stem += `-${hash}`;
callback(null, file);
},
}).setMaxListeners(translationFiles.length + 1);
const fragmentsStream = hashStream
return gulp
.src(src)
.pipe(
new CustomJSON((data) =>
FRAGMENTS.map((fragment) => {
switch (fragment) {
case "base":
// Remove the panels and supervisor to create the base translations
return [
flatten({
...data,
ui: { ...data.ui, panel: undefined },
supervisor: undefined,
}),
"",
];
case "supervisor":
// Supervisor key is at the top level
return [flatten(data.supervisor), ""];
default:
// Create a fragment with only the given panel
return [
flatten(data.ui.panel[fragment], `ui.panel.${fragment}.`),
fragment,
];
}
})
)
transform(function (data, file) {
return lokaliseTransform(data, data, file);
})
)
.pipe(gulp.dest(outDir));
// Send the English master downstream first, then for each other locale
// generate merged JSON data to continue piping. It begins with the master
// translation as a failsafe for untranslated strings, and merges all parent
// tags into one file for each specific subtag
//
// TODO: This is a naive interpretation of BCP47 that should be improved.
// Will be OK for now as long as we don't have anything more complicated
// than a base translation + region.
const masterStream = gulp
.src(`${workDir}/en.json`)
.pipe(new PassThrough({ objectMode: true }));
masterStream.pipe(hashStream, { end: false });
const mergesFinished = [finished(masterStream)];
for (const translationFile of translationFiles) {
const locale = basename(translationFile, ".json");
const subtags = locale.split("-");
const mergeFiles = [];
for (let i = 1; i <= subtags.length; i++) {
const lang = subtags.slice(0, i).join("-");
if (lang === TEST_LOCALE) {
mergeFiles.push(`${workDir}/${TEST_LOCALE}.json`);
} else if (lang !== "en") {
mergeFiles.push(`${inFrontendDir}/${lang}.json`);
if (mergeBackend) {
mergeFiles.push(`${inBackendDir}/${lang}.json`);
}
}
}
const mergeStream = gulp
.src(mergeFiles, { allowEmpty: true })
.pipe(new MergeJSON(locale, enMaster, emptyReviver));
mergesFinished.push(finished(mergeStream));
mergeStream.pipe(hashStream, { end: false });
}
// Wait for all merges to finish, then it's safe to end writing to the
// downstream pipeline and wait for all fragments to finish writing.
await Promise.all(mergesFinished);
hashStream.end();
await finished(fragmentsStream);
};
const writeTranslationMetaData = () =>
gulp
.src([`${paths.translations_src}/translationMetadata.json`])
.pipe(
new CustomJSON((meta) => {
// Add the test translation in development.
if (!env.isProdBuild()) {
meta[TEST_LOCALE] = { nativeName: "Translation Test" };
}
// Filter out locales without a native name, and add the hashes.
for (const locale of Object.keys(meta)) {
if (!meta[locale].nativeName) {
meta[locale] = undefined;
console.warn(
`Skipping locale ${locale} because native name is not translated.`
);
} else {
meta[locale].hash = HASHES.get(locale);
}
}
return {
fragments: FRAGMENTS.filter(panelFragment),
translations: meta,
};
merge({
fileName: "translationMaster.json",
})
)
.pipe(gulp.dest(workDir));
});
gulp.task("build-merged-translations", function () {
return gulp
.src([inFrontendDir + "/*.json", workDir + "/test.json"], {
allowEmpty: true,
})
.pipe(
transform(function (data, file) {
return lokaliseTransform(data, data, file);
})
)
.pipe(
foreach(function (stream, file) {
// For each language generate a merged json file. It begins with the master
// translation as a failsafe for untranslated strings, and merges all parent
// tags into one file for each specific subtag
//
// TODO: This is a naive interpretation of BCP47 that should be improved.
// Will be OK for now as long as we don't have anything more complicated
// than a base translation + region.
const tr = path.basename(file.history[0], ".json");
const subtags = tr.split("-");
const src = [workDir + "/translationMaster.json"];
for (let i = 1; i <= subtags.length; i++) {
const lang = subtags.slice(0, i).join("-");
if (lang === "test") {
src.push(workDir + "/test.json");
} else if (lang !== "en") {
src.push(inFrontendDir + "/" + lang + ".json");
if (mergeBackend) {
src.push(inBackendDir + "/" + lang + ".json");
}
}
}
return gulp
.src(src, { allowEmpty: true })
.pipe(transform((data) => emptyFilter(data)))
.pipe(
merge({
fileName: tr + ".json",
})
)
.pipe(gulp.dest(fullDir));
})
);
});
var taskName;
const splitTasks = [];
TRANSLATION_FRAGMENTS.forEach((fragment) => {
taskName = "build-translation-fragment-" + fragment;
gulp.task(taskName, function () {
// Return only the translations for this fragment.
return gulp
.src(fullDir + "/*.json")
.pipe(
transform((data) => ({
ui: {
panel: {
[fragment]: data.ui.panel[fragment],
},
},
}))
)
.pipe(gulp.dest(workDir + "/" + fragment));
});
splitTasks.push(taskName);
});
taskName = "build-translation-core";
gulp.task(taskName, function () {
// Remove the fragment translations from the core translation.
return gulp
.src(fullDir + "/*.json")
.pipe(
transform((data, file) => {
TRANSLATION_FRAGMENTS.forEach((fragment) => {
delete data.ui.panel[fragment];
});
return data;
})
)
.pipe(gulp.dest(coreDir));
});
splitTasks.push(taskName);
gulp.task("build-flattened-translations", function () {
// Flatten the split versions of our translations, and move them into outDir
return gulp
.src(
TRANSLATION_FRAGMENTS.map(
(fragment) => workDir + "/" + fragment + "/*.json"
).concat(coreDir + "/*.json"),
{ base: workDir }
)
.pipe(
transform(function (data) {
// Polymer.AppLocalizeBehavior requires flattened json
return flatten(data);
})
)
.pipe(minify())
.pipe(
rename((filePath) => {
if (filePath.dirname === "core") {
filePath.dirname = "";
}
})
)
.pipe(gulp.dest(outDir));
});
const fingerprints = {};
gulp.task(
"build-translation-fingerprints",
function fingerprintTranslationFiles() {
// Fingerprint full file of each language
const files = fs.readdirSync(fullDir);
for (let i = 0; i < files.length; i++) {
fingerprints[files[i].split(".")[0]] = {
// In dev we create fake hashes
hash: env.isProdBuild()
? crypto
.createHash("md5")
.update(fs.readFileSync(path.join(fullDir, files[i]), "utf-8"))
.digest("hex")
: "dev",
};
}
mapFiles(outDir, ".json", (filename) => {
const parsed = path.parse(filename);
// nl.json -> nl-<hash>.json
if (!(parsed.name in fingerprints)) {
throw new Error(`Unable to find hash for ${filename}`);
}
fs.renameSync(
filename,
`${parsed.dir}/${parsed.name}-${fingerprints[parsed.name].hash}${
parsed.ext
}`
);
});
const stream = source("translationFingerprints.json");
stream.write(JSON.stringify(fingerprints));
process.nextTick(() => stream.end());
return stream.pipe(vinylBuffer()).pipe(gulp.dest(workDir));
}
);
gulp.task(
"build-translations",
gulp.series(
gulp.parallel(
"fetch-nightly-translations",
gulp.series("clean-translations", makeWorkDir)
),
createTestTranslation,
createMasterTranslation,
createTranslations,
writeTranslationMetaData
"clean-translations",
"ensure-translations-build-dir",
env.isProdBuild() ? (done) => done() : "create-test-translation",
"build-master-translation",
"build-merged-translations",
gulp.parallel(...splitTasks),
"build-flattened-translations",
"build-translation-fingerprints",
function writeMetadata() {
return gulp
.src(
[
path.join(paths.translations_src, "translationMetadata.json"),
workDir + "/testMetadata.json",
workDir + "/translationFingerprints.json",
],
{ allowEmpty: true }
)
.pipe(merge({}))
.pipe(
transform(function (data) {
const newData = {};
Object.entries(data).forEach(([key, value]) => {
// Filter out translations without native name.
if (data[key].nativeName) {
newData[key] = data[key];
} else {
console.warn(
`Skipping language ${key}. Native name was not translated.`
);
}
if (data[key]) newData[key] = value;
});
return newData;
})
)
.pipe(
transform((data) => ({
fragments: TRANSLATION_FRAGMENTS,
translations: data,
}))
)
.pipe(rename("translationMetadata.json"))
.pipe(gulp.dest(workDir));
}
)
);
gulp.task(
"build-supervisor-translations",
gulp.series(toggleSupervisorFragment, "build-translations")
);

View File

@@ -1,10 +0,0 @@
import gulp from "gulp";
import { startDevServer } from "@web/dev-server";
gulp.task("wds-watch-app", async () => {
startDevServer({
config: {
watch: true,
},
});
});

View File

@@ -1,71 +1,42 @@
// Tasks to run webpack.
import fs from "fs";
import path from "path";
import log from "fancy-log";
import gulp from "gulp";
import webpack from "webpack";
import WebpackDevServer from "webpack-dev-server";
import env from "../env.cjs";
import paths from "../paths.cjs";
import {
const gulp = require("gulp");
const webpack = require("webpack");
const WebpackDevServer = require("webpack-dev-server");
const log = require("fancy-log");
const path = require("path");
const paths = require("../paths");
const {
createAppConfig,
createCastConfig,
createDemoConfig,
createGalleryConfig,
createCastConfig,
createHassioConfig,
} from "../webpack.cjs";
createGalleryConfig,
} = require("../webpack");
const bothBuilds = (createConfigFunc, params) => [
createConfigFunc({ ...params, latestBuild: true }),
createConfigFunc({ ...params, latestBuild: false }),
];
const isWsl =
fs.existsSync("/proc/version") &&
fs
.readFileSync("/proc/version", "utf-8")
.toLocaleLowerCase()
.includes("microsoft");
/**
* @param {{
* compiler: import("webpack").Compiler,
* contentBase: string,
* port: number,
* listenHost?: string
* }}
*/
const runDevServer = async ({
const runDevServer = ({
compiler,
contentBase,
port,
listenHost = undefined,
}) => {
if (listenHost === undefined) {
// For dev container, we need to listen on all hosts
listenHost = env.isDevContainer() ? "0.0.0.0" : "localhost";
}
const server = new WebpackDevServer(
{
hot: false,
open: true,
host: listenHost,
port,
static: {
directory: contentBase,
watch: true,
},
},
compiler
);
listenHost = "localhost",
}) =>
new WebpackDevServer(compiler, {
open: true,
watchContentBase: true,
contentBase,
}).listen(port, listenHost, function (err) {
if (err) {
throw err;
}
// Server listening
log("[webpack-dev-server]", `http://localhost:${port}`);
});
await server.start();
// Server listening
log("[webpack-dev-server]", `Project is running at http://localhost:${port}`);
};
const doneHandler = (done) => (err, stats) => {
const handler = (done) => (err, stats) => {
if (err) {
log.error(err.stack || err);
if (err.details) {
@@ -74,128 +45,128 @@ const doneHandler = (done) => (err, stats) => {
return;
}
if (stats.hasErrors() || stats.hasWarnings()) {
console.log(stats.toString("minimal"));
}
log(`Build done @ ${new Date().toLocaleTimeString()}`);
if (stats.hasErrors() || stats.hasWarnings()) {
log.warn(stats.toString("minimal"));
}
if (done) {
done();
}
};
const prodBuild = (conf) =>
new Promise((resolve) => {
webpack(
conf,
// Resolve promise when done. Because we pass a callback, webpack closes itself
doneHandler(resolve)
);
});
gulp.task("webpack-watch-app", () => {
// This command will run forever because we don't close compiler
webpack(
process.env.ES5
? bothBuilds(createAppConfig, { isProdBuild: false })
: createAppConfig({ isProdBuild: false, latestBuild: true })
).watch({ poll: isWsl }, doneHandler());
// we are not calling done, so this command will run forever
webpack(createAppConfig({ isProdBuild: false, latestBuild: true })).watch(
{ ignored: /build-translations/ },
handler()
);
gulp.watch(
path.join(paths.translations_src, "en.json"),
gulp.series("build-translations", "copy-translations-app")
);
});
gulp.task("webpack-prod-app", () =>
prodBuild(
bothBuilds(createAppConfig, {
isProdBuild: true,
isStatsBuild: env.isStatsBuild(),
isTestBuild: env.isTestBuild(),
})
)
gulp.task(
"webpack-prod-app",
() =>
new Promise((resolve) =>
webpack(
bothBuilds(createAppConfig, { isProdBuild: true }),
handler(resolve)
)
)
);
gulp.task("webpack-dev-server-demo", () =>
gulp.task("webpack-dev-server-demo", () => {
runDevServer({
compiler: webpack(
createDemoConfig({ isProdBuild: false, latestBuild: true })
),
compiler: webpack(bothBuilds(createDemoConfig, { isProdBuild: false })),
contentBase: paths.demo_output_root,
port: 8090,
})
});
});
gulp.task(
"webpack-prod-demo",
() =>
new Promise((resolve) =>
webpack(
bothBuilds(createDemoConfig, {
isProdBuild: true,
}),
handler(resolve)
)
)
);
gulp.task("webpack-prod-demo", () =>
prodBuild(
bothBuilds(createDemoConfig, {
isProdBuild: true,
})
)
);
gulp.task("webpack-dev-server-cast", () =>
gulp.task("webpack-dev-server-cast", () => {
runDevServer({
compiler: webpack(
createCastConfig({ isProdBuild: false, latestBuild: true })
),
compiler: webpack(bothBuilds(createCastConfig, { isProdBuild: false })),
contentBase: paths.cast_output_root,
port: 8080,
// Accessible from the network, because that's how Cast hits it.
listenHost: "0.0.0.0",
})
);
});
});
gulp.task("webpack-prod-cast", () =>
prodBuild(
bothBuilds(createCastConfig, {
isProdBuild: true,
})
)
gulp.task(
"webpack-prod-cast",
() =>
new Promise((resolve) =>
webpack(
bothBuilds(createCastConfig, {
isProdBuild: true,
}),
handler(resolve)
)
)
);
gulp.task("webpack-watch-hassio", () => {
// This command will run forever because we don't close compiler
// we are not calling done, so this command will run forever
webpack(
createHassioConfig({
isProdBuild: false,
latestBuild: true,
})
).watch({ ignored: /build/, poll: isWsl }, doneHandler());
gulp.watch(
path.join(paths.translations_src, "en.json"),
gulp.series("build-supervisor-translations", "copy-translations-supervisor")
);
).watch({}, handler());
});
gulp.task("webpack-prod-hassio", () =>
prodBuild(
bothBuilds(createHassioConfig, {
isProdBuild: true,
isStatsBuild: env.isStatsBuild(),
isTestBuild: env.isTestBuild(),
})
)
gulp.task(
"webpack-prod-hassio",
() =>
new Promise((resolve) =>
webpack(
bothBuilds(createHassioConfig, {
isProdBuild: true,
}),
handler(resolve)
)
)
);
gulp.task("webpack-dev-server-gallery", () =>
gulp.task("webpack-dev-server-gallery", () => {
runDevServer({
compiler: webpack(
createGalleryConfig({ isProdBuild: false, latestBuild: true })
),
// We don't use the es5 build, but the dev server will fuck up the publicPath if we don't
compiler: webpack(bothBuilds(createGalleryConfig, { isProdBuild: false })),
contentBase: paths.gallery_output_root,
port: 8100,
listenHost: "0.0.0.0",
})
);
});
});
gulp.task("webpack-prod-gallery", () =>
prodBuild(
createGalleryConfig({
isProdBuild: true,
latestBuild: true,
})
)
gulp.task(
"webpack-prod-gallery",
() =>
new Promise((resolve) =>
webpack(
createGalleryConfig({
isProdBuild: true,
latestBuild: true,
}),
handler(resolve)
)
)
);

View File

@@ -1,83 +0,0 @@
#!/usr/bin/env node
// Script to print Babel plugins and Core JS polyfills that will be used by browserslist environments
import { version as babelVersion } from "@babel/core";
import presetEnv from "@babel/preset-env";
import compilationTargets from "@babel/helper-compilation-targets";
import coreJSCompat from "core-js-compat";
import { logPlugin } from "@babel/preset-env/lib/debug.js";
// eslint-disable-next-line import/no-relative-packages
import shippedPolyfills from "../node_modules/babel-plugin-polyfill-corejs3/lib/shipped-proposals.js";
import { babelOptions } from "./bundle.cjs";
const detailsOpen = (heading) =>
`<details>\n<summary><h4>${heading}</h4></summary>\n`;
const detailsClose = "</details>\n";
const dummyAPI = {
version: babelVersion,
assertVersion: () => {},
caller: (callback) =>
callback({
name: "Dummy Bundler",
supportsStaticESM: true,
supportsDynamicImport: true,
supportsTopLevelAwait: true,
supportsExportNamespaceFrom: true,
}),
targets: () => ({}),
};
// Generate filter function based on proposal/method inputs
// Copied and adapted from babel-plugin-polyfill-corejs3/esm/index.mjs
const polyfillFilter = (method, proposals, shippedProposals) => (name) => {
if (proposals || method === "entry-global") return true;
if (shippedProposals && shippedPolyfills.default.has(name)) {
return true;
}
if (name.startsWith("esnext.")) {
const esName = `es.${name.slice(7)}`;
    // If its hypothetical esName is not in the latest compat data, the proposal is not stage 4
return esName in coreJSCompat.data;
}
return true;
};
// Log the plugins and polyfills for each build environment
for (const buildType of ["Modern", "Legacy"]) {
const browserslistEnv = buildType.toLowerCase();
const babelOpts = babelOptions({ latestBuild: browserslistEnv === "modern" });
const presetEnvOpts = babelOpts.presets[0][1];
// Invoking preset-env in debug mode will log the included plugins
console.log(detailsOpen(`${buildType} Build Babel Plugins`));
presetEnv.default(dummyAPI, {
...presetEnvOpts,
browserslistEnv,
debug: true,
});
console.log(detailsClose);
// Manually log the Core-JS polyfills using the same technique
if (presetEnvOpts.useBuiltIns) {
console.log(detailsOpen(`${buildType} Build Core-JS Polyfills`));
const targets = compilationTargets.default(babelOpts?.targets, {
browserslistEnv,
});
const polyfillList = coreJSCompat({ targets }).list.filter(
polyfillFilter(
`${presetEnvOpts.useBuiltIns}-global`,
presetEnvOpts?.corejs?.proposals,
presetEnvOpts?.shippedProposals
)
);
console.log(
"The following %i polyfills may be injected by Babel:\n",
polyfillList.length
);
for (const polyfill of polyfillList) {
logPlugin(polyfill, targets, coreJSCompat.data);
}
console.log(detailsClose);
}
}
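// The script is intended to be run directly with Node and prints collapsible
// <details> sections per build type; a hypothetical invocation (path illustrative):
//   node build-scripts/print-babel-plugins.js > babel-plugins-report.md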

View File

@@ -1,4 +1,4 @@
const path = require("path");
var path = require("path");
module.exports = {
polymer_dir: path.resolve(__dirname, ".."),
@@ -25,7 +25,6 @@ module.exports = {
cast_output_es5: path.resolve(__dirname, "../cast/dist/frontend_es5"),
gallery_dir: path.resolve(__dirname, "../gallery"),
gallery_build: path.resolve(__dirname, "../gallery/build"),
gallery_output_root: path.resolve(__dirname, "../gallery/dist"),
gallery_output_latest: path.resolve(
__dirname,
@@ -35,12 +34,6 @@ module.exports = {
hassio_dir: path.resolve(__dirname, "../hassio"),
hassio_output_root: path.resolve(__dirname, "../hassio/build"),
hassio_output_static: path.resolve(__dirname, "../hassio/build/static"),
hassio_output_latest: path.resolve(
__dirname,
"../hassio/build/frontend_latest"
),
hassio_output_es5: path.resolve(__dirname, "../hassio/build/frontend_es5"),
hassio_publicPath: "/api/hassio/app",
translations_src: path.resolve(__dirname, "../src/translations"),

View File

@@ -1 +0,0 @@
[]

View File

@@ -1,3 +1,5 @@
const path = require("path");
module.exports = function (userOptions = {}) {
// Files need to be absolute paths.
// This only works if the file has no exports

View File

@@ -81,13 +81,13 @@ module.exports = function (opts = {}) {
opts.workerRegexp.flags
);
if (!workerRegexp.test(code)) {
return undefined;
return;
}
const ms = new MagicString(code);
// Reset the regexp
workerRegexp.lastIndex = 0;
for (;;) {
while (true) {
const match = workerRegexp.exec(code);
if (!match) {
break;
@@ -98,12 +98,11 @@ module.exports = function (opts = {}) {
// Parse the optional options object
if (match[3] && match[3].length > 0) {
// FIXME: ooooof!
// eslint-disable-next-line @typescript-eslint/no-implied-eval
optionsObject = new Function(`return ${match[3].slice(1)};`)();
}
delete optionsObject.type;
if (!/^.*\//.test(workerFile)) {
if (!new RegExp("^.*/").test(workerFile)) {
this.warn(
`Paths passed to the Worker constructor must be relative or absolute, i.e. start with /, ./ or ../ (just like dynamic import!). Ignoring "${workerFile}".`
);
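      // The call sites this plugin targets are assumed to look like (illustrative):
      //   const worker = new Worker("./some.worker.js", { type: "module" });
      // The options object is evaluated with new Function above, its "type" key is
      // dropped, and the referenced worker file is bundled as a separate chunk.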
@@ -111,14 +110,12 @@ module.exports = function (opts = {}) {
}
// Find worker file and store it as a chunk with ID prefixed for our loader
// eslint-disable-next-line no-await-in-loop
const resolvedWorkerFile = (await this.resolve(workerFile, id)).id;
let chunkRefId;
if (resolvedWorkerFile in refIds) {
chunkRefId = refIds[resolvedWorkerFile];
} else {
this.addWatchFile(resolvedWorkerFile);
// eslint-disable-next-line no-await-in-loop
const source = await getBundledWorker(
resolvedWorkerFile,
rollupOptions
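
The "new Function" trick in this hunk is worth a standalone illustration. The captured text below is hypothetical, but it mirrors what match[3] would hold for a call like new Worker("./foo.js", { type: "module" }), i.e. everything after the URL argument, starting with the comma:

// Evaluate an options-object literal captured by the worker regexp.
const captured = ', { type: "module", name: "background" }'; // hypothetical match[3]
const optionsObject = new Function(`return ${captured.slice(1)};`)();
// type is deleted because the output format is decided by the bundler, not the source.
delete optionsObject.type;
console.log(optionsObject); // { name: "background" }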

View File

@@ -1,146 +0,0 @@
const path = require("path");
const commonjs = require("@rollup/plugin-commonjs");
const resolve = require("@rollup/plugin-node-resolve");
const json = require("@rollup/plugin-json");
const { babel } = require("@rollup/plugin-babel");
const replace = require("@rollup/plugin-replace");
const visualizer = require("rollup-plugin-visualizer");
const { string } = require("rollup-plugin-string");
const { terser } = require("rollup-plugin-terser");
const manifest = require("./rollup-plugins/manifest-plugin.cjs");
const worker = require("./rollup-plugins/worker-plugin.cjs");
const dontHashPlugin = require("./rollup-plugins/dont-hash-plugin.cjs");
const ignore = require("./rollup-plugins/ignore-plugin.cjs");
const bundle = require("./bundle.cjs");
const paths = require("./paths.cjs");
const extensions = [".js", ".ts"];
/**
* @param {Object} arg
* @param { import("rollup").InputOption } arg.input
*/
const createRollupConfig = ({
entry,
outputPath,
defineOverlay,
isProdBuild,
latestBuild,
isStatsBuild,
publicPath,
dontHash,
isWDS,
}) => ({
/**
* @type { import("rollup").InputOptions }
*/
inputOptions: {
input: entry,
// Some entry points contain no JavaScript. This setting silences a warning about that.
// https://rollupjs.org/configuration-options/#preserveentrysignatures
preserveEntrySignatures: false,
plugins: [
ignore({
files: bundle
.emptyPackages({ latestBuild })
// TEMP HACK: Makes Rollup build work again
.concat(
require.resolve(
"@webcomponents/scoped-custom-element-registry/scoped-custom-element-registry.min"
)
),
}),
resolve({
extensions,
preferBuiltins: false,
browser: true,
rootDir: paths.polymer_dir,
}),
commonjs(),
json(),
babel({
...bundle.babelOptions({ latestBuild, isProdBuild }),
extensions,
babelHelpers: isWDS ? "inline" : "bundled",
}),
string({
// Import certain extensions as strings
include: [path.join(paths.polymer_dir, "node_modules/**/*.css")],
}),
replace(bundle.definedVars({ isProdBuild, latestBuild, defineOverlay })),
!isWDS &&
manifest({
publicPath,
}),
!isWDS && worker(),
!isWDS && dontHashPlugin({ dontHash }),
!isWDS && isProdBuild && terser(bundle.terserOptions({ latestBuild })),
!isWDS &&
isStatsBuild &&
visualizer({
// https://github.com/btd/rollup-plugin-visualizer#options
open: true,
sourcemap: true,
}),
].filter(Boolean),
},
/**
* @type { import("rollup").OutputOptions }
*/
outputOptions: {
// https://rollupjs.org/configuration-options/#output-dir
dir: outputPath,
// https://rollupjs.org/configuration-options/#output-format
format: latestBuild ? "es" : "systemjs",
// https://rollupjs.org/configuration-options/#output-externallivebindings
externalLiveBindings: false,
// https://rollupjs.org/configuration-options/#output-entryfilenames
// https://rollupjs.org/configuration-options/#output-chunkfilenames
// https://rollupjs.org/configuration-options/#output-assetfilenames
entryFileNames:
isProdBuild && !isStatsBuild ? "[name]-[hash].js" : "[name].js",
chunkFileNames: isProdBuild && !isStatsBuild ? "c.[hash].js" : "[name].js",
assetFileNames: isProdBuild && !isStatsBuild ? "a.[hash].js" : "[name].js",
// https://rollupjs.org/configuration-options/#output-sourcemap
sourcemap: isProdBuild ? true : "inline",
},
});
const createAppConfig = ({ isProdBuild, latestBuild, isStatsBuild, isWDS }) =>
createRollupConfig(
bundle.config.app({
isProdBuild,
latestBuild,
isStatsBuild,
isWDS,
})
);
const createDemoConfig = ({ isProdBuild, latestBuild, isStatsBuild }) =>
createRollupConfig(
bundle.config.demo({
isProdBuild,
latestBuild,
isStatsBuild,
})
);
const createCastConfig = ({ isProdBuild, latestBuild }) =>
createRollupConfig(bundle.config.cast({ isProdBuild, latestBuild }));
const createHassioConfig = ({ isProdBuild, latestBuild }) =>
createRollupConfig(bundle.config.hassio({ isProdBuild, latestBuild }));
const createGalleryConfig = ({ isProdBuild, latestBuild }) =>
createRollupConfig(bundle.config.gallery({ isProdBuild, latestBuild }));
module.exports = {
createAppConfig,
createDemoConfig,
createCastConfig,
createHassioConfig,
createGalleryConfig,
createRollupConfig,
};

build-scripts/rollup.js Normal file (151 lines)
View File

@@ -0,0 +1,151 @@
const path = require("path");
const commonjs = require("@rollup/plugin-commonjs");
const resolve = require("@rollup/plugin-node-resolve");
const json = require("@rollup/plugin-json");
const babel = require("rollup-plugin-babel");
const replace = require("@rollup/plugin-replace");
const visualizer = require("rollup-plugin-visualizer");
const { string } = require("rollup-plugin-string");
const { terser } = require("rollup-plugin-terser");
const manifest = require("./rollup-plugins/manifest-plugin");
const worker = require("./rollup-plugins/worker-plugin");
const dontHashPlugin = require("./rollup-plugins/dont-hash-plugin");
const ignore = require("./rollup-plugins/ignore-plugin");
const bundle = require("./bundle");
const paths = require("./paths");
const extensions = [".js", ".ts"];
/**
* @param {Object} arg
* @param { import("rollup").InputOption } arg.input
*/
const createRollupConfig = ({
entry,
outputPath,
defineOverlay,
isProdBuild,
latestBuild,
isStatsBuild,
publicPath,
dontHash,
}) => {
return {
/**
* @type { import("rollup").InputOptions }
*/
inputOptions: {
input: entry,
// Some entry points contain no JavaScript. This setting silences a warning about that.
// https://rollupjs.org/guide/en/#preserveentrysignatures
preserveEntrySignatures: false,
plugins: [
ignore({
files: bundle.emptyPackages({ latestBuild }),
}),
resolve({
extensions,
preferBuiltins: false,
browser: true,
rootDir: paths.polymer_dir,
}),
commonjs({
namedExports: {
"js-yaml": ["safeDump", "safeLoad"],
},
}),
json(),
babel({
...bundle.babelOptions({ latestBuild }),
extensions,
exclude: bundle.babelExclude(),
}),
string({
// Import certain extensions as strings
include: [path.join(paths.polymer_dir, "node_modules/**/*.css")],
}),
replace(
bundle.definedVars({ isProdBuild, latestBuild, defineOverlay })
),
manifest({
publicPath,
}),
worker(),
dontHashPlugin({ dontHash }),
isProdBuild && terser(bundle.terserOptions(latestBuild)),
isStatsBuild &&
visualizer({
// https://github.com/btd/rollup-plugin-visualizer#options
open: true,
sourcemap: true,
}),
],
},
/**
* @type { import("rollup").OutputOptions }
*/
outputOptions: {
// https://rollupjs.org/guide/en/#outputdir
dir: outputPath,
// https://rollupjs.org/guide/en/#outputformat
format: latestBuild ? "es" : "systemjs",
// https://rollupjs.org/guide/en/#outputexternallivebindings
externalLiveBindings: false,
// https://rollupjs.org/guide/en/#outputentryfilenames
// https://rollupjs.org/guide/en/#outputchunkfilenames
// https://rollupjs.org/guide/en/#outputassetfilenames
entryFileNames:
isProdBuild && !isStatsBuild ? "[name]-[hash].js" : "[name].js",
chunkFileNames:
isProdBuild && !isStatsBuild ? "c.[hash].js" : "[name].js",
assetFileNames:
isProdBuild && !isStatsBuild ? "a.[hash].js" : "[name].js",
// https://rollupjs.org/guide/en/#outputsourcemap
sourcemap: isProdBuild ? true : "inline",
},
};
};
const createAppConfig = ({ isProdBuild, latestBuild, isStatsBuild }) => {
return createRollupConfig(
bundle.config.app({
isProdBuild,
latestBuild,
isStatsBuild,
})
);
};
const createDemoConfig = ({ isProdBuild, latestBuild, isStatsBuild }) => {
return createRollupConfig(
bundle.config.demo({
isProdBuild,
latestBuild,
isStatsBuild,
})
);
};
const createCastConfig = ({ isProdBuild, latestBuild }) => {
return createRollupConfig(bundle.config.cast({ isProdBuild, latestBuild }));
};
const createHassioConfig = ({ isProdBuild, latestBuild }) => {
return createRollupConfig(bundle.config.hassio({ isProdBuild, latestBuild }));
};
const createGalleryConfig = ({ isProdBuild, latestBuild }) => {
return createRollupConfig(
bundle.config.gallery({ isProdBuild, latestBuild })
);
};
module.exports = {
createAppConfig,
createDemoConfig,
createCastConfig,
createHassioConfig,
createGalleryConfig,
};
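
Nothing in this file invokes Rollup itself. A sketch of how the returned options pair could be consumed with Rollup's JS API (the gulp tasks presumably wrap it along these lines; the paths assume the repository root as working directory):

const rollup = require("rollup");
const { createAppConfig } = require("./build-scripts/rollup");

async function buildApp() {
  const { inputOptions, outputOptions } = createAppConfig({
    isProdBuild: true,
    latestBuild: true,
    isStatsBuild: false,
  });
  const bundle = await rollup.rollup(inputOptions);
  await bundle.write(outputOptions);
}

buildApp().catch((err) => {
  console.error(err);
  process.exit(1);
});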

build-scripts/util.js Normal file (16 lines)
View File

@@ -0,0 +1,16 @@
const path = require("path");
const fs = require("fs");
// Helper function to map recursively over files in a folder and its subfolders
module.exports.mapFiles = function mapFiles(startPath, filter, mapFunc) {
const files = fs.readdirSync(startPath);
for (let i = 0; i < files.length; i++) {
const filename = path.join(startPath, files[i]);
const stat = fs.lstatSync(filename);
if (stat.isDirectory()) {
mapFiles(filename, filter, mapFunc);
} else if (filename.indexOf(filter) >= 0) {
mapFunc(filename);
}
}
};
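
A hypothetical usage of the helper above, pretty-printing every JSON file under a directory (the directory and filter are examples only; the filter is a plain substring match against the full path, not a glob):

const fs = require("fs");
const { mapFiles } = require("./build-scripts/util");

mapFiles("./translations", ".json", (file) => {
  const parsed = JSON.parse(fs.readFileSync(file, "utf-8"));
  fs.writeFileSync(file, JSON.stringify(parsed, null, 2));
});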

View File

@@ -1,292 +0,0 @@
const { existsSync } = require("fs");
const path = require("path");
const webpack = require("webpack");
const { StatsWriterPlugin } = require("webpack-stats-plugin");
const filterStats = require("@bundle-stats/plugin-webpack-filter").default;
const TerserPlugin = require("terser-webpack-plugin");
const { WebpackManifestPlugin } = require("webpack-manifest-plugin");
const log = require("fancy-log");
const WebpackBar = require("webpackbar");
const {
TransformAsyncModulesPlugin,
} = require("transform-async-modules-webpack-plugin");
const { dependencies } = require("../package.json");
const paths = require("./paths.cjs");
const bundle = require("./bundle.cjs");
class LogStartCompilePlugin {
ignoredFirst = false;
apply(compiler) {
compiler.hooks.beforeCompile.tap("LogStartCompilePlugin", () => {
if (!this.ignoredFirst) {
this.ignoredFirst = true;
return;
}
log("Changes detected. Starting compilation");
});
}
}
const createWebpackConfig = ({
name,
entry,
outputPath,
publicPath,
defineOverlay,
isProdBuild,
latestBuild,
isStatsBuild,
isTestBuild,
isHassioBuild,
dontHash,
}) => {
if (!dontHash) {
dontHash = new Set();
}
const ignorePackages = bundle.ignorePackages({ latestBuild });
return {
name,
mode: isProdBuild ? "production" : "development",
target: `browserslist:${latestBuild ? "modern" : "legacy"}`,
// For tests/CI, source maps are skipped to gain build speed
// For production, generate source maps for accurate stack traces without source code
// For development, generate "cheap" versions that can map to original line numbers
devtool: isTestBuild
? false
: isProdBuild
? "nosources-source-map"
: "eval-cheap-module-source-map",
entry,
node: false,
module: {
rules: [
{
test: /\.m?js$|\.ts$/,
use: (info) => ({
loader: "babel-loader",
options: {
...bundle.babelOptions({
latestBuild,
isProdBuild,
isTestBuild,
sw: info.issuerLayer === "sw",
}),
cacheDirectory: !isProdBuild,
cacheCompression: false,
},
}),
resolve: {
fullySpecified: false,
},
parser: {
worker: ["*context.audioWorklet.addModule()", "..."],
},
},
{
test: /\.css$/,
type: "asset/source",
},
],
},
optimization: {
minimizer: [
new TerserPlugin({
parallel: true,
extractComments: true,
terserOptions: bundle.terserOptions({ latestBuild, isTestBuild }),
}),
],
moduleIds: isProdBuild && !isStatsBuild ? "deterministic" : "named",
chunkIds: isProdBuild && !isStatsBuild ? "deterministic" : "named",
splitChunks: {
// Disable splitting for web workers and worklets because imports of
// external chunks are broken for:
// - ESM output: https://github.com/webpack/webpack/issues/17014
// - Worklets use `importScripts`: https://github.com/webpack/webpack/issues/11543
chunks: (chunk) =>
!chunk.canBeInitial() &&
!new RegExp(`^.+-work${latestBuild ? "(?:let|er)" : "let"}$`).test(
chunk.name
),
},
},
plugins: [
!isStatsBuild && new WebpackBar({ fancy: !isProdBuild }),
new WebpackManifestPlugin({
// Only include the JS of entrypoints
filter: (file) => file.isInitial && !file.name.endsWith(".map"),
}),
new webpack.DefinePlugin(
bundle.definedVars({ isProdBuild, latestBuild, defineOverlay })
),
new webpack.IgnorePlugin({
checkResource(resource, context) {
// Only use ignore to intercept imports that we don't control
// inside node_module dependencies.
if (
!context.includes("/node_modules/") ||
// calling define.amd will call require("!!webpack amd options")
resource.startsWith("!!webpack") ||
// loaded by webpack dev server but doesn't exist.
resource === "webpack/hot"
) {
return false;
}
let fullPath;
try {
fullPath = resource.startsWith(".")
? path.resolve(context, resource)
: require.resolve(resource);
} catch (err) {
console.error(
"Error in Home Assistant ignore plugin",
resource,
context
);
throw err;
}
return ignorePackages.some((toIgnorePath) =>
fullPath.startsWith(toIgnorePath)
);
},
}),
new webpack.NormalModuleReplacementPlugin(
new RegExp(
bundle.emptyPackages({ latestBuild, isHassioBuild }).join("|")
),
path.resolve(paths.polymer_dir, "src/util/empty.js")
),
!isProdBuild && new LogStartCompilePlugin(),
isProdBuild &&
new StatsWriterPlugin({
filename: path.relative(
outputPath,
path.join(paths.build_dir, "stats", `${name}.json`)
),
stats: { assets: true, chunks: true, modules: true },
transform: (stats) => JSON.stringify(filterStats(stats)),
}),
!latestBuild &&
new TransformAsyncModulesPlugin({
browserslistEnv: "legacy",
runtime: { version: dependencies["@babel/runtime"] },
}),
].filter(Boolean),
resolve: {
extensions: [".ts", ".js", ".json"],
alias: {
"lit/static-html$": "lit/static-html.js",
"lit/decorators$": "lit/decorators.js",
"lit/directive$": "lit/directive.js",
"lit/directives/until$": "lit/directives/until.js",
"lit/directives/class-map$": "lit/directives/class-map.js",
"lit/directives/style-map$": "lit/directives/style-map.js",
"lit/directives/if-defined$": "lit/directives/if-defined.js",
"lit/directives/guard$": "lit/directives/guard.js",
"lit/directives/cache$": "lit/directives/cache.js",
"lit/directives/repeat$": "lit/directives/repeat.js",
"lit/directives/live$": "lit/directives/live.js",
"lit/polyfill-support$": "lit/polyfill-support.js",
"@lit-labs/virtualizer/layouts/grid":
"@lit-labs/virtualizer/layouts/grid.js",
"@lit-labs/virtualizer/polyfills/resize-observer-polyfill/ResizeObserver":
"@lit-labs/virtualizer/polyfills/resize-observer-polyfill/ResizeObserver.js",
"@lit-labs/observers/resize-controller":
"@lit-labs/observers/resize-controller.js",
},
},
output: {
module: latestBuild,
filename: ({ chunk }) =>
!isProdBuild || isStatsBuild || dontHash.has(chunk.name)
? "[name].js"
: "[name].[contenthash].js",
chunkFilename:
isProdBuild && !isStatsBuild ? "[name].[contenthash].js" : "[name].js",
assetModuleFilename:
isProdBuild && !isStatsBuild ? "[id].[contenthash][ext]" : "[id][ext]",
crossOriginLoading: "use-credentials",
hashFunction: "xxhash64",
hashDigest: "base64url",
hashDigestLength: 11, // full length of 64 bit base64url
path: outputPath,
publicPath,
// To silence warning in worker plugin
globalObject: "self",
// Since production source maps don't include sources, we need to point to them elsewhere
// For dependencies, just provide the path (no source in browser)
// Otherwise, point to the raw code on GitHub for browser to load
...Object.fromEntries(
["", "Fallback"].map((v) => [
`devtool${v}ModuleFilenameTemplate`,
!isTestBuild && isProdBuild
? (info) => {
if (
!path.isAbsolute(info.absoluteResourcePath) ||
!existsSync(info.resourcePath) ||
info.resourcePath.startsWith("./node_modules")
) {
// Source URLs are unknown for dependencies, so we use a relative URL with a
// non-existent top directory. This results in a clean source tree in browser
// dev tools, and they stay happy getting 404s with valid requests.
return `/unknown${path.resolve("/", info.resourcePath)}`;
}
return new URL(info.resourcePath, bundle.sourceMapURL()).href;
}
: undefined,
])
),
},
experiments: {
layers: true,
outputModule: true,
},
};
};
const createAppConfig = ({
isProdBuild,
latestBuild,
isStatsBuild,
isTestBuild,
}) =>
createWebpackConfig(
bundle.config.app({ isProdBuild, latestBuild, isStatsBuild, isTestBuild })
);
const createDemoConfig = ({ isProdBuild, latestBuild, isStatsBuild }) =>
createWebpackConfig(
bundle.config.demo({ isProdBuild, latestBuild, isStatsBuild })
);
const createCastConfig = ({ isProdBuild, latestBuild }) =>
createWebpackConfig(bundle.config.cast({ isProdBuild, latestBuild }));
const createHassioConfig = ({
isProdBuild,
latestBuild,
isStatsBuild,
isTestBuild,
}) =>
createWebpackConfig(
bundle.config.hassio({
isProdBuild,
latestBuild,
isStatsBuild,
isTestBuild,
})
);
const createGalleryConfig = ({ isProdBuild, latestBuild }) =>
createWebpackConfig(bundle.config.gallery({ isProdBuild, latestBuild }));
module.exports = {
createAppConfig,
createDemoConfig,
createCastConfig,
createHassioConfig,
createGalleryConfig,
createWebpackConfig,
};

build-scripts/webpack.js Normal file (159 lines)
View File

@@ -0,0 +1,159 @@
const webpack = require("webpack");
const path = require("path");
const TerserPlugin = require("terser-webpack-plugin");
const ManifestPlugin = require("webpack-manifest-plugin");
const WorkerPlugin = require("worker-plugin");
const paths = require("./paths.js");
const bundle = require("./bundle");
const createWebpackConfig = ({
entry,
outputPath,
publicPath,
defineOverlay,
isProdBuild,
latestBuild,
isStatsBuild,
dontHash,
}) => {
if (!dontHash) {
dontHash = new Set();
}
const ignorePackages = bundle.ignorePackages({ latestBuild });
return {
mode: isProdBuild ? "production" : "development",
devtool: isProdBuild
? "cheap-module-source-map"
: "eval-cheap-module-source-map",
entry,
node: false,
module: {
rules: [
{
test: /\.js$|\.ts$/,
exclude: bundle.babelExclude(),
use: {
loader: "babel-loader",
options: bundle.babelOptions({ latestBuild }),
},
},
{
test: /\.css$/,
use: "raw-loader",
},
],
},
optimization: {
minimizer: [
new TerserPlugin({
cache: true,
parallel: true,
extractComments: true,
sourceMap: true,
terserOptions: bundle.terserOptions(latestBuild),
}),
],
},
plugins: [
new WorkerPlugin(),
new ManifestPlugin({
// Only include the JS of entrypoints
filter: (file) => file.isInitial && !file.name.endsWith(".map"),
}),
new webpack.DefinePlugin(
bundle.definedVars({ isProdBuild, latestBuild, defineOverlay })
),
new webpack.IgnorePlugin({
checkResource(resource, context) {
// Only use ignore to intercept imports that we don't control
// inside node_module dependencies.
if (
!context.includes("/node_modules/") ||
// calling define.amd will call require("!!webpack amd options")
resource.startsWith("!!webpack") ||
// loaded by webpack dev server but doesn't exist.
resource === "webpack/hot"
) {
return false;
}
let fullPath;
try {
fullPath = resource.startsWith(".")
? path.resolve(context, resource)
: require.resolve(resource);
} catch (err) {
console.error(
"Error in Home Assistant ignore plugin",
resource,
context
);
throw err;
}
return ignorePackages.some((toIgnorePath) =>
fullPath.startsWith(toIgnorePath)
);
},
}),
new webpack.NormalModuleReplacementPlugin(
new RegExp(bundle.emptyPackages({ latestBuild }).join("|")),
path.resolve(paths.polymer_dir, "src/util/empty.js")
),
],
resolve: {
extensions: [".ts", ".js", ".json"],
},
output: {
filename: ({ chunk }) => {
if (!isProdBuild || dontHash.has(chunk.name)) {
return `${chunk.name}.js`;
}
return `${chunk.name}.${chunk.hash.substr(0, 8)}.js`;
},
chunkFilename:
isProdBuild && !isStatsBuild
? "chunk.[chunkhash].js"
: "[name].chunk.js",
path: outputPath,
publicPath,
// To silence warning in worker plugin
globalObject: "self",
},
};
};
const createAppConfig = ({ isProdBuild, latestBuild, isStatsBuild }) => {
return createWebpackConfig(
bundle.config.app({ isProdBuild, latestBuild, isStatsBuild })
);
};
const createDemoConfig = ({ isProdBuild, latestBuild, isStatsBuild }) => {
return createWebpackConfig(
bundle.config.demo({ isProdBuild, latestBuild, isStatsBuild })
);
};
const createCastConfig = ({ isProdBuild, latestBuild }) => {
return createWebpackConfig(bundle.config.cast({ isProdBuild, latestBuild }));
};
const createHassioConfig = ({ isProdBuild, latestBuild }) => {
return createWebpackConfig(
bundle.config.hassio({ isProdBuild, latestBuild })
);
};
const createGalleryConfig = ({ isProdBuild, latestBuild }) => {
return createWebpackConfig(
bundle.config.gallery({ isProdBuild, latestBuild })
);
};
module.exports = {
createAppConfig,
createDemoConfig,
createCastConfig,
createHassioConfig,
createGalleryConfig,
};
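
As with the Rollup variant, a short sketch of feeding one of these configs to the webpack Node API (the flag values are illustrative):

const webpack = require("webpack");
const { createAppConfig } = require("./build-scripts/webpack");

webpack(
  createAppConfig({ isProdBuild: false, latestBuild: true, isStatsBuild: false }),
  (err, stats) => {
    if (err || stats.hasErrors()) {
      console.error(err || stats.toString("errors-only"));
      process.exit(1);
    }
    console.log(stats.toString({ colors: true }));
  }
);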

View File

@@ -1,9 +0,0 @@
# These redirects are handled by Netlify
#
# Some custom cards are not prefixing the instance URL when fetching data
# and can end up fetching the data from the Cast domain instead of HA.
# This will make sure that some common ones are replaced with a placeholder.
/api/camera_proxy/* /images/google-nest-hub.png
/api/camera_proxy_stream/* /images/google-nest-hub.png
/api/media_player_proxy/* /images/google-nest-hub.png

Binary file not shown (image; before 15 KiB, after 18 KiB).

Binary file not shown (image; 186 KiB before and after).

Binary file not shown (image; 68 KiB before and after).

Binary file not shown (image; before 9.8 KiB, no after version).

View File

@@ -0,0 +1,3 @@
self.addEventListener("fetch", function(event) {
event.respondWith(fetch(event.request));
});

View File

@@ -1,5 +0,0 @@
"use strict";
self.addEventListener("fetch", (event) => {
event.respondWith(fetch(event.request));
});

View File

@@ -1,3 +0,0 @@
self.addEventListener("fetch", (event) => {
event.respondWith(fetch(event.request));
});

View File

@@ -1,5 +1,5 @@
import rollup from "../build-scripts/rollup.cjs";
import env from "../build-scripts/env.cjs";
const rollup = require("../build-scripts/rollup.js");
const env = require("../build-scripts/env.js");
const config = rollup.createCastConfig({
isProdBuild: env.isProdBuild(),
@@ -7,4 +7,4 @@ const config = rollup.createCastConfig({
isStatsBuild: env.isStatsBuild(),
});
export default { ...config.inputOptions, output: config.outputOptions };
module.exports = { ...config.inputOptions, output: config.outputOptions };

View File

@@ -1,24 +0,0 @@
<meta property="fb:app_id" content="338291289691179" />
<meta property="og:title" content="Home Assistant Cast" />
<meta property="og:site_name" content="Home Assistant Cast" />
<meta property="og:url" content="https://cast.home-assistant.io/" />
<meta property="og:type" content="website" />
<meta
property="og:description"
content="Show Home Assistant on your Chromecast or Google Assistant devices with a screen."
/>
<meta
property="og:image"
content="https://cast.home-assistant.io/images/google-nest-hub.png"
/>
<meta name="twitter:card" content="summary_large_image" />
<meta name="twitter:site" content="@home_assistant" />
<meta name="twitter:title" content="Home Assistant Cast" />
<meta
name="twitter:description"
content="Show Home Assistant on your Chromecast or Google Assistant devices with a screen."
/>
<meta
name="twitter:image"
content="https://cast.home-assistant.io/images/google-nest-hub.png"
/>

View File

@@ -1,29 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>Home Assistant Cast</title>
<link rel="manifest" href="/manifest.json" />
<link rel="icon" href="/images/ha-cast-icon.png" type="image/png" />
<%= renderTemplate("../../../src/html/_style_base.html.template") %>
<style>
body {
background-color: #e5e5e5;
}
</style>
<%= renderTemplate("_social_meta.html.template") %>
</head>
<body>
<hc-connect></hc-connect>
<%= renderTemplate("../../../src/html/_js_base.html.template") %>
<%= renderTemplate("../../../src/html/_script_loader.html.template") %>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-57927901-9', 'auto');
ga('send', 'pageview', location.pathname.includes("auth_callback") === -1 ? location.pathname : "/");
</script>
</body>
</html>

View File

@@ -3,7 +3,7 @@
<head>
<title>Home Assistant Cast - FAQ</title>
<link rel="icon" href="/images/ha-cast-icon.png" type="image/png" />
<%= renderTemplate("../../../src/html/_style_base.html.template") %>
<%= renderTemplate('_style_base') %>
<style>
body {
background-color: #e5e5e5;
@@ -35,8 +35,27 @@
/>
</head>
<body>
<%= renderTemplate("../../../src/html/_js_base.html.template") %>
<%= renderTemplate("../../../src/html/_script_loader.html.template") %>
<%= renderTemplate('_js_base') %>
<script type="module" crossorigin="use-credentials">
import "<%= latestLauncherJS %>";
</script>
<script nomodule>
(function() {
// Safari 10.1 supports type=module but ignores nomodule, so we add this check.
if (!isS101) {
<% if (useRollup) { %>
_ls("/static/js/s.min.js").onload = function() {
System.import("<%= es5LauncherJS %>");
};
<% } else { %>
_ls("<%= es5LauncherJS %>");
<% } %>
}
})();
</script>
<hc-layout subtitle="FAQ">
<style>
a {
@@ -122,7 +141,7 @@
Your authentication credentials or Home Assistant url are never sent
to the Cloud. You can validate this behavior in
<a
href="https://github.com/home-assistant/frontend/tree/dev/cast"
href="https://github.com/home-assistant/home-assistant-polymer/tree/dev/cast"
target="_blank"
>the source code</a
>.
@@ -139,7 +158,7 @@
</p>
</div>
<div class="section-header">What does Home Assistant Cast do?</div>
<div class="section-header">Wat does Home Assistant Cast do?</div>
<div class="card-content">
<p>
Home Assistant Cast is a receiver application for the Chromecast. When
@@ -195,8 +214,13 @@
Chromecast is a technology developed by Google, and is available on:
</p>
<ul>
<li>Google Chrome (all platforms except iOS)</li>
<li>Microsoft Edge (all platforms except iOS)</li>
<li>Google Chrome (all platforms except on iOS)</li>
<li>
Microsoft Edge (all platforms,
<a href="https://www.microsoftedgeinsider.com" target="_blank"
>dev and canary builds only</a
>)
</li>
</ul>
</div>
@@ -226,5 +250,17 @@ http:
</p>
</div>
</hc-layout>
<script>
var _gaq = [["_setAccount", "UA-57927901-9"], ["_trackPageview"]];
(function(d, t) {
var g = d.createElement(t),
s = d.getElementsByTagName(t)[0];
g.src =
("https:" == location.protocol ? "//ssl" : "//www") +
".google-analytics.com/ga.js";
s.parentNode.insertBefore(g, s);
})(document, "script");
</script>
</body>
</html>

Some files were not shown because too many files have changed in this diff.