Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-20 14:39:21 +00:00)

Compare commits

122 Commits
- 49ca923e51
- 7ad22e0399
- bb8acc6065
- c0fa4a19e9
- 3f1741dd18
- 9ef02e4110
- 15a6f38ebb
- 227f2e5a21
- 517d6ee981
- 184eeb7f49
- a3555c74e8
- 657bafd458
- 2ad5df420c
- b24d489ec5
- 6bb0210f1f
- c1de50266a
- 562e02bc64
- f71ec7913a
- d98baaf660
- 72db591576
- 509a37fc04
- 17f62b6e86
- b09aee7644
- babcc0de0c
- 5cc47c9222
- 833559a3b3
- b8a976b344
- 10b14132b9
- 18953f0b7c
- 19f5fba3aa
- 636bc3e61a
- 521037e1a6
- e024c3e38d
- 6a0206c1e7
- 69a8a83528
- 0307d700fa
- 919c383b41
- d331af4d5a
- b5467d3c23
- 3ef0040d66
- ec4dfd2172
- 8f54d7c8e9
- b59e709dc0
- b236e6c886
- 8acbb7d6f0
- 49e4bc9381
- 36106cc08d
- 8f1763abe2
- 480eebc6cb
- dccfffd979
- 1a978f4762
- 1fbc8f4060
- db3fc1421c
- 9ecd03db0e
- f111ccb1b6
- a32341cc5d
- f73e277230
- 8d8587ca29
- 88eb9511bf
- e1068997ea
- 560e04c64a
- 621ec03971
- d14a47d3f7
- 3e9de0c210
- 01a6e074a5
- 1434077f4e
- 3922175af1
- ec6852a8d7
- b0b908b4ae
- 5a00336ef1
- d53f5e21f4
- bd173fa333
- 6b32fa31b6
- d1dba89e39
- b2f2806465
- 3b70cd58a3
- 6769bfd824
- bff4af2534
- aabf575ac5
- 4aacaf6bd6
- 268070e89c
- 4fa2134cc6
- 97c35de49a
- 32fb550969
- 3457441929
- 32f0fc7a46
- c891a8f164
- 991764af94
- 1df447272e
- f032ae757b
- e529b2859e
- d1cb2368fa
- 7b812184d1
- 4f7ce1c6ee
- 44a5ac63db
- 0989ee88cc
- ba8b72c2c8
- 2dbb4583f4
- 81ad42e029
- 5d3695f8ba
- c771694aa0
- 1ac0fd4c10
- 631ff8caef
- 7382182132
- 4ff9da68ef
- dee2998ee3
- 1d43236211
- 792bc610a3
- 7a51c828c2
- fab6fcd5ac
- c8e00ba160
- 6245b6d823
- 0c55bf20fc
- f8fd7b5933
- 6462eea2ef
- 3d79891249
- 80763c4bbf
- 59102afd45
- 0b085354db
- e2a473baa3
- 06e10fdd3c
- fb4386a7ad
```diff
@@ -2,6 +2,11 @@ FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8
 
 WORKDIR /workspaces
 
+# Set Docker daemon config
+RUN \
+    mkdir -p /etc/docker \
+    && echo '{"storage-driver": "vfs"}' > /etc/docker/daemon.json
+
 # Install Node/Yarn for Frontent
 RUN apt-get update && apt-get install -y --no-install-recommends \
     curl \
```
```diff
@@ -5,6 +5,7 @@
   "appPort": "9123:8123",
   "postCreateCommand": "pre-commit install",
   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
+  "containerEnv": {"NVM_DIR":"/usr/local/share/nvm"},
   "extensions": [
     "ms-python.python",
     "ms-python.vscode-pylance",
```
.github/ISSUE_TEMPLATE/BUG_REPORT.md (vendored, 22 changes)

````diff
@@ -1,5 +1,5 @@
 ---
-name: Report a bug with the Supervisor
+name: Report a bug with the Supervisor on a supported System
 about: Report an issue related to the Home Assistant Supervisor.
 labels: bug
 ---
@@ -11,6 +11,10 @@ labels: bug
 - If you have a problem with an add-on, make an issue in it's repository.
 -->
 
+<!--
+Important: You can only fill a bug repport for an supported system! If you run an unsupported installation. This report would be closed without comment.
+-->
+
 ### Describe the issue
 
 <!-- Provide as many details as possible. -->
@@ -47,3 +51,19 @@ Paste supervisor logs here
 ```
 
 </details>
 
+### System Information
+
+<details>
+<summary>System Information</summary>
+<!--
+- Use this command: ha info
+-->
+
+```
+Paste system info here
+
+```
+
+</details>
+
````
.github/ISSUE_TEMPLATE/config.yml (vendored, 4 changes)

```diff
@@ -1,5 +1,9 @@
 blank_issues_enabled: false
 contact_links:
+  - name: Report a bug/issues with an unsupported Supervisor
+    url: https://community.home-assistant.io
+    about: The Community guide can help or was updated to solve your issue
+
   - name: Report a bug for the Supervisor panel
     url: https://github.com/home-assistant/frontend/issues
     about: The Supervisor panel is a part of the Home Assistant frontend
```
.github/ISSUE_TEMPLATE/z_bug_report_form.yml (vendored, new file, 80 lines)

```yaml
name: Bug Report Form
about: Report an issue related to the Home Assistant Supervisor.
labels: bug
title: ""
issue_body: true
inputs:
  - type: description
    attributes:
      value: |
        This issue form is for reporting bugs with **supported** setups only!

        If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].

        [fr]: https://community.home-assistant.io/c/feature-requests
  - type: input
    attributes:
      label: What is the version of the Supervisor used?
      required: true
      placeholder: supervisor-
      description: >
        Can be found in the Supervisor panel -> System tab. Starts with
        `supervisor-....`.
  - type: dropdown
    attributes:
      label: What type of installation are you running?
      required: true
      description: >
        If you don't know, you can find it in: Configuration panel -> Info.
      choices:
        - Home Assistant OS
        - Home Assistant Supervised
  - type: dropdown
    attributes:
      label: Which operating system are you running on?
      required: true
      choices:
        - Home Assistant Operating System
        - Debian
        - Other (e.g., Raspbian/Raspberry Pi OS/Fedora)
  - type: input
    attributes:
      label: What is the version of your installed operating system?
      required: true
      placeholder: 5.10
      description: Can be found in the Supervisor panel -> System tab.
  - type: input
    attributes:
      label: What version of Home Assistant Core is installed?
      required: true
      placeholder: core-
      description: >
        Can be found in the Supervisor panel -> System tab. Starts with
        `core-....`.
  - type: textarea
    attributes:
      label: Describe the issue you are experiencing
      required: true
      description: Provide a clear and concise description of what the bug is.
  - type: textarea
    attributes:
      label: Steps to reproduce the issue
      required: true
      description: |
        Please tell us exactly how to reproduce your issue.
        Provide clear and concise step by step instructions and add code snippets if needed.
      value: |
        1.
        2.
        3.
        ...
  - type: textarea
    attributes:
      label: Anything in the Supervisor logs that might be useful for us?
      required: false
      description: >
        The Supervisor logs can be found in the Supervisor panel -> System tab.
  - type: description
    attributes:
      value: |
        If you have any additional information for us, use the field below.
        Please note, you can attach screenshots or screen recordings here.
```
.github/release-drafter.yml (vendored, 47 changes)

```diff
@@ -1,4 +1,49 @@
 change-template: "- #$NUMBER $TITLE @$AUTHOR"
 sort-direction: ascending
 
 categories:
   - title: ":boom: Breaking Changes"
     label: "breaking-change"
 
   - title: ":wrench: Build"
     label: "build"
 
   - title: ":boar: Chore"
     label: "chore"
 
   - title: ":sparkles: New Features"
     label: "new-feature"
 
   - title: ":zap: Performance"
     label: "performance"
 
   - title: ":recycle: Refactor"
     label: "refactor"
 
   - title: ":green_heart: CI"
     label: "ci"
 
   - title: ":bug: Bug Fixes"
     label: "bugfix"
 
   - title: ":white_check_mark: Test"
     label: "test"
 
   - title: ":arrow_up: Dependency Updates"
     label: "dependencies"
 
 include-labels:
   - "breaking-change"
   - "build"
   - "chore"
   - "performance"
   - "refactor"
   - "new-feature"
   - "bugfix"
   - "dependencies"
   - "test"
   - "ci"
 
 template: |
   ## What's Changed
 
   $CHANGES
```
.github/workflows/builder.yml (vendored, 31 changes)

```diff
@@ -35,6 +35,7 @@ on:
 env:
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor
+  WHEELS_TAG: 3.8-alpine3.12
 
 jobs:
   init:
@@ -45,6 +46,7 @@ jobs:
       version: ${{ steps.version.outputs.version }}
       channel: ${{ steps.version.outputs.channel }}
       publish: ${{ steps.version.outputs.publish }}
+      requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
         uses: actions/checkout@v2
@@ -61,6 +63,18 @@ jobs:
         with:
           type: ${{ env.BUILD_TYPE }}
 
+      - name: Get changed files
+        id: changed_files
+        if: steps.version.outputs.publish == 'false'
+        uses: jitterbit/get-changed-files@v1
+
+      - name: Check if requirements files changed
+        id: requirements
+        run: |
+          if [[ "${{ steps.changed_files.outputs.all }}" =~ requirements.txt ]]; then
+            echo "::set-output name=changed::true"
+          fi
+
   build:
     name: Build ${{ matrix.arch }} supervisor
     needs: init
@@ -74,6 +88,19 @@ jobs:
         with:
           fetch-depth: 0
 
+      - name: Build wheels
+        if: needs.init.outputs.requirements == 'true'
+        uses: home-assistant/wheels@master
+        with:
+          tag: ${{ env.WHEELS_TAG }}
+          arch: ${{ matrix.arch }}
+          wheels-host: ${{ secrets.WHEELS_HOST }}
+          wheels-key: ${{ secrets.WHEELS_KEY }}
+          wheels-user: wheels
+          apk: "build-base;libffi-dev;openssl-dev"
+          skip-binary: aiohttp
+          requirements: "requirements.txt"
+
       - name: Set version
         if: needs.init.outputs.publish == 'true'
         uses: home-assistant/actions/helpers/version@master
@@ -92,7 +119,7 @@ jobs:
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
 
       - name: Build supervisor
-        uses: home-assistant/builder@2020.11.0
+        uses: home-assistant/builder@2021.01.1
         with:
           args: |
             $BUILD_ARGS \
@@ -134,7 +161,7 @@ jobs:
         uses: actions/checkout@v2
 
       - name: Build the Supervisor
-        uses: home-assistant/builder@2020.11.0
+        uses: home-assistant/builder@2021.01.1
        with:
          args: |
            --test \
```
.github/workflows/check_pr_labels.yml (vendored, new file, 19 lines)

```yaml
name: Check PR

on:
  pull_request:
    branches: ["main"]
    types: [labeled, unlabeled, synchronize]

jobs:
  init:
    name: Check labels
    runs-on: ubuntu-latest
    steps:
      - name: Check labels
        run: |
          labels=$(jq -r '.pull_request.labels[] | .name' ${{github.event_path }})
          echo "$labels"
          if [ "$labels" == "cla-signed" ]; then
            exit 1
          fi
```
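The jq one-liner reads the webhook payload that GitHub writes to the path in `$GITHUB_EVENT_PATH` and fails the job when `cla-signed` is the only label on the pull request. A minimal Python sketch of the same check, assuming the standard pull_request event payload layout (illustration only, not part of the repository):

```python
import json
import os
import sys

# GitHub writes the full webhook payload to the file named by GITHUB_EVENT_PATH.
with open(os.environ["GITHUB_EVENT_PATH"], encoding="utf-8") as handle:
    event = json.load(handle)

# Collect the label names attached to the pull request.
labels = [label["name"] for label in event["pull_request"]["labels"]]
print(labels)

# Mirror the shell check: fail when "cla-signed" is the only label present.
if labels == ["cla-signed"]:
    sys.exit(1)
```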
.github/workflows/ci.yaml (vendored, 24 changes)

```diff
@@ -25,7 +25,7 @@ jobs:
         uses: actions/checkout@v2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.2.1
         with:
           python-version: ${{ matrix.python-version }}
       - name: Restore Python virtual environment
@@ -68,7 +68,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.2.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -112,7 +112,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.2.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -156,7 +156,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.2.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -188,7 +188,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.2.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -229,7 +229,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.2.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -273,7 +273,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.2.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -305,7 +305,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.2.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -349,7 +349,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v2
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.2.1
         id: python
         with:
           python-version: ${{ matrix.python-version }}
@@ -390,7 +390,7 @@ jobs:
           -o console_output_style=count \
           tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v2.2.1
+        uses: actions/upload-artifact@v2.2.2
         with:
           name: coverage-${{ matrix.python-version }}
           path: .coverage
@@ -403,7 +403,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.2.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -428,4 +428,4 @@ jobs:
           coverage report
           coverage xml
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v1.0.15
+        uses: codecov/codecov-action@v1.2.1
```
.github/workflows/lock.yml (vendored, 2 changes)

```diff
@@ -9,7 +9,7 @@ jobs:
   lock:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v2.0.1
+      - uses: dessant/lock-threads@v2.0.3
        with:
          github-token: ${{ github.token }}
          issue-lock-inactive-days: "30"
```
.github/workflows/release-drafter.yml (vendored, 33 changes)

```diff
@@ -2,14 +2,43 @@ name: Release Drafter
 
 on:
   push:
     # branches to consider in the event; optional, defaults to all
     branches:
       - main
 
 jobs:
   update_release_draft:
     runs-on: ubuntu-latest
     name: Release Drafter
     steps:
-      - uses: release-drafter/release-drafter@v5
+      - name: Checkout the repository
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+
+      - name: Find Next Version
+        id: version
+        run: |
+          declare -i newpost
+          latest=$(git describe --tags $(git rev-list --tags --max-count=1))
+          latestpre=$(echo "$latest" | awk '{split($0,a,"."); print a[1] "." a[2]}')
+          datepre=$(date --utc '+%Y.%m')
+
+          if [[ "$latestpre" == "$datepre" ]]; then
+            latestpost=$(echo "$latest" | awk '{split($0,a,"."); print a[3]}')
+            newpost=$latestpost+1
+          else
+            newpost=0
+          fi
+
+          echo Current version: $latest
+          echo New target version: $datepre.$newpost
+          echo "::set-output name=version::$datepre.$newpost"
+
+      - name: Run Release Drafter
+        uses: release-drafter/release-drafter@v5
+        with:
+          tag: ${{ steps.version.outputs.version }}
+          name: ${{ steps.version.outputs.version }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
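The awk/date logic added above computes the next CalVer tag: keep the `YYYY.MM` prefix and bump the trailing counter when the latest tag is from the current month, otherwise restart the counter at `.0` for the new month. A minimal Python sketch of the same computation (hypothetical helper, not part of the repository):

```python
from datetime import datetime, timezone

def next_calver(latest_tag: str) -> str:
    """Compute the next YYYY.MM.N tag, mirroring the workflow's awk/date logic."""
    latest_prefix = ".".join(latest_tag.split(".")[:2])        # e.g. "2021.01"
    current_prefix = datetime.now(timezone.utc).strftime("%Y.%m")
    if latest_prefix == current_prefix:
        # Same month: bump the trailing counter.
        patch = int(latest_tag.split(".")[2]) + 1
    else:
        # New month: restart the counter at zero.
        patch = 0
    return f"{current_prefix}.{patch}"

# e.g. next_calver("2021.01.4") -> "2021.01.5" when run in January 2021
print(next_calver("2021.01.4"))
```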
.github/workflows/stale.yml (vendored, 2 changes)

```diff
@@ -9,7 +9,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v3.0.14
+      - uses: actions/stale@v3.0.15
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 60
```
.vscode/tasks.json (vendored, 22 changes)

```diff
@@ -2,9 +2,9 @@
   "version": "2.0.0",
   "tasks": [
     {
-      "label": "Run Testenv",
+      "label": "Run Supervisor",
       "type": "shell",
-      "command": "./scripts/test_env.sh",
+      "command": "./scripts/run-supervisor.sh",
       "group": {
         "kind": "test",
         "isDefault": true
@@ -16,7 +16,21 @@
       "problemMatcher": []
     },
     {
-      "label": "Run Testenv CLI",
+      "label": "Build Supervisor",
+      "type": "shell",
+      "command": "./scripts/build-supervisor.sh",
+      "group": {
+        "kind": "build",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Run Supervisor CLI",
       "type": "shell",
       "command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
       "group": {
@@ -30,7 +44,7 @@
       "problemMatcher": []
     },
     {
-      "label": "Update UI",
+      "label": "Update Supervisor Panel",
       "type": "shell",
       "command": "./scripts/update-frontend.sh",
       "group": {
```
README.md (29 changes)

```diff
@@ -10,26 +10,23 @@ network settings or installing and updating software.
 
 ## Installation
 
-Installation instructions can be found at https://home-assistant.io/hassio.
+Installation instructions can be found at https://home-assistant.io/getting-started.
 
 ## Development
 
-The development of the Supervisor is not difficult but tricky.
-
-- You can use the builder to create your Supervisor: https://github.com/home-assistant/hassio-builder
-- Access a HassOS device or VM and pull your Supervisor.
-- Set the developer modus with the CLI tool: `ha supervisor options --channel=dev`
-- Tag it as `homeassistant/xy-hassio-supervisor:latest`
-- Restart the service with `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
-- Test your changes
-
-For small bugfixes or improvements, make a PR. For significant changes open a RFC first, please. Thanks.
+For small changes and bugfixes you can just follow this, but for significant changes open a RFC first.
+Development instructions can be found [here][development].
 
 ## Release
 
-Follow is the relase circle process:
-
-1. Merge master into dev / make sure version stay on dev
-2. Merge dev into master
-3. Bump the release on master
-4. Create a GitHub Release from master with the right version tag
+Releases are done in 3 stages (channels) with this structure:
+
+1. Pull requests are merged to the `main` branch.
+2. A new build is pushed to the `dev` stage.
+3. Releases are published.
+4. A new build is pushed to the `beta` stage.
+5. The [`stable.json`][stable] file is updated.
+6. The build that was pushed to `beta` will now be pushed to `stable`.
+
+[development]: https://developers.home-assistant.io/docs/supervisor/development
+[stable]: https://github.com/home-assistant/version/blob/master/stable.json
```
|
@@ -1,26 +0,0 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- main
|
||||
pr: none
|
||||
variables:
|
||||
- name: versionWheels
|
||||
value: "1.13.0-3.8-alpine3.12"
|
||||
resources:
|
||||
repositories:
|
||||
- repository: azure
|
||||
type: github
|
||||
name: "home-assistant/ci-azure"
|
||||
endpoint: "home-assistant"
|
||||
|
||||
jobs:
|
||||
- template: templates/azp-job-wheels.yaml@azure
|
||||
parameters:
|
||||
builderVersion: "$(versionWheels)"
|
||||
builderApk: "build-base;libffi-dev;openssl-dev"
|
||||
builderPip: "Cython"
|
||||
skipBinary: "aiohttp"
|
||||
wheelsRequirement: "requirements.txt"
|
Submodule home-assistant-polymer updated: 1d367eca69...a9192ae2e1
```diff
@@ -2,19 +2,19 @@ aiohttp==3.7.3
 async_timeout==3.0.1
 atomicwrites==1.4.0
 attrs==20.3.0
+awesomeversion==21.2.0
 brotli==1.0.9
 cchardet==2.1.7
-colorlog==4.6.2
+colorlog==4.7.2
 cpe==1.2.1
-cryptography==3.2.1
-debugpy==1.2.0
-docker==4.4.0
-gitpython==3.1.11
-jinja2==2.11.2
-packaging==20.4
+cryptography==3.3.1
+debugpy==1.2.1
+docker==4.4.1
+gitpython==3.1.12
+jinja2==2.11.3
 pulsectl==20.5.1
-pytz==2020.4
+pytz==2021.1
 pyudev==0.22.0
 ruamel.yaml==0.15.100
-sentry-sdk==0.19.4
-voluptuous==0.12.0
+sentry-sdk==0.19.5
+voluptuous==0.12.1
```
```diff
@@ -1,14 +1,14 @@
 black==20.8b1
-codecov==2.1.10
-coverage==5.3
+codecov==2.1.11
+coverage==5.4
 flake8-docstrings==1.5.0
 flake8==3.8.4
-pre-commit==2.9.2
+pre-commit==2.10.0
 pydocstyle==5.1.1
 pylint==2.6.0
 pytest-aiohttp==0.3.0
 pytest-asyncio==0.12.0 # NB!: Versions over 0.12.0 breaks pytest-aiohttp (https://github.com/aio-libs/pytest-aiohttp/issues/16)
-pytest-cov==2.10.1
+pytest-cov==2.11.1
 pytest-timeout==1.4.2
-pytest==6.1.2
-pyupgrade==2.7.4
+pytest==6.2.2
+pyupgrade==2.9.0
```
```diff
@@ -2,9 +2,17 @@
 # ==============================================================================
 # Start udev service
 # ==============================================================================
 
 if bashio::fs.directory_exists /run/udev && ! bashio::fs.file_exists /run/.old_udev; then
     bashio::log.info "Using udev information from host"
     bashio::exit.ok
 fi
 
 bashio::log.info "Setup udev backend inside container"
 udevd --daemon
 
 bashio::log.info "Update udev information"
 touch /run/.old_udev
 if udevadm trigger; then
     udevadm settle || true
 else
```
scripts/build-supervisor.sh (new executable file, 28 lines)

```bash
#!/bin/bash
source "${BASH_SOURCE[0]%/*}/common.sh"

set -eE

DOCKER_TIMEOUT=30
DOCKER_PID=0

function build_supervisor() {
    docker pull homeassistant/amd64-builder:dev

    docker run --rm \
        --privileged \
        -v /run/docker.sock:/run/docker.sock \
        -v "$(pwd):/data" \
        homeassistant/amd64-builder:dev \
        --generic latest \
        --target /data \
        --test \
        --amd64 \
        --no-cache
}

echo "Build Supervisor"
start_docker
trap "stop_docker" ERR

build_supervisor
```
scripts/common.sh (new file, 58 lines)

```bash
#!/bin/bash

function start_docker() {
    local starttime
    local endtime

    echo "Starting docker."
    dockerd 2> /dev/null &
    DOCKER_PID=$!

    echo "Waiting for docker to initialize..."
    starttime="$(date +%s)"
    endtime="$(date +%s)"
    until docker info >/dev/null 2>&1; do
        if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
            sleep 1
            endtime=$(date +%s)
        else
            echo "Timeout while waiting for docker to come up"
            exit 1
        fi
    done
    echo "Docker was initialized"
}

function stop_docker() {
    local starttime
    local endtime

    echo "Stopping in container docker..."
    if [ "$DOCKER_PID" -gt 0 ] && kill -0 "$DOCKER_PID" 2> /dev/null; then
        starttime="$(date +%s)"
        endtime="$(date +%s)"

        # Now wait for it to die
        kill "$DOCKER_PID"
        while kill -0 "$DOCKER_PID" 2> /dev/null; do
            if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
                sleep 1
                endtime=$(date +%s)
            else
                echo "Timeout while waiting for container docker to die"
                exit 1
            fi
        done
    else
        echo "Your host might have been left with unreleased resources"
    fi
}

function cleanup_lastboot() {
    if [[ -f /workspaces/test_supervisor/config.json ]]; then
        echo "Cleaning up last boot"
        cp /workspaces/test_supervisor/config.json /tmp/config.json
        jq -rM 'del(.last_boot)' /tmp/config.json > /workspaces/test_supervisor/config.json
        rm /tmp/config.json
    fi
}
```
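`start_docker` polls `docker info` once per second until the daemon answers or the `DOCKER_TIMEOUT` budget runs out. A rough Python equivalent of that wait-with-timeout pattern, assuming the `docker` CLI is on `PATH` (illustration only, not part of the scripts):

```python
import subprocess
import time

DOCKER_TIMEOUT = 30  # seconds, same budget as the shell script

def wait_for_docker(timeout: int = DOCKER_TIMEOUT) -> None:
    """Poll `docker info` once per second until the daemon responds."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        result = subprocess.run(["docker", "info"], capture_output=True, check=False)
        if result.returncode == 0:
            print("Docker was initialized")
            return
        time.sleep(1)
    raise TimeoutError("Timeout while waiting for docker to come up")
```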
scripts/run-supervisor.sh (new executable file, 102 lines)

```bash
#!/bin/bash
source "${BASH_SOURCE[0]%/*}/common.sh"
source "${BASH_SOURCE[0]%/*}/build-supervisor.sh"

set -eE

DOCKER_TIMEOUT=30
DOCKER_PID=0


function cleanup_docker() {
    echo "Cleaning up stopped containers..."
    docker rm $(docker ps -a -q) || true
}


function run_supervisor() {
    mkdir -p /workspaces/test_supervisor

    echo "Start Supervisor"
    docker run --rm --privileged \
        --name hassio_supervisor \
        --privileged \
        --security-opt seccomp=unconfined \
        --security-opt apparmor:unconfined \
        -v /run/docker.sock:/run/docker.sock:rw \
        -v /run/dbus:/run/dbus:ro \
        -v /run/udev:/run/udev:ro \
        -v "/workspaces/test_supervisor":/data:rw \
        -v /etc/machine-id:/etc/machine-id:ro \
        -v /workspaces/supervisor:/usr/src/supervisor \
        -e SUPERVISOR_SHARE="/workspaces/test_supervisor" \
        -e SUPERVISOR_NAME=hassio_supervisor \
        -e SUPERVISOR_DEV=1 \
        -e SUPERVISOR_MACHINE="qemux86-64" \
        homeassistant/amd64-hassio-supervisor:latest

}


function init_dbus() {
    if pgrep dbus-daemon; then
        echo "Dbus is running"
        return 0
    fi

    echo "Startup dbus"
    mkdir -p /var/lib/dbus
    cp -f /etc/machine-id /var/lib/dbus/machine-id

    # cleanups
    mkdir -p /run/dbus
    rm -f /run/dbus/pid

    # run
    dbus-daemon --system --print-address
}


function init_udev() {
    if pgrep systemd-udevd; then
        echo "udev is running"
        return 0
    fi

    echo "Startup udev"

    # cleanups
    mkdir -p /run/udev

    # run
    /lib/systemd/systemd-udevd --daemon
    sleep 3
    udevadm trigger && udevadm settle
}


echo "Run Supervisor"

start_docker
trap "stop_docker" ERR


if [ "$( docker container inspect -f '{{.State.Status}}' hassio_supervisor )" == "running" ]; then
    echo "Restarting Supervisor"
    docker rm -f hassio_supervisor
    init_dbus
    init_udev
    cleanup_lastboot
    run_supervisor
    stop_docker

else
    echo "Starting Supervisor"
    docker system prune -f
    build_supervisor
    cleanup_lastboot
    cleanup_docker
    init_dbus
    init_udev
    run_supervisor
    stop_docker
fi
```
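The restart branch hinges on `docker container inspect -f '{{.State.Status}}'`, which prints the container's state string (`running`, `exited`, and so on). A small Python sketch of the same probe, again assuming the `docker` CLI; the `container_status` helper name is made up for illustration:

```python
import subprocess

def container_status(name: str) -> str:
    """Return the Docker state string for a container, or "" if it does not exist."""
    result = subprocess.run(
        ["docker", "container", "inspect", "-f", "{{.State.Status}}", name],
        capture_output=True,
        text=True,
        check=False,
    )
    return result.stdout.strip() if result.returncode == 0 else ""

if container_status("hassio_supervisor") == "running":
    print("Restarting Supervisor")
else:
    print("Starting Supervisor")
```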
Deleted file (135 lines):

```bash
#!/bin/bash
set -eE

DOCKER_TIMEOUT=30
DOCKER_PID=0


function start_docker() {
    local starttime
    local endtime

    echo "Starting docker."
    dockerd 2> /dev/null &
    DOCKER_PID=$!

    echo "Waiting for docker to initialize..."
    starttime="$(date +%s)"
    endtime="$(date +%s)"
    until docker info >/dev/null 2>&1; do
        if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
            sleep 1
            endtime=$(date +%s)
        else
            echo "Timeout while waiting for docker to come up"
            exit 1
        fi
    done
    echo "Docker was initialized"
}


function stop_docker() {
    local starttime
    local endtime

    echo "Stopping in container docker..."
    if [ "$DOCKER_PID" -gt 0 ] && kill -0 "$DOCKER_PID" 2> /dev/null; then
        starttime="$(date +%s)"
        endtime="$(date +%s)"

        # Now wait for it to die
        kill "$DOCKER_PID"
        while kill -0 "$DOCKER_PID" 2> /dev/null; do
            if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
                sleep 1
                endtime=$(date +%s)
            else
                echo "Timeout while waiting for container docker to die"
                exit 1
            fi
        done
    else
        echo "Your host might have been left with unreleased resources"
    fi
}


function build_supervisor() {
    docker pull homeassistant/amd64-builder:dev

    docker run --rm --privileged \
        -v /run/docker.sock:/run/docker.sock -v "$(pwd):/data" \
        homeassistant/amd64-builder:dev \
        --generic dev -t /data --test --amd64 --no-cache
}


function cleanup_lastboot() {
    if [[ -f /workspaces/test_supervisor/config.json ]]; then
        echo "Cleaning up last boot"
        cp /workspaces/test_supervisor/config.json /tmp/config.json
        jq -rM 'del(.last_boot)' /tmp/config.json > /workspaces/test_supervisor/config.json
        rm /tmp/config.json
    fi
}


function cleanup_docker() {
    echo "Cleaning up stopped containers..."
    docker rm $(docker ps -a -q) || true
}


function setup_test_env() {
    mkdir -p /workspaces/test_supervisor

    echo "Start Supervisor"
    docker run --rm --privileged \
        --name hassio_supervisor \
        --security-opt seccomp=unconfined \
        --security-opt apparmor:unconfined \
        -v /run/docker.sock:/run/docker.sock \
        -v /run/dbus:/run/dbus \
        -v "/workspaces/test_supervisor":/data \
        -v /etc/machine-id:/etc/machine-id:ro \
        -e SUPERVISOR_SHARE="/workspaces/test_supervisor" \
        -e SUPERVISOR_NAME=hassio_supervisor \
        -e SUPERVISOR_DEV=1 \
        -e SUPERVISOR_MACHINE="qemux86-64" \
        homeassistant/amd64-hassio-supervisor:latest

}


function init_dbus() {
    if pgrep dbus-daemon; then
        echo "Dbus is running"
        return 0
    fi

    echo "Startup dbus"
    mkdir -p /var/lib/dbus
    cp -f /etc/machine-id /var/lib/dbus/machine-id

    # cleanups
    mkdir -p /run/dbus
    rm -f /run/dbus/pid

    # run
    dbus-daemon --system --print-address
}

echo "Start Test-Env"

start_docker
trap "stop_docker" ERR

docker system prune -f

build_supervisor
cleanup_lastboot
cleanup_docker
init_dbus
setup_test_env
stop_docker
```
```diff
@@ -10,6 +10,7 @@ from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import (
     AddonConfigurationError,
     AddonsError,
+    AddonsJobError,
     AddonsNotSupportedError,
     CoreDNSError,
     DockerAPIError,
@@ -147,7 +148,8 @@ class AddonManager(CoreSysAttributes):
                 JobCondition.FREE_SPACE,
                 JobCondition.INTERNET_HOST,
                 JobCondition.HEALTHY,
-            ]
+            ],
+            on_condition=AddonsJobError,
         )
     async def install(self, slug: str) -> None:
         """Install an add-on."""
@@ -248,7 +250,8 @@ class AddonManager(CoreSysAttributes):
                 JobCondition.FREE_SPACE,
                 JobCondition.INTERNET_HOST,
                 JobCondition.HEALTHY,
-            ]
+            ],
+            on_condition=AddonsJobError,
         )
     async def update(self, slug: str) -> None:
         """Update add-on."""
@@ -297,7 +300,8 @@ class AddonManager(CoreSysAttributes):
                 JobCondition.FREE_SPACE,
                 JobCondition.INTERNET_HOST,
                 JobCondition.HEALTHY,
-            ]
+            ],
+            on_condition=AddonsJobError,
         )
     async def rebuild(self, slug: str) -> None:
         """Perform a rebuild of local build add-on."""
@@ -339,7 +343,8 @@ class AddonManager(CoreSysAttributes):
                 JobCondition.FREE_SPACE,
                 JobCondition.INTERNET_HOST,
                 JobCondition.HEALTHY,
-            ]
+            ],
+            on_condition=AddonsJobError,
        )
    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
        """Restore state of an add-on."""
```
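The repeated hunk threads `on_condition=AddonsJobError` into the `Job` decorator on `install`, `update`, `rebuild`, and `restore`, so an unmet `JobCondition` surfaces as a typed add-on error. A toy sketch of the general decorator pattern, not the Supervisor's actual `Job` implementation (all names below are simplified for illustration):

```python
import asyncio
import functools
from typing import Awaitable, Callable, List, Optional, Type

def job(
    conditions: List[Callable[[], bool]],
    on_condition: Optional[Type[Exception]] = None,
):
    """Gate a coroutine on preconditions, raising the configured error type."""
    def decorator(func: Callable[..., Awaitable]):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            failed = [cond.__name__ for cond in conditions if not cond()]
            if failed:
                if on_condition is not None:
                    # Raise the configured error type, e.g. AddonsJobError.
                    raise on_condition(f"Unmet job conditions: {failed}")
                return None  # no error type configured: skip the job
            return await func(*args, **kwargs)
        return wrapper
    return decorator

def free_space() -> bool:
    return True  # stand-in for a real disk-space check

class DemoJobError(Exception):
    pass

@job(conditions=[free_space], on_condition=DemoJobError)
async def install(slug: str) -> None:
    print(f"installing {slug}")

asyncio.run(install("core_ssh"))
```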
```diff
@@ -10,7 +10,7 @@ import secrets
 import shutil
 import tarfile
 from tempfile import TemporaryDirectory
-from typing import Any, Awaitable, Dict, List, Optional
+from typing import Any, Awaitable, Dict, List, Optional, Set
 
 import aiohttp
 import voluptuous as vol
@@ -55,13 +55,15 @@ from ..exceptions import (
     HostAppArmorError,
     JsonFileError,
 )
+from ..hardware.data import Device
 from ..utils import check_port
 from ..utils.apparmor import adjust_profile
 from ..utils.json import read_json_file, write_json_file
 from ..utils.tar import atomic_contents_add, secure_path
 from .model import AddonModel, Data
+from .options import AddonOptions
 from .utils import remove_data
-from .validate import SCHEMA_ADDON_SNAPSHOT, validate_options
+from .validate import SCHEMA_ADDON_SNAPSHOT
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -101,7 +103,7 @@ class Addon(AddonModel):
     async def load(self) -> None:
         """Async initialize of object."""
         with suppress(DockerError):
-            await self.instance.attach(tag=self.version)
+            await self.instance.attach(version=self.version)
 
         # Evaluate state
         if await self.instance.is_running():
@@ -394,6 +396,20 @@ class Addon(AddonModel):
         """Return path to asound config for Docker."""
         return Path(self.sys_config.path_extern_tmp, f"{self.slug}_pulse")
 
+    @property
+    def devices(self) -> Set[Device]:
+        """Create a schema for add-on options."""
+        raw_schema = self.data[ATTR_SCHEMA]
+        if isinstance(raw_schema, bool) or not raw_schema:
+            return set()
+
+        # Validate devices
+        options_validator = AddonOptions(self.coresys, raw_schema)
+        with suppress(vol.Invalid):
+            options_validator(self.options)
+
+        return options_validator.devices
+
     def save_persist(self) -> None:
         """Save data of add-on."""
         self.sys_addons.data.save_data()
@@ -442,20 +458,17 @@ class Addon(AddonModel):
 
     async def write_options(self) -> None:
         """Return True if add-on options is written to data."""
-        schema = self.schema
-        options = self.options
-
         # Update secrets for validation
         await self.sys_homeassistant.secrets.reload()
 
         try:
-            options = schema(options)
+            options = self.schema(self.options)
             write_json_file(self.path_options, options)
         except vol.Invalid as ex:
             _LOGGER.error(
                 "Add-on %s has invalid options: %s",
                 self.slug,
-                humanize_error(options, ex),
+                humanize_error(self.options, ex),
             )
         except JsonFileError:
             _LOGGER.error("Add-on %s can't write options", self.slug)
@@ -538,7 +551,7 @@ class Addon(AddonModel):
 
         # create voluptuous
         new_schema = vol.Schema(
-            vol.All(dict, validate_options(self.coresys, new_raw_schema))
+            vol.All(dict, AddonOptions(self.coresys, new_raw_schema))
         )
 
         # validate
@@ -581,7 +594,7 @@ class Addon(AddonModel):
     async def stop(self) -> None:
         """Stop add-on."""
         try:
-            return await self.instance.stop()
+            await self.instance.stop()
         except DockerRequestError as err:
             raise AddonsError() from err
         except DockerError as err:
```
|
@@ -4,6 +4,8 @@ from __future__ import annotations
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Dict
|
||||
|
||||
from awesomeversion import AwesomeVersion
|
||||
|
||||
from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..utils.json import JsonConfig
|
||||
@@ -46,11 +48,21 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
|
||||
"""Return additional Docker build arguments."""
|
||||
return self._data[ATTR_ARGS]
|
||||
|
||||
def get_docker_args(self, version):
|
||||
@property
|
||||
def is_valid(self) -> bool:
|
||||
"""Return true if the build env is valid."""
|
||||
return all(
|
||||
[
|
||||
self.addon.path_location.is_dir(),
|
||||
Path(self.addon.path_location, "Dockerfile").is_file(),
|
||||
]
|
||||
)
|
||||
|
||||
def get_docker_args(self, version: AwesomeVersion):
|
||||
"""Create a dict with Docker build arguments."""
|
||||
args = {
|
||||
"path": str(self.addon.path_location),
|
||||
"tag": f"{self.addon.image}:{version}",
|
||||
"tag": f"{self.addon.image}:{version!s}",
|
||||
"pull": True,
|
||||
"forcerm": True,
|
||||
"squash": self.squash,
|
||||
|
```diff
@@ -3,7 +3,7 @@ from abc import ABC, abstractmethod
 from pathlib import Path
 from typing import Any, Awaitable, Dict, List, Optional
 
-from packaging import version as pkg_version
+from awesomeversion import AwesomeVersion, AwesomeVersionException
 import voluptuous as vol
 
 from ..const import (
@@ -12,7 +12,6 @@ from ..const import (
     ATTR_ARCH,
     ATTR_AUDIO,
     ATTR_AUTH_API,
-    ATTR_AUTO_UART,
     ATTR_BOOT,
     ATTR_DESCRIPTON,
     ATTR_DEVICES,
@@ -56,6 +55,7 @@ from ..const import (
     ATTR_STDIN,
     ATTR_TIMEOUT,
     ATTR_TMPFS,
+    ATTR_UART,
     ATTR_UDEV,
     ATTR_URL,
     ATTR_USB,
@@ -71,7 +71,8 @@ from ..const import (
     AddonStartup,
 )
 from ..coresys import CoreSys, CoreSysAttributes
-from .validate import RE_SERVICE, RE_VOLUME, schema_ui_options, validate_options
+from .options import AddonOptions, UiOptions
+from .validate import RE_SERVICE, RE_VOLUME
 
 Data = Dict[str, Any]
 
@@ -183,12 +184,12 @@ class AddonModel(CoreSysAttributes, ABC):
         return self.data[ATTR_REPOSITORY]
 
     @property
-    def latest_version(self) -> str:
+    def latest_version(self) -> AwesomeVersion:
         """Return latest version of add-on."""
         return self.data[ATTR_VERSION]
 
     @property
-    def version(self) -> Optional[str]:
+    def version(self) -> AwesomeVersion:
         """Return version of add-on."""
         return self.data[ATTR_VERSION]
 
@@ -296,14 +297,9 @@ class AddonModel(CoreSysAttributes, ABC):
         return self.data[ATTR_HOST_DBUS]
 
     @property
-    def devices(self) -> List[str]:
-        """Return devices of add-on."""
-        return self.data.get(ATTR_DEVICES, [])
-
-    @property
-    def tmpfs(self) -> Optional[str]:
-        """Return tmpfs of add-on."""
-        return self.data.get(ATTR_TMPFS)
+    def static_devices(self) -> List[Path]:
+        """Return static devices of add-on."""
+        return [Path(node) for node in self.data.get(ATTR_DEVICES, [])]
 
     @property
     def environment(self) -> Optional[Dict[str, str]]:
@@ -387,7 +383,7 @@ class AddonModel(CoreSysAttributes, ABC):
     @property
     def with_uart(self) -> bool:
         """Return True if we should map all UART device."""
-        return self.data[ATTR_AUTO_UART]
+        return self.data[ATTR_UART]
 
     @property
     def with_udev(self) -> bool:
@@ -409,6 +405,11 @@ class AddonModel(CoreSysAttributes, ABC):
         """Return True if the add-on read access to devicetree."""
         return self.data[ATTR_DEVICETREE]
 
+    @property
+    def with_tmpfs(self) -> Optional[str]:
+        """Return if tmp is in memory of add-on."""
+        return self.data[ATTR_TMPFS]
+
     @property
     def access_auth_api(self) -> bool:
         """Return True if the add-on access to login/auth backend."""
@@ -522,8 +523,8 @@ class AddonModel(CoreSysAttributes, ABC):
         raw_schema = self.data[ATTR_SCHEMA]
 
         if isinstance(raw_schema, bool):
-            return vol.Schema(dict)
-        return vol.Schema(vol.All(dict, validate_options(self.coresys, raw_schema)))
+            raw_schema = {}
+        return vol.Schema(vol.All(dict, AddonOptions(self.coresys, raw_schema)))
 
     @property
     def schema_ui(self) -> Optional[List[Dict[str, Any]]]:
@@ -532,7 +533,7 @@ class AddonModel(CoreSysAttributes, ABC):
 
         if isinstance(raw_schema, bool):
             return None
-        return schema_ui_options(raw_schema)
+        return UiOptions(self.coresys)(raw_schema)
 
     def __eq__(self, other):
         """Compaired add-on objects."""
@@ -554,15 +555,10 @@ class AddonModel(CoreSysAttributes, ABC):
         return False
 
         # Home Assistant
-        version = config.get(ATTR_HOMEASSISTANT)
-        if version is None or self.sys_homeassistant.version is None:
-            return True
-
+        version: Optional[AwesomeVersion] = config.get(ATTR_HOMEASSISTANT)
         try:
-            return pkg_version.parse(
-                self.sys_homeassistant.version
-            ) >= pkg_version.parse(version)
-        except pkg_version.InvalidVersion:
+            return self.sys_homeassistant.version >= version
+        except (AwesomeVersionException, TypeError):
             return True
 
     def _image(self, config) -> str:
```
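The last hunk swaps `packaging.version` for `awesomeversion`, which compares Home Assistant's CalVer-style strings directly and raises `AwesomeVersionException` on unparsable input, so the explicit `None` pre-check can collapse into the `except` clause. A quick illustration of the comparison the new code relies on, assuming the `awesomeversion` package pinned in requirements.txt (the helper name is made up):

```python
from awesomeversion import AwesomeVersion, AwesomeVersionException

def available_for(core_version: AwesomeVersion, required: str) -> bool:
    """Mirror the new check: available unless core is older than the requirement."""
    try:
        return core_version >= AwesomeVersion(required)
    except (AwesomeVersionException, TypeError):
        # Unknown or unparsable versions fall back to "available".
        return True

print(available_for(AwesomeVersion("2021.2.0"), "2021.1.0"))  # True
print(available_for(AwesomeVersion("0.110.0"), "2021.1.0"))   # False
```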
supervisor/addons/options.py (new file, 381 lines)

```python
"""Add-on Options / UI rendering."""
import logging
from pathlib import Path
import re
from typing import Any, Dict, List, Set, Union

import voluptuous as vol

from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import HardwareNotFound
from ..hardware.const import UdevSubsystem
from ..hardware.data import Device
from ..validate import network_port

_LOGGER: logging.Logger = logging.getLogger(__name__)

_STR = "str"
_INT = "int"
_FLOAT = "float"
_BOOL = "bool"
_PASSWORD = "password"
_EMAIL = "email"
_URL = "url"
_PORT = "port"
_MATCH = "match"
_LIST = "list"
_DEVICE = "device"

RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|bool"
    r"|email"
    r"|url"
    r"|port"
    r"|device(?:\((?P<filter>subsystem=[a-z]+)\))?"
    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
    r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r"|list\((?P<list>.+)\)"
    r")\??$"
)

_SCHEMA_LENGTH_PARTS = (
    "i_min",
    "i_max",
    "f_min",
    "f_max",
    "s_min",
    "s_max",
    "p_min",
    "p_max",
)


class AddonOptions(CoreSysAttributes):
    """Validate Add-ons Options."""

    def __init__(
        self,
        coresys: CoreSys,
        raw_schema: Dict[str, Any],
    ):
        """Validate schema."""
        self.coresys: CoreSys = coresys
        self.raw_schema: Dict[str, Any] = raw_schema
        self.devices: Set[Device] = set()

    def __call__(self, struct):
        """Create schema validator for add-ons options."""
        options = {}

        # read options
        for key, value in struct.items():
            # Ignore unknown options / remove from list
            if key not in self.raw_schema:
                _LOGGER.warning("Unknown options %s", key)
                continue

            typ = self.raw_schema[key]
            try:
                if isinstance(typ, list):
                    # nested value list
                    options[key] = self._nested_validate_list(typ[0], value, key)
                elif isinstance(typ, dict):
                    # nested value dict
                    options[key] = self._nested_validate_dict(typ, value, key)
                else:
                    # normal value
                    options[key] = self._single_validate(typ, value, key)
            except (IndexError, KeyError):
                raise vol.Invalid(f"Type error for {key}") from None

        self._check_missing_options(self.raw_schema, options, "root")
        return options

    # pylint: disable=no-value-for-parameter
    def _single_validate(self, typ: str, value: Any, key: str):
        """Validate a single element."""
        # if required argument
        if value is None:
            raise vol.Invalid(f"Missing required option '{key}'") from None

        # Lookup secret
        if str(value).startswith("!secret "):
            secret: str = value.partition(" ")[2]
            value = self.sys_homeassistant.secrets.get(secret)
            if value is None:
                raise vol.Invalid(f"Unknown secret {secret}") from None

        # parse extend data from type
        match = RE_SCHEMA_ELEMENT.match(typ)

        if not match:
            raise vol.Invalid(f"Unknown type {typ}") from None

        # prepare range
        range_args = {}
        for group_name in _SCHEMA_LENGTH_PARTS:
            group_value = match.group(group_name)
            if group_value:
                range_args[group_name[2:]] = float(group_value)

        if typ.startswith(_STR) or typ.startswith(_PASSWORD):
            return vol.All(str(value), vol.Range(**range_args))(value)
        elif typ.startswith(_INT):
            return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
        elif typ.startswith(_FLOAT):
            return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
        elif typ.startswith(_BOOL):
            return vol.Boolean()(value)
        elif typ.startswith(_EMAIL):
            return vol.Email()(value)
        elif typ.startswith(_URL):
            return vol.Url()(value)
        elif typ.startswith(_PORT):
            return network_port(value)
        elif typ.startswith(_MATCH):
            return vol.Match(match.group("match"))(str(value))
        elif typ.startswith(_LIST):
            return vol.In(match.group("list").split("|"))(str(value))
        elif typ.startswith(_DEVICE):
            try:
                device = self.sys_hardware.get_by_path(Path(value))
            except HardwareNotFound:
                raise vol.Invalid(f"Device {value} does not exists!") from None

            # Have filter
            if match.group("filter"):
                str_filter = match.group("filter")
                device_filter = _create_device_filter(str_filter)
                if device not in self.sys_hardware.filter_devices(**device_filter):
                    raise vol.Invalid(
                        f"Device {value} don't match the filter {str_filter}!"
                    )

            # Device valid
            self.devices.add(device)
            return str(device.path)

        raise vol.Invalid(f"Fatal error for {key} type {typ}") from None

    def _nested_validate_list(self, typ: Any, data_list: List[Any], key: str):
        """Validate nested items."""
        options = []

        # Make sure it is a list
        if not isinstance(data_list, list):
            raise vol.Invalid(f"Invalid list for {key}") from None

        # Process list
        for element in data_list:
            # Nested?
            if isinstance(typ, dict):
                c_options = self._nested_validate_dict(typ, element, key)
                options.append(c_options)
            else:
                options.append(self._single_validate(typ, element, key))

        return options

    def _nested_validate_dict(
        self, typ: Dict[Any, Any], data_dict: Dict[Any, Any], key: str
    ):
        """Validate nested items."""
        options = {}

        # Make sure it is a dict
        if not isinstance(data_dict, dict):
            raise vol.Invalid(f"Invalid dict for {key}") from None

        # Process dict
        for c_key, c_value in data_dict.items():
            # Ignore unknown options / remove from list
            if c_key not in typ:
                _LOGGER.warning("Unknown options %s", c_key)
                continue

            # Nested?
            if isinstance(typ[c_key], list):
                options[c_key] = self._nested_validate_list(
                    typ[c_key][0], c_value, c_key
                )
            else:
                options[c_key] = self._single_validate(typ[c_key], c_value, c_key)

        self._check_missing_options(typ, options, key)
        return options

    def _check_missing_options(
        self, origin: Dict[Any, Any], exists: Dict[Any, Any], root: str
    ) -> None:
        """Check if all options are exists."""
        missing = set(origin) - set(exists)
        for miss_opt in missing:
            if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
                continue
            raise vol.Invalid(f"Missing option {miss_opt} in {root}") from None


class UiOptions(CoreSysAttributes):
    """Render UI Add-ons Options."""

    def __init__(self, coresys: CoreSys) -> None:
        """Initialize UI option render."""
        self.coresys = coresys

    def __call__(self, raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Generate UI schema."""
        ui_schema: List[Dict[str, Any]] = []

        # read options
        for key, value in raw_schema.items():
            if isinstance(value, list):
                # nested value list
                self._nested_ui_list(ui_schema, value, key)
            elif isinstance(value, dict):
                # nested value dict
                self._nested_ui_dict(ui_schema, value, key)
            else:
                # normal value
                self._single_ui_option(ui_schema, value, key)

        return ui_schema

    def _single_ui_option(
        self,
        ui_schema: List[Dict[str, Any]],
        value: str,
        key: str,
        multiple: bool = False,
    ) -> None:
        """Validate a single element."""
        ui_node: Dict[str, Union[str, bool, float, List[str]]] = {"name": key}

        # If multiple
        if multiple:
            ui_node["multiple"] = True

        # Parse extend data from type
        match = RE_SCHEMA_ELEMENT.match(value)
        if not match:
            return

        # Prepare range
        for group_name in _SCHEMA_LENGTH_PARTS:
            group_value = match.group(group_name)
            if not group_value:
                continue
            if group_name[2:] == "min":
                ui_node["lengthMin"] = float(group_value)
            elif group_name[2:] == "max":
                ui_node["lengthMax"] = float(group_value)

        # If required
        if value.endswith("?"):
            ui_node["optional"] = True
        else:
            ui_node["required"] = True

        # Data types
        if value.startswith(_STR):
            ui_node["type"] = "string"
        elif value.startswith(_PASSWORD):
            ui_node["type"] = "string"
            ui_node["format"] = "password"
        elif value.startswith(_INT):
            ui_node["type"] = "integer"
        elif value.startswith(_FLOAT):
            ui_node["type"] = "float"
        elif value.startswith(_BOOL):
            ui_node["type"] = "boolean"
        elif value.startswith(_EMAIL):
            ui_node["type"] = "string"
            ui_node["format"] = "email"
        elif value.startswith(_URL):
            ui_node["type"] = "string"
            ui_node["format"] = "url"
        elif value.startswith(_PORT):
            ui_node["type"] = "integer"
        elif value.startswith(_MATCH):
            ui_node["type"] = "string"
        elif value.startswith(_LIST):
            ui_node["type"] = "select"
            ui_node["options"] = match.group("list").split("|")
        elif value.startswith(_DEVICE):
            ui_node["type"] = "select"

            # Have filter
            if match.group("filter"):
                device_filter = _create_device_filter(match.group("filter"))
                ui_node["options"] = [
                    (device.by_id or device.path).as_posix()
                    for device in self.sys_hardware.filter_devices(**device_filter)
                ]
            else:
                ui_node["options"] = [
                    (device.by_id or device.path).as_posix()
                    for device in self.sys_hardware.devices()
                ]

        ui_schema.append(ui_node)

    def _nested_ui_list(
        self,
        ui_schema: List[Dict[str, Any]],
        option_list: List[Any],
        key: str,
    ) -> None:
        """UI nested list items."""
        try:
            element = option_list[0]
        except IndexError:
            _LOGGER.error("Invalid schema %s", key)
            return

        if isinstance(element, dict):
            self._nested_ui_dict(ui_schema, element, key, multiple=True)
        else:
            self._single_ui_option(ui_schema, element, key, multiple=True)

    def _nested_ui_dict(
        self,
        ui_schema: List[Dict[str, Any]],
        option_dict: Dict[str, Any],
        key: str,
        multiple: bool = False,
    ) -> None:
        """UI nested dict items."""
        ui_node = {
            "name": key,
            "type": "schema",
            "optional": True,
            "multiple": multiple,
        }

        nested_schema = []
        for c_key, c_value in option_dict.items():
            # Nested?
            if isinstance(c_value, list):
                self._nested_ui_list(nested_schema, c_value, c_key)
            else:
                self._single_ui_option(nested_schema, c_value, c_key)

        ui_node["schema"] = nested_schema
        ui_schema.append(ui_node)


def _create_device_filter(str_filter: str) -> Dict[str, Any]:
    """Generate device Filter."""
    raw_filter = dict(value.split("=") for value in str_filter.split(";"))

    clean_filter = {}
    for key, value in raw_filter.items():
        if key == "subsystem":
            clean_filter[key] = UdevSubsystem(value)
        else:
            clean_filter[key] = value

    return clean_filter
```
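`RE_SCHEMA_ELEMENT` does the heavy lifting in this file: one alternation per option type, with optional `(min,max)` ranges captured as named groups and a trailing `?` marking the option as optional. A short, self-contained demonstration using the exact regex from the file above (the example schema strings are hypothetical):

```python
import re

# Same pattern as RE_SCHEMA_ELEMENT in supervisor/addons/options.py.
RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|bool"
    r"|email"
    r"|url"
    r"|port"
    r"|device(?:\((?P<filter>subsystem=[a-z]+)\))?"
    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
    r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r"|list\((?P<list>.+)\)"
    r")\??$"
)

# Bounded int with trailing "?" (optional option): range lands in i_min/i_max.
match = RE_SCHEMA_ELEMENT.match("int(1,65535)?")
print(match.group("i_min"), match.group("i_max"))  # 1 65535

# Device type with a udev subsystem filter.
match = RE_SCHEMA_ELEMENT.match("device(subsystem=tty)")
print(match.group("filter"))  # subsystem=tty

# Enumerated list type: choices are pipe-separated.
match = RE_SCHEMA_ELEMENT.match("list(low|medium|high)")
print(match.group("list").split("|"))  # ['low', 'medium', 'high']
```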
@@ -2,7 +2,7 @@
import logging
import re
import secrets
from typing import Any, Dict, List, Union
from typing import Any, Dict
import uuid

import voluptuous as vol
@@ -18,7 +18,6 @@ from ..const import (
    ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT,
    ATTR_AUTH_API,
    ATTR_AUTO_UART,
    ATTR_AUTO_UPDATE,
    ATTR_BOOT,
    ATTR_BUILD_FROM,
@@ -73,6 +72,7 @@ from ..const import (
    ATTR_SYSTEM,
    ATTR_TIMEOUT,
    ATTR_TMPFS,
    ATTR_UART,
    ATTR_UDEV,
    ATTR_URL,
    ATTR_USB,
@@ -90,9 +90,9 @@ from ..const import (
    AddonStartup,
    AddonState,
)
from ..coresys import CoreSys
from ..discovery.validate import valid_discovery_service
from ..validate import (
    docker_image,
    docker_ports,
    docker_ports_description,
    network_port,
@@ -100,51 +100,14 @@ from ..validate import (
    uuid_match,
    version_tag,
)
from .options import RE_SCHEMA_ELEMENT

_LOGGER: logging.Logger = logging.getLogger(__name__)


RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")

V_STR = "str"
V_INT = "int"
V_FLOAT = "float"
V_BOOL = "bool"
V_PASSWORD = "password"
V_EMAIL = "email"
V_URL = "url"
V_PORT = "port"
V_MATCH = "match"
V_LIST = "list"

RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|bool"
    r"|email"
    r"|url"
    r"|port"
    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
    r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r"|list\((?P<list>.+)\)"
    r")\??$"
)

_SCHEMA_LENGTH_PARTS = (
    "i_min",
    "i_max",
    "f_min",
    "f_max",
    "s_min",
    "s_max",
    "p_min",
    "p_max",
)

RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
RE_DOCKER_IMAGE_BUILD = re.compile(
    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
)
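# Editor's note: RE_SCHEMA_ELEMENT above encodes the whole option-type
# grammar, including optional (min,max) arguments and a trailing "?" for
# optional fields. A quick self-contained check of just the "int" branch:
import re as _re

_RE_INT_ELEMENT = _re.compile(r"^int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?\??$")

_m = _RE_INT_ELEMENT.match("int(1,10)?")
assert _m and _m.group("i_min") == "1" and _m.group("i_max") == "10"
assert "int(1,10)?".endswith("?")  # trailing "?" marks the option as optional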
@@ -173,27 +136,74 @@ RE_MACHINE = re.compile(
)


def _simple_startup(value) -> str:
    """Define startup schema."""
    if value == "before":
        return AddonStartup.SERVICES.value
    if value == "after":
        return AddonStartup.APPLICATION.value
    return value
def _migrate_addon_config(protocol=False):
    """Migrate addon config."""

    def _migrate(config: Dict[str, Any]):
        name = config.get(ATTR_NAME)
        if not name:
            raise vol.Invalid("Invalid Add-on config!")

        # Startup 2018-03-30
        if config.get(ATTR_STARTUP) in ("before", "after"):
            value = config[ATTR_STARTUP]
            if protocol:
                _LOGGER.warning(
                    "Add-on config 'startup' with '%s' is deprecated. Please report this to the maintainer of %s",
                    value,
                    name,
                )
            if value == "before":
                config[ATTR_STARTUP] = AddonStartup.SERVICES.value
            elif value == "after":
                config[ATTR_STARTUP] = AddonStartup.APPLICATION.value

        # UART 2021-01-20
        if "auto_uart" in config:
            if protocol:
                _LOGGER.warning(
                    "Add-on config 'auto_uart' is deprecated, use 'uart'. Please report this to the maintainer of %s",
                    name,
                )
            config[ATTR_UART] = config.pop("auto_uart")

        # Device 2021-01-20
        if ATTR_DEVICES in config and any(":" in line for line in config[ATTR_DEVICES]):
            if protocol:
                _LOGGER.warning(
                    "Add-on config 'devices' uses a deprecated format, the new format uses a list of paths only. Please report this to the maintainer of %s",
                    name,
                )
            config[ATTR_DEVICES] = [line.split(":")[0] for line in config[ATTR_DEVICES]]

        # TMPFS 2021-02-01
        if ATTR_TMPFS in config and not isinstance(config[ATTR_TMPFS], bool):
            if protocol:
                _LOGGER.warning(
                    "Add-on config 'tmpfs' uses a deprecated format, it is now a boolean only. Please report this to the maintainer of %s",
                    name,
                )
            config[ATTR_TMPFS] = True

        return config

    return _migrate
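# Editor's note: a hypothetical stand-alone version of the migration above,
# on a plain dict and without the ATTR_* constants or deprecation logging.
def _example_migrate(config: dict) -> dict:
    """Rewrite the deprecated add-on config shapes to their current form."""
    if config.get("startup") == "before":
        config["startup"] = "services"
    elif config.get("startup") == "after":
        config["startup"] = "application"
    if "auto_uart" in config:  # auto_uart -> uart (2021-01-20)
        config["uart"] = config.pop("auto_uart")
    if "devices" in config:  # strip the old ":path:rwm" cgroup suffixes
        config["devices"] = [line.split(":")[0] for line in config["devices"]]
    if not isinstance(config.get("tmpfs", False), bool):
        config["tmpfs"] = True  # tmpfs is now a plain boolean
    return config

assert _example_migrate(
    {"startup": "before", "auto_uart": True, "devices": ["/dev/ttyUSB0:/dev/ttyUSB0:rwm"]}
) == {"startup": "services", "devices": ["/dev/ttyUSB0"], "uart": True}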
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema(
_SCHEMA_ADDON_CONFIG = vol.Schema(
    {
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.All(version_tag, str),
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
        vol.Required(ATTR_NAME): str,
        vol.Required(ATTR_VERSION): version_tag,
        vol.Required(ATTR_SLUG): str,
        vol.Required(ATTR_DESCRIPTON): str,
        vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
        vol.Optional(ATTR_MACHINE): vol.All([vol.Match(RE_MACHINE)], vol.Unique()),
        vol.Optional(ATTR_URL): vol.Url(),
        vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.Coerce(AddonStartup)),
        vol.Required(ATTR_BOOT): vol.Coerce(AddonBoot),
        vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
            AddonStartup
        ),
        vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
        vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
        vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
        vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@@ -209,21 +219,20 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
        vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
            network_port, vol.Equal(0)
        ),
        vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
        vol.Optional(ATTR_INGRESS_ENTRY): str,
        vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): str,
        vol.Optional(ATTR_PANEL_TITLE): str,
        vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(version_tag),
        vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
        vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICES): [str],
        vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
        vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
        vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
        vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
        vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
        vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
        vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
@@ -231,6 +240,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
        vol.Optional(ATTR_VIDEO, default=False): vol.Boolean(),
        vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
        vol.Optional(ATTR_USB, default=False): vol.Boolean(),
        vol.Optional(ATTR_UART, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
        vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
        vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
@@ -242,32 +252,26 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
        vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
        vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
        vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [vol.Coerce(str)],
        vol.Required(ATTR_OPTIONS): dict,
        vol.Required(ATTR_SCHEMA): vol.Any(
        vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [str],
        vol.Optional(ATTR_OPTIONS, default={}): dict,
        vol.Optional(ATTR_SCHEMA, default={}): vol.Any(
            vol.Schema(
                {
                    vol.Coerce(str): vol.Any(
                    str: vol.Any(
                        SCHEMA_ELEMENT,
                        [
                            vol.Any(
                                SCHEMA_ELEMENT,
                                {
                                    vol.Coerce(str): vol.Any(
                                        SCHEMA_ELEMENT, [SCHEMA_ELEMENT]
                                    )
                                },
                                {str: vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])},
                            )
                        ],
                        vol.Schema(
                            {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
                        ),
                        vol.Schema({str: vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}),
                    )
                }
            ),
            False,
        ),
        vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE),
        vol.Optional(ATTR_IMAGE): docker_image,
        vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
            vol.Coerce(int), vol.Range(min=10, max=300)
        ),
@@ -275,6 +279,8 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
    extra=vol.REMOVE_EXTRA,
)

SCHEMA_ADDON_CONFIG = vol.All(_migrate_addon_config(True), _SCHEMA_ADDON_CONFIG)

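# Editor's note: validation is now a two-stage vol.All pipeline -- migrate
# legacy keys first, then run the strict schema. A minimal voluptuous sketch
# of that composition pattern (toy keys, not the real add-on schema):
import voluptuous as _vol

def _ex_migrate(config: dict) -> dict:
    """Stage 1: rewrite a deprecated key before strict validation."""
    if "auto_uart" in config:
        config["uart"] = config.pop("auto_uart")
    return config

_EX_SCHEMA = _vol.Schema({_vol.Optional("uart", default=False): _vol.Boolean()})
_EX_PIPELINE = _vol.All(_ex_migrate, _EX_SCHEMA)  # stage 2 only sees the migrated shape
assert _EX_PIPELINE({"auto_uart": True}) == {"uart": True}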
# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema(
@@ -294,19 +300,17 @@ SCHEMA_BUILD_CONFIG = vol.Schema(
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema(
    {
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
        vol.Required(ATTR_VERSION): version_tag,
        vol.Optional(ATTR_IMAGE): docker_image,
        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
        vol.Optional(ATTR_ACCESS_TOKEN): token,
        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
            str
        ),
        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): str,
        vol.Optional(ATTR_OPTIONS, default=dict): dict,
        vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
        vol.Optional(ATTR_BOOT): vol.Coerce(AddonBoot),
        vol.Optional(ATTR_NETWORK): docker_ports,
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
        vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
        vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
        vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
@@ -315,19 +319,23 @@ SCHEMA_ADDON_USER = vol.Schema(
)


SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
    {
        vol.Required(ATTR_LOCATON): vol.Coerce(str),
        vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
    }
SCHEMA_ADDON_SYSTEM = vol.All(
    _migrate_addon_config(),
    _SCHEMA_ADDON_CONFIG.extend(
        {
            vol.Required(ATTR_LOCATON): str,
            vol.Required(ATTR_REPOSITORY): str,
        }
    ),
)


SCHEMA_ADDONS_FILE = vol.Schema(
    {
        vol.Optional(ATTR_USER, default=dict): {vol.Coerce(str): SCHEMA_ADDON_USER},
        vol.Optional(ATTR_SYSTEM, default=dict): {vol.Coerce(str): SCHEMA_ADDON_SYSTEM},
    }
        vol.Optional(ATTR_USER, default=dict): {str: SCHEMA_ADDON_USER},
        vol.Optional(ATTR_SYSTEM, default=dict): {str: SCHEMA_ADDON_SYSTEM},
    },
    extra=vol.REMOVE_EXTRA,
)

@@ -336,263 +344,7 @@ SCHEMA_ADDON_SNAPSHOT = vol.Schema(
        vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
        vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
        vol.Required(ATTR_STATE): vol.Coerce(AddonState),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Required(ATTR_VERSION): version_tag,
    },
    extra=vol.REMOVE_EXTRA,
)

def validate_options(coresys: CoreSys, raw_schema: Dict[str, Any]):
    """Validate schema."""

    def validate(struct):
        """Create schema validator for add-ons options."""
        options = {}

        # read options
        for key, value in struct.items():
            # Ignore unknown options / remove from list
            if key not in raw_schema:
                _LOGGER.warning("Unknown option %s", key)
                continue

            typ = raw_schema[key]
            try:
                if isinstance(typ, list):
                    # nested value list
                    options[key] = _nested_validate_list(coresys, typ[0], value, key)
                elif isinstance(typ, dict):
                    # nested value dict
                    options[key] = _nested_validate_dict(coresys, typ, value, key)
                else:
                    # normal value
                    options[key] = _single_validate(coresys, typ, value, key)
            except (IndexError, KeyError):
                raise vol.Invalid(f"Type error for {key}") from None

        _check_missing_options(raw_schema, options, "root")
        return options

    return validate


# pylint: disable=no-value-for-parameter
# pylint: disable=inconsistent-return-statements
def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
    """Validate a single element."""
    # if required argument
    if value is None:
        raise vol.Invalid(f"Missing required option '{key}'") from None

    # Lookup secret
    if str(value).startswith("!secret "):
        secret: str = value.partition(" ")[2]
        value = coresys.homeassistant.secrets.get(secret)
        if value is None:
            raise vol.Invalid(f"Unknown secret {secret}") from None

    # parse extend data from type
    match = RE_SCHEMA_ELEMENT.match(typ)

    if not match:
        raise vol.Invalid(f"Unknown type {typ}") from None

    # prepare range
    range_args = {}
    for group_name in _SCHEMA_LENGTH_PARTS:
        group_value = match.group(group_name)
        if group_value:
            range_args[group_name[2:]] = float(group_value)

    if typ.startswith(V_STR) or typ.startswith(V_PASSWORD):
        return vol.All(str(value), vol.Range(**range_args))(value)
    elif typ.startswith(V_INT):
        return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
    elif typ.startswith(V_FLOAT):
        return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
    elif typ.startswith(V_BOOL):
        return vol.Boolean()(value)
    elif typ.startswith(V_EMAIL):
        return vol.Email()(value)
    elif typ.startswith(V_URL):
        return vol.Url()(value)
    elif typ.startswith(V_PORT):
        return network_port(value)
    elif typ.startswith(V_MATCH):
        return vol.Match(match.group("match"))(str(value))
    elif typ.startswith(V_LIST):
        return vol.In(match.group("list").split("|"))(str(value))

    raise vol.Invalid(f"Fatal error for {key} type {typ}") from None
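# Editor's note: the "!secret" branch above resolves references against the
# Home Assistant secret store before type validation. A stand-alone sketch,
# with a plain dict standing in for coresys.homeassistant.secrets:
_EXAMPLE_SECRETS = {"mqtt_password": "hunter2"}

def _example_resolve_secret(value):
    """Replace a '!secret name' reference with the stored value."""
    if isinstance(value, str) and value.startswith("!secret "):
        name = value.partition(" ")[2]
        if name not in _EXAMPLE_SECRETS:
            raise ValueError(f"Unknown secret {name}")
        return _EXAMPLE_SECRETS[name]
    return value

assert _example_resolve_secret("!secret mqtt_password") == "hunter2"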
def _nested_validate_list(coresys, typ, data_list, key):
    """Validate nested items."""
    options = []

    # Make sure it is a list
    if not isinstance(data_list, list):
        raise vol.Invalid(f"Invalid list for {key}") from None

    # Process list
    for element in data_list:
        # Nested?
        if isinstance(typ, dict):
            c_options = _nested_validate_dict(coresys, typ, element, key)
            options.append(c_options)
        else:
            options.append(_single_validate(coresys, typ, element, key))

    return options


def _nested_validate_dict(coresys, typ, data_dict, key):
    """Validate nested items."""
    options = {}

    # Make sure it is a dict
    if not isinstance(data_dict, dict):
        raise vol.Invalid(f"Invalid dict for {key}") from None

    # Process dict
    for c_key, c_value in data_dict.items():
        # Ignore unknown options / remove from list
        if c_key not in typ:
            _LOGGER.warning("Unknown option %s", c_key)
            continue

        # Nested?
        if isinstance(typ[c_key], list):
            options[c_key] = _nested_validate_list(
                coresys, typ[c_key][0], c_value, c_key
            )
        else:
            options[c_key] = _single_validate(coresys, typ[c_key], c_value, c_key)

    _check_missing_options(typ, options, key)
    return options


def _check_missing_options(origin, exists, root):
    """Check that all required options exist."""
    missing = set(origin) - set(exists)
    for miss_opt in missing:
        if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
            continue
        raise vol.Invalid(f"Missing option {miss_opt} in {root}") from None
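# Editor's note: note the "?" convention -- a schema value ending in "?"
# marks the option as optional, so the check above skips it. A runnable
# copy of that rule, minus the voluptuous error type:
def _example_check_missing(origin: dict, exists: dict, root: str) -> None:
    """Raise if a non-optional schema key is absent from the options."""
    for miss_opt in set(origin) - set(exists):
        if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
            continue  # trailing "?" marks the option as optional
        raise ValueError(f"Missing option {miss_opt} in {root}")

_example_check_missing({"host": "str", "port": "port?"}, {"host": "1.2.3.4"}, "root")
# passes: 'port?' is optional; a missing 'host' would raise instead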
def schema_ui_options(raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
    """Generate UI schema."""
    ui_schema: List[Dict[str, Any]] = []

    # read options
    for key, value in raw_schema.items():
        if isinstance(value, list):
            # nested value list
            _nested_ui_list(ui_schema, value, key)
        elif isinstance(value, dict):
            # nested value dict
            _nested_ui_dict(ui_schema, value, key)
        else:
            # normal value
            _single_ui_option(ui_schema, value, key)

    return ui_schema

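# Editor's note: a toy version of the dispatch above, covering only "bool"
# and "port" elements, to show the shape of the generated UI nodes:
def _example_ui_options(raw_schema: dict) -> list:
    ui = []
    for key, value in raw_schema.items():
        node = {"name": key}
        node["optional" if value.endswith("?") else "required"] = True
        node["type"] = "boolean" if value.startswith("bool") else "integer"
        ui.append(node)
    return ui

assert _example_ui_options({"ssl": "bool?", "port": "port"}) == [
    {"name": "ssl", "optional": True, "type": "boolean"},
    {"name": "port", "required": True, "type": "integer"},
]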
def _single_ui_option(
    ui_schema: List[Dict[str, Any]], value: str, key: str, multiple: bool = False
) -> None:
    """Validate a single element."""
    ui_node: Dict[str, Union[str, bool, float, List[str]]] = {"name": key}

    # If multiple
    if multiple:
        ui_node["multiple"] = True

    # Parse extend data from type
    match = RE_SCHEMA_ELEMENT.match(value)
    if not match:
        return

    # Prepare range
    for group_name in _SCHEMA_LENGTH_PARTS:
        group_value = match.group(group_name)
        if not group_value:
            continue
        if group_name[2:] == "min":
            ui_node["lengthMin"] = float(group_value)
        elif group_name[2:] == "max":
            ui_node["lengthMax"] = float(group_value)

    # If optional
    if value.endswith("?"):
        ui_node["optional"] = True
    else:
        ui_node["required"] = True

    # Data types
    if value.startswith(V_STR):
        ui_node["type"] = "string"
    elif value.startswith(V_PASSWORD):
        ui_node["type"] = "string"
        ui_node["format"] = "password"
    elif value.startswith(V_INT):
        ui_node["type"] = "integer"
    elif value.startswith(V_FLOAT):
        ui_node["type"] = "float"
    elif value.startswith(V_BOOL):
        ui_node["type"] = "boolean"
    elif value.startswith(V_EMAIL):
        ui_node["type"] = "string"
        ui_node["format"] = "email"
    elif value.startswith(V_URL):
        ui_node["type"] = "string"
        ui_node["format"] = "url"
    elif value.startswith(V_PORT):
        ui_node["type"] = "integer"
    elif value.startswith(V_MATCH):
        ui_node["type"] = "string"
    elif value.startswith(V_LIST):
        ui_node["type"] = "select"
        ui_node["options"] = match.group("list").split("|")

    ui_schema.append(ui_node)


def _nested_ui_list(
    ui_schema: List[Dict[str, Any]], option_list: List[Any], key: str
) -> None:
    """UI nested list items."""
    try:
        element = option_list[0]
    except IndexError:
        _LOGGER.error("Invalid schema %s", key)
        return

    if isinstance(element, dict):
        _nested_ui_dict(ui_schema, element, key, multiple=True)
    else:
        _single_ui_option(ui_schema, element, key, multiple=True)


def _nested_ui_dict(
    ui_schema: List[Dict[str, Any]],
    option_dict: Dict[str, Any],
    key: str,
    multiple: bool = False,
) -> None:
    """UI nested dict items."""
    ui_node = {"name": key, "type": "schema", "optional": True, "multiple": multiple}

    nested_schema = []
    for c_key, c_value in option_dict.items():
        # Nested?
        if isinstance(c_value, list):
            _nested_ui_list(nested_schema, c_value, c_key)
        else:
            _single_ui_option(nested_schema, c_value, c_key)

    ui_node["schema"] = nested_schema
    ui_schema.append(ui_node)
@@ -112,6 +112,7 @@ class RestAPI(CoreSysAttributes):
        self.webapp.add_routes(
            [
                web.get("/network/info", api_network.info),
                web.post("/network/reload", api_network.reload),
                web.get(
                    "/network/interface/{interface}/info", api_network.interface_info
                ),
@@ -247,6 +248,7 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get("/auth", api_auth.auth),
                web.post("/auth", api_auth.auth),
                web.post("/auth/reset", api_auth.reset),
                web.delete("/auth/cache", api_auth.cache),
@@ -82,6 +82,7 @@ from ..const import (
    ATTR_STARTUP,
    ATTR_STATE,
    ATTR_STDIN,
    ATTR_UART,
    ATTR_UDEV,
    ATTR_UPDATE_AVAILABLE,
    ATTR_URL,
@@ -237,7 +238,7 @@ class APIAddons(CoreSysAttributes):
            ATTR_PRIVILEGED: addon.privileged,
            ATTR_FULL_ACCESS: addon.with_full_access,
            ATTR_APPARMOR: addon.apparmor,
            ATTR_DEVICES: _pretty_devices(addon),
            ATTR_DEVICES: addon.static_devices,
            ATTR_ICON: addon.with_icon,
            ATTR_LOGO: addon.with_logo,
            ATTR_CHANGELOG: addon.with_changelog,
@@ -250,6 +251,7 @@ class APIAddons(CoreSysAttributes):
            ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
            ATTR_GPIO: addon.with_gpio,
            ATTR_USB: addon.with_usb,
            ATTR_UART: addon.with_uart,
            ATTR_KERNEL_MODULES: addon.with_kernel_modules,
            ATTR_DEVICETREE: addon.with_devicetree,
            ATTR_UDEV: addon.with_udev,
@@ -286,6 +288,8 @@ class APIAddons(CoreSysAttributes):
                ATTR_VERSION: addon.version,
                ATTR_UPDATE_AVAILABLE: addon.need_update,
                ATTR_WATCHDOG: addon.watchdog,
                ATTR_DEVICES: addon.static_devices
                + [device.path for device in addon.devices],
            }
        )

@@ -29,6 +29,10 @@ SCHEMA_PASSWORD_RESET = vol.Schema(
    }
)

REALM_HEADER: Dict[str, str] = {
    WWW_AUTHENTICATE: 'Basic realm="Home Assistant Authentication"'
}


class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""
@@ -63,7 +67,9 @@ class APIAuth(CoreSysAttributes):

        # BasicAuth
        if AUTHORIZATION in request.headers:
            return await self._process_basic(request, addon)
            if not await self._process_basic(request, addon):
                raise HTTPUnauthorized(headers=REALM_HEADER)
            return True

        # Json
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
@@ -75,9 +81,7 @@ class APIAuth(CoreSysAttributes):
            data = await request.post()
            return await self._process_dict(request, addon, data)

        raise HTTPUnauthorized(
            headers={WWW_AUTHENTICATE: 'Basic realm="Home Assistant Authentication"'}
        )
        raise HTTPUnauthorized(headers=REALM_HEADER)

    @api_process
    async def reset(self, request: web.Request) -> None:
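# Editor's note: hoisting the realm into a module-level REALM_HEADER lets
# every 401 path reuse the same WWW-Authenticate challenge. A minimal aiohttp
# sketch of that pattern (hypothetical handler name):
from aiohttp import web as _web
from aiohttp.hdrs import AUTHORIZATION as _AUTH, WWW_AUTHENTICATE as _WWW

_REALM = {_WWW: 'Basic realm="Home Assistant Authentication"'}

async def _example_auth(request: _web.Request) -> _web.Response:
    # Reject anything without credentials, using the shared challenge header.
    if _AUTH not in request.headers:
        raise _web.HTTPUnauthorized(headers=_REALM)
    return _web.json_response({"result": "ok"})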
@@ -1,50 +1,46 @@
"""Init file for Supervisor hardware RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict, List
from typing import Any, Awaitable, Dict

from aiohttp import web

from ..const import (
    ATTR_AUDIO,
    ATTR_DISK,
    ATTR_GPIO,
    ATTR_INPUT,
    ATTR_OUTPUT,
    ATTR_SERIAL,
    ATTR_USB,
)
from ..const import ATTR_AUDIO, ATTR_DEVICES, ATTR_INPUT, ATTR_NAME, ATTR_OUTPUT
from ..coresys import CoreSysAttributes
from ..hardware.const import (
    ATTR_ATTRIBUTES,
    ATTR_BY_ID,
    ATTR_DEV_PATH,
    ATTR_SUBSYSTEM,
    ATTR_SYSFS,
)
from ..hardware.data import Device
from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)


def device_struct(device: Device) -> Dict[str, Any]:
    """Return a dict with information about a device to be used in the API."""
    return {
        ATTR_NAME: device.name,
        ATTR_SYSFS: device.sysfs,
        ATTR_DEV_PATH: device.path,
        ATTR_SUBSYSTEM: device.subsystem,
        ATTR_BY_ID: device.by_id,
        ATTR_ATTRIBUTES: device.attributes,
    }

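# Editor's note: a minimal sketch of the serialization above, with a
# hypothetical dataclass carrying the same fields as hardware.data.Device:
from dataclasses import dataclass as _dataclass, field as _field
from pathlib import Path as _Path

@_dataclass
class _ExampleDevice:
    name: str
    path: _Path
    sysfs: _Path
    subsystem: str
    by_id: _Path = None
    attributes: dict = _field(default_factory=dict)

_dev = _ExampleDevice(
    "ttyUSB0",
    _Path("/dev/ttyUSB0"),
    _Path("/sys/devices/platform/soc/usb/tty/ttyUSB0"),  # illustrative sysfs path
    "tty",
)
_struct = {
    "name": _dev.name,
    "sysfs": _dev.sysfs,
    "dev_path": _dev.path,
    "subsystem": _dev.subsystem,
    "by_id": _dev.by_id,
    "attributes": _dev.attributes,
}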
class APIHardware(CoreSysAttributes):
    """Handle RESTful API for hardware functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Show hardware info."""
        serial: List[str] = []

        # Create Serial list with device links
        for device in self.sys_hardware.serial_devices:
            serial.append(device.path.as_posix())
            for link in device.links:
                serial.append(link.as_posix())

        return {
            ATTR_SERIAL: serial,
            ATTR_INPUT: list(self.sys_hardware.input_devices),
            ATTR_DISK: [
                device.path.as_posix() for device in self.sys_hardware.disk_devices
            ],
            ATTR_GPIO: list(self.sys_hardware.gpio_devices),
            ATTR_USB: [
                device.path.as_posix() for device in self.sys_hardware.usb_devices
            ],
            ATTR_AUDIO: self.sys_hardware.audio_devices,
            ATTR_DEVICES: [
                device_struct(device) for device in self.sys_hardware.devices
            ]
        }

    @api_process
@@ -64,6 +60,6 @@ class APIHardware(CoreSysAttributes):
        }

    @api_process
    def trigger(self, request: web.Request) -> Awaitable[None]:
    async def trigger(self, request: web.Request) -> Awaitable[None]:
        """Trigger a udev device reload."""
        return asyncio.shield(self.sys_hardware.udev_trigger())
        _LOGGER.debug("Ignoring DEPRECATED hardware trigger function call.")
@@ -11,6 +11,7 @@ from ..const import (
    ATTR_DEPLOYMENT,
    ATTR_DESCRIPTON,
    ATTR_DISK_FREE,
    ATTR_DISK_LIFE_TIME,
    ATTR_DISK_TOTAL,
    ATTR_DISK_USED,
    ATTR_FEATURES,
@@ -43,6 +44,7 @@ class APIHost(CoreSysAttributes):
            ATTR_DISK_FREE: self.sys_host.info.free_space,
            ATTR_DISK_TOTAL: self.sys_host.info.total_space,
            ATTR_DISK_USED: self.sys_host.info.used_space,
            ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
            ATTR_FEATURES: self.sys_host.features,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
            ATTR_KERNEL: self.sys_host.info.kernel,
@@ -1,7 +1,7 @@
"""REST API for network."""
import asyncio
from ipaddress import ip_address, ip_interface
from typing import Any, Dict
from typing import Any, Awaitable, Dict

from aiohttp import web
import attr
@@ -18,6 +18,7 @@ from ..const import (
    ATTR_FREQUENCY,
    ATTR_GATEWAY,
    ATTR_HOST_INTERNET,
    ATTR_ID,
    ATTR_INTERFACE,
    ATTR_INTERFACES,
    ATTR_IPV4,
@@ -26,6 +27,7 @@ from ..const import (
    ATTR_METHOD,
    ATTR_MODE,
    ATTR_NAMESERVERS,
    ATTR_PARENT,
    ATTR_PRIMARY,
    ATTR_PSK,
    ATTR_SIGNAL,
@@ -80,7 +82,7 @@ SCHEMA_UPDATE = vol.Schema(
)


def ipconfig_struct(config: IpConfig) -> dict:
def ipconfig_struct(config: IpConfig) -> Dict[str, Any]:
    """Return a dict with information about ip configuration."""
    return {
        ATTR_METHOD: config.method,
@@ -90,7 +92,7 @@ def ipconfig_struct(config: IpConfig) -> dict:
    }


def wifi_struct(config: WifiConfig) -> dict:
def wifi_struct(config: WifiConfig) -> Dict[str, Any]:
    """Return a dict with information about wifi configuration."""
    return {
        ATTR_MODE: config.mode,
@@ -100,7 +102,15 @@ def wifi_struct(config: WifiConfig) -> dict:
    }


def interface_struct(interface: Interface) -> dict:
def vlan_struct(config: VlanConfig) -> Dict[str, Any]:
    """Return a dict with information about VLAN configuration."""
    return {
        ATTR_ID: config.id,
        ATTR_PARENT: config.interface,
    }


def interface_struct(interface: Interface) -> Dict[str, Any]:
    """Return a dict with information about an interface to be used in the API."""
    return {
        ATTR_INTERFACE: interface.name,
@@ -111,11 +121,11 @@ def interface_struct(interface: Interface) -> dict:
        ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
        ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
        ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
        ATTR_VLAN: wifi_struct(interface.vlan) if interface.vlan else None,
        ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
    }


def accesspoint_struct(accesspoint: AccessPoint) -> dict:
def accesspoint_struct(accesspoint: AccessPoint) -> Dict[str, Any]:
    """Return a dict for AccessPoint."""
    return {
        ATTR_MODE: accesspoint.mode,
@@ -145,7 +155,7 @@ class APINetwork(CoreSysAttributes):
        except HostNetworkNotFound:
            pass

        raise APIError(f"Interface {name} does not exsist") from None
        raise APIError(f"Interface {name} does not exist") from None

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
@@ -207,6 +217,11 @@ class APINetwork(CoreSysAttributes):

        await asyncio.shield(self.sys_host.network.apply_changes(interface))

    @api_process
    def reload(self, request: web.Request) -> Awaitable[None]:
        """Reload network data."""
        return asyncio.shield(self.sys_host.network.update())
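# Editor's note: returning asyncio.shield(...) keeps the reload running even
# if the HTTP request is cancelled mid-flight. A small self-contained
# illustration of that behavior:
import asyncio as _asyncio

async def _example_update() -> None:
    await _asyncio.sleep(0.1)
    print("network data reloaded")

async def _example_main() -> None:
    # Cancelling the wrapper does not cancel the shielded inner task.
    task = _asyncio.ensure_future(_asyncio.shield(_example_update()))
    await _asyncio.sleep(0)
    task.cancel()
    await _asyncio.sleep(0.2)  # _example_update() still completes and prints

# _asyncio.run(_example_main())  # prints: network data reloaded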
    @api_process
    async def scan_accesspoints(self, request: web.Request) -> Dict[str, Any]:
        """Scan and return a list of available networks."""
@@ -1,9 +1,9 @@
try {
  new Function("import('/api/hassio/app/frontend_latest/entrypoint.df099659.js')")();
  new Function("import('/api/hassio/app/frontend_latest/entrypoint.cf81f6d9.js')")();
} catch (err) {
  var el = document.createElement('script');
  el.src = '/api/hassio/app/frontend_es5/entrypoint.d303236e.js';
  el.src = '/api/hassio/app/frontend_es5/entrypoint.c258a457.js';
  document.body.appendChild(el);
}
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"chunk.0ca880f618245e0de2ab.js","sources":["webpack://home-assistant-frontend/chunk.0ca880f618245e0de2ab.js"],"mappings":"AAAA","sourceRoot":""}
@@ -1,2 +0,0 @@
!function(){"use strict";var n,t={14971:function(n,t,e){e(58556);var r,o,i=e(91107),u=e(9902),a=e.n(u),s=e(62173),f={renderMarkdown:function(n,t){var e,i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign({},(0,s.getDefaultWhiteList)(),{"ha-icon":["icon"],"ha-svg-icon":["path"]})),i.allowSvg?(o||(o=Object.assign({},r,{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=o):e=r,(0,s.filterXSS)(a()(n,t),{whiteList:e})}};(0,i.Jj)(f)}},e={};function r(n){if(e[n])return e[n].exports;var o=e[n]={exports:{}};return t[n].call(o.exports,o,o.exports,r),o.exports}r.m=t,r.x=function(){r(14971)},r.n=function(n){var t=n&&n.__esModule?function(){return n.default}:function(){return n};return r.d(t,{a:t}),t},r.d=function(n,t){for(var e in t)r.o(t,e)&&!r.o(n,e)&&Object.defineProperty(n,e,{enumerable:!0,get:t[e]})},r.f={},r.e=function(n){return Promise.all(Object.keys(r.f).reduce((function(t,e){return r.f[e](n,t),t}),[]))},r.u=function(n){return"chunk.d93e72d29a82b1218f4a.js"},r.o=function(n,t){return Object.prototype.hasOwnProperty.call(n,t)},n=r.x,r.x=function(){return r.e(474).then(n)},r.p="/api/hassio/app/frontend_es5/",function(){var n={971:1};r.f.i=function(t,e){n[t]||importScripts(""+r.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=t.push.bind(t);t.push=function(t){var o=t[0],i=t[1],u=t[2];for(var a in i)r.o(i,a)&&(r.m[a]=i[a]);for(u&&u(r);o.length;)n[o.pop()]=1;e(t)}}(),r.x()}();
//# sourceMappingURL=chunk.0cd3dee90fe9e2ba789d.js.map
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"chunk.0cd3dee90fe9e2ba789d.js","sources":["webpack://home-assistant-frontend/chunk.0cd3dee90fe9e2ba789d.js"],"mappings":"AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"chunk.1da2af7a753728051dc0.js","sources":["webpack://home-assistant-frontend/chunk.1da2af7a753728051dc0.js"],"mappings":";AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"chunk.42b19ffa1fa277adc9cd.js","sources":["webpack://home-assistant-frontend/chunk.42b19ffa1fa277adc9cd.js"],"mappings":"AAAA","sourceRoot":""}
@@ -1 +0,0 @@
{"version":3,"file":"chunk.4c51a194fe9fc242f107.js","sources":["webpack://home-assistant-frontend/chunk.4c51a194fe9fc242f107.js"],"mappings":"AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"chunk.63747632f287a94ecaa7.js","sources":["webpack://home-assistant-frontend/chunk.63747632f287a94ecaa7.js"],"mappings":"AAAA","sourceRoot":""}
@@ -0,0 +1,2 @@
!function(){"use strict";var n,t={14971:function(n,t,e){var r,o,i=e(91107),u=e(9902),a=e.n(u),s=(e(58556),e(62173)),f={renderMarkdown:function(n,t){var e,i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign({},(0,s.getDefaultWhiteList)(),{"ha-icon":["icon"],"ha-svg-icon":["path"]})),i.allowSvg?(o||(o=Object.assign({},r,{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=o):e=r,(0,s.filterXSS)(a()(n,t),{whiteList:e})}};(0,i.Jj)(f)}},e={};function r(n){if(e[n])return e[n].exports;var o=e[n]={exports:{}};return t[n].call(o.exports,o,o.exports,r),o.exports}r.m=t,r.x=function(){r(14971)},r.n=function(n){var t=n&&n.__esModule?function(){return n.default}:function(){return n};return r.d(t,{a:t}),t},r.d=function(n,t){for(var e in t)r.o(t,e)&&!r.o(n,e)&&Object.defineProperty(n,e,{enumerable:!0,get:t[e]})},r.f={},r.e=function(n){return Promise.all(Object.keys(r.f).reduce((function(t,e){return r.f[e](n,t),t}),[]))},r.u=function(n){return"chunk.d93e72d29a82b1218f4a.js"},r.o=function(n,t){return Object.prototype.hasOwnProperty.call(n,t)},n=r.x,r.x=function(){return r.e(474).then(n)},r.p="/api/hassio/app/frontend_es5/",function(){var n={971:1};r.f.i=function(t,e){n[t]||importScripts(""+r.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=t.push.bind(t);t.push=function(t){var o=t[0],i=t[1],u=t[2];for(var a in i)r.o(i,a)&&(r.m[a]=i[a]);for(u&&u(r);o.length;)n[o.pop()]=1;e(t)}}(),r.x()}();
//# sourceMappingURL=chunk.67e3627d3c04d75f6f9d.js.map
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"chunk.67e3627d3c04d75f6f9d.js","sources":["webpack://home-assistant-frontend/chunk.67e3627d3c04d75f6f9d.js"],"mappings":"AAAA","sourceRoot":""}
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"chunk.6c00b80674f8de8db25e.js","sources":["webpack://home-assistant-frontend/chunk.6c00b80674f8de8db25e.js"],"mappings":"AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"chunk.712a551bcc67f5a1a334.js","sources":["webpack://home-assistant-frontend/chunk.712a551bcc67f5a1a334.js"],"mappings":"AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"chunk.79b4e240c46ccbe25e4d.js","sources":["webpack://home-assistant-frontend/chunk.79b4e240c46ccbe25e4d.js"],"mappings":"AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"chunk.8c95d60c89a57c50576d.js","sources":["webpack://home-assistant-frontend/chunk.8c95d60c89a57c50576d.js"],"mappings":";AAAA","sourceRoot":""}
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"chunk.bf69fc6a1c0d21860285.js","sources":["webpack://home-assistant-frontend/chunk.bf69fc6a1c0d21860285.js"],"mappings":"AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"chunk.ca478c5ff8d9cf0edd66.js","sources":["webpack://home-assistant-frontend/chunk.ca478c5ff8d9cf0edd66.js"],"mappings":"AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"chunk.d0222f0b152d21991ec4.js","sources":["webpack://home-assistant-frontend/chunk.d0222f0b152d21991ec4.js"],"mappings":"AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"chunk.e7fd328546ebeeaadadc.js","sources":["webpack://home-assistant-frontend/chunk.e7fd328546ebeeaadadc.js"],"mappings":"AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"chunk.f5cf4c1fdbd3d8c6f907.js","sources":["webpack://home-assistant-frontend/chunk.f5cf4c1fdbd3d8c6f907.js"],"mappings":"AAAA","sourceRoot":""}
3
supervisor/api/panel/frontend_es5/entrypoint.c258a457.js
Normal file
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/entrypoint.c258a457.js.gz
Normal file
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"entrypoint.c258a457.js","sources":["webpack://home-assistant-frontend/entrypoint.c258a457.js"],"mappings":";AAAA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"entrypoint.d303236e.js","sources":["webpack://home-assistant-frontend/entrypoint.d303236e.js"],"mappings":";AAAA","sourceRoot":""}
@@ -1,3 +1,3 @@
{
  "entrypoint.js": "/api/hassio/app/frontend_es5/entrypoint.d303236e.js"
  "entrypoint.js": "/api/hassio/app/frontend_es5/entrypoint.c258a457.js"
}
@@ -1,2 +1,2 @@
(()=>{"use strict";var e,t={4971:(e,t,r)=>{var n=r(1107),o=r(9902),s=r.n(o),a=r(2173);let i,p;const h={renderMarkdown:(e,t,r={})=>{let n;return i||(i={...(0,a.getDefaultWhiteList)(),"ha-icon":["icon"],"ha-svg-icon":["path"]}),r.allowSvg?(p||(p={...i,svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]}),n=p):n=i,(0,a.filterXSS)(s()(e,t),{whiteList:n})}};(0,n.Jj)(h)}},r={};function n(e){if(r[e])return r[e].exports;var o=r[e]={exports:{}};return t[e].call(o.exports,o,o.exports,n),o.exports}n.m=t,n.x=()=>{n(4971)},n.n=e=>{var t=e&&e.__esModule?()=>e.default:()=>e;return n.d(t,{a:t}),t},n.d=(e,t)=>{for(var r in t)n.o(t,r)&&!n.o(e,r)&&Object.defineProperty(e,r,{enumerable:!0,get:t[r]})},n.f={},n.e=e=>Promise.all(Object.keys(n.f).reduce(((t,r)=>(n.f[r](e,t),t)),[])),n.u=e=>"chunk.912db116e709889ddb20.js",n.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),e=n.x,n.x=()=>n.e(219).then(e),n.p="/api/hassio/app/frontend_latest/",(()=>{var e={971:1};n.f.i=(t,r)=>{e[t]||importScripts(""+n.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],r=t.push.bind(t);t.push=t=>{var[o,s,a]=t;for(var i in s)n.o(s,i)&&(n.m[i]=s[i]);for(a&&a(n);o.length;)e[o.pop()]=1;r(t)}})(),n.x()})();
//# sourceMappingURL=chunk.7858204150418d6d4b9c.js.map
//# sourceMappingURL=chunk.1f8c4c0bad1eec9e4ef3.js.map
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"chunk.1f8c4c0bad1eec9e4ef3.js","sources":["webpack://home-assistant-frontend/chunk.1f8c4c0bad1eec9e4ef3.js"],"mappings":"AAAA","sourceRoot":""}
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"chunk.2a461c1bd77195b530dc.js","sources":["webpack://home-assistant-frontend/chunk.2a461c1bd77195b530dc.js"],"mappings":"AAAA;AAgPA;AACA;AACA;AANA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA","sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"chunk.3b0c9ad1cbf10466830f.js","sources":["webpack://home-assistant-frontend/chunk.3b0c9ad1cbf10466830f.js"],"mappings":"AAAA;;;AA6HA;AACA;AACA;;AAEA;;;AAGA;;AAEA;;AAKA;;AAEA;AACA;;;AAGA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;;;AAIA;AACA;;;AAGA;;;AAKA;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiGA","sourceRoot":""}
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"chunk.4458b10ab597e137f420.js","sources":["webpack://home-assistant-frontend/chunk.4458b10ab597e137f420.js"],"mappings":"AAAA;;;AA6HA;AACA;AACA;;AAEA;;;AAGA;;AAEA;;AAKA;;AAEA;AACA;;;AAGA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;;;AAIA;AACA;;;AAGA;;;AAKA;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+FA","sourceRoot":""}
Some files were not shown because too many files have changed in this diff.