Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-27 01:49:21 +00:00)

Compare commits: remove-pas ... 2023.10.0 (135 commits)
Commits in this comparison (abbreviated SHAs; the author and date columns were not preserved):

afa467a32b, 274218d48e, 7e73df26ab, ef8fc80c95, 05c39144e3, f5cd35af47, c69ecdafd0, fa90c247ec,
0cd7bd47bb, 36d48d19fc, 9322b68d47, e11ff64b15, 3776dabfcf, d4e5831f0f, 7b3b478e88, f5afe13e91,
49ce468d83, b26551c812, 394ba580d2, 2f7a54f5fd, 360e085926, 042921925d, dcf024387b, e1232bc9e7,
d96598b5dd, 2605f85668, 2c8e6ca0cd, 0225f574be, 34090bf2eb, 5ae585ce13, 2bb10a32d7, 435743dd2c,
98589fba6d, 32da679e02, 44daffc65b, 0aafda1477, 60604e33b9, 98268b377a, de54979471, ee6e339587,
c16cf89318, c66cb7423e, f5bd95a519, 500f9ec1c1, a4713d4a1e, 04452dfb1a, 69d09851d9, 1b649fe5cd,
38572a5a86, f5f51169e6, 07c2178ae1, f30d21361f, 6adb4fbcf7, d73962bd7d, f4b43739da, 4838b280ad,
f93b753c03, de06361cb0, 15ce48c8aa, 38758d05a8, a79fa14ee7, 1eb95b4d33, d04e47f5b3, dad5118f21,
acc0e5c989, 204fcdf479, 93ba8a3574, f2f9e3b514, 61288559b3, bd2c99a455, 1937348b24, b7b2fae325,
11115923b2, 295133d2e9, 3018b851c8, 222c3fd485, 9650fd2ba1, c88fd9a7d9, 1611beccd1, 71077fb0f7,
9647fba98f, 86f004e45a, a98334ede8, e19c2d6805, 847736dab8, 45f930ab21, 6ea54f1ddb, 81ce0a60f6,
bf5d839c22, fc385cfac0, 12d55b8411, e60af93e2b, 1691f0eac7, be4a6a1564, 24c5613a50, 5266927bf7,
4bd2000174, b8178414a4, f9bc2f5993, f1a72ee418, b19dcef5b7, 1f92ab42ca, 1f940a04fd, f771eaab5f,
d1379a8154, e488f02557, f11cc86254, 175667bfe8, 0a0f14ddea, 9e08677ade, abbf8b9b65, 96d5fc244e,
3b38047fd4, 48e9e1c4f9, 355961a1eb, e68190b6b6, e7cc7e971f, ee027eb510, a584300bf3, 16e1f839d7,
c2123f0903, 9fbeb2a769, 3e0723ec24, 3e5f1d96b5, be87082502, f997e51249, 456316fdd4, 9a7d547394,
d3031e2eae, 35bd66119a, 9be3b47e0e, 4bed8c1327, 254ec2d1af, e4ee3e4226, 65545e7218
.github/workflows/builder.yml (vendored, 93 lines changed)

@@ -53,7 +53,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
         with:
           fetch-depth: 0

@@ -83,12 +83,16 @@ jobs:
     name: Build ${{ matrix.arch }} supervisor
     needs: init
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
+      packages: write
     strategy:
       matrix:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
         with:
           fetch-depth: 0

@@ -102,13 +106,13 @@ jobs:

       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2023.04.0
+        uses: home-assistant/wheels@2023.10.1
         with:
           abi: cp311
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
-          apk: "libffi-dev;openssl-dev"
+          apk: "libffi-dev;openssl-dev;yaml-dev"
           skip-binary: aiohttp
           env-file: true
           requirements: "requirements.txt"

@@ -119,16 +123,33 @@ jobs:
         with:
           type: ${{ env.BUILD_TYPE }}

-      - name: Login to DockerHub
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v2.2.0
+        uses: actions/setup-python@v4.7.1
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          python-version: ${{ env.DEFAULT_PYTHON }}
+
+      - name: Install Cosign
+        if: needs.init.outputs.publish == 'true'
+        uses: sigstore/cosign-installer@v3.1.2
+        with:
+          cosign-release: "v2.0.2"
+
+      - name: Install dirhash and calc hash
+        if: needs.init.outputs.publish == 'true'
+        run: |
+          pip3 install dirhash
+          dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
+          echo "${dir_hash}" > rootfs/supervisor.sha256
+
+      - name: Sign supervisor SHA256
+        if: needs.init.outputs.publish == 'true'
+        run: |
+          cosign sign-blob --yes rootfs/supervisor.sha256 --bundle rootfs/supervisor.sha256.sig

       - name: Login to GitHub Container Registry
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v2.2.0
+        uses: docker/login-action@v3.0.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}

@@ -139,55 +160,17 @@ jobs:
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

       - name: Build supervisor
-        uses: home-assistant/builder@2023.06.0
+        uses: home-assistant/builder@2023.09.0
         with:
          args: |
            $BUILD_ARGS \
            --${{ matrix.arch }} \
            --target /data \
+           --cosign \
            --generic ${{ needs.init.outputs.version }}
         env:
           CAS_API_KEY: ${{ secrets.CAS_TOKEN }}

-  codenotary:
-    name: CAS signature
-    needs: init
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the repository
-        if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v3.5.3
-        with:
-          fetch-depth: 0
-
-      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v4.6.1
-        with:
-          python-version: ${{ env.DEFAULT_PYTHON }}
-
-      - name: Set version
-        if: needs.init.outputs.publish == 'true'
-        uses: home-assistant/actions/helpers/version@master
-        with:
-          type: ${{ env.BUILD_TYPE }}
-
-      - name: Install dirhash and calc hash
-        if: needs.init.outputs.publish == 'true'
-        id: dirhash
-        run: |
-          pip3 install dirhash
-          dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
-          echo "::set-output name=dirhash::${dir_hash}"
-
-      - name: Signing Source
-        if: needs.init.outputs.publish == 'true'
-        uses: home-assistant/actions/helpers/codenotary@master
-        with:
-          source: hash://${{ steps.dirhash.outputs.dirhash }}
-          asset: supervisor-${{ needs.init.outputs.version }}
-          token: ${{ secrets.CAS_TOKEN }}
-
   version:
     name: Update version
     needs: ["init", "run_supervisor"]

@@ -195,7 +178,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0

       - name: Initialize git
         if: needs.init.outputs.publish == 'true'

@@ -216,15 +199,15 @@ jobs:
   run_supervisor:
     runs-on: ubuntu-latest
     name: Run the Supervisor
-    needs: ["build", "codenotary", "init"]
+    needs: ["build", "init"]
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0

       - name: Build the Supervisor
         if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2023.06.0
+        uses: home-assistant/builder@2023.09.0
         with:
           args: |
             --test \

@@ -236,7 +219,7 @@ jobs:
         if: needs.init.outputs.publish == 'true'
         run: |
           docker pull ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }}
-          docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} homeassistant/amd64-hassio-supervisor:runner
+          docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} ghcr.io/home-assistant/amd64-hassio-supervisor:runner

       - name: Create the Supervisor
         run: |

@@ -253,7 +236,7 @@ jobs:
             -e SUPERVISOR_NAME=hassio_supervisor \
             -e SUPERVISOR_DEV=1 \
             -e SUPERVISOR_MACHINE="qemux86-64" \
-            homeassistant/amd64-hassio-supervisor:runner
+            ghcr.io/home-assistant/amd64-hassio-supervisor:runner

       - name: Start the Supervisor
         run: docker start hassio_supervisor
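The dirhash/cosign steps above replace the old CodeNotary (CAS) source-signing job: the build now hashes the supervisor Python sources, writes the digest to rootfs/supervisor.sha256, and signs that file with cosign. Below is a minimal sketch of the same hash computation done from Python, using the dirhash package that is already pinned in requirements.txt; it assumes the library's Python API mirrors its CLI flags, and the output path is only illustrative.

# Sketch: reproduce the workflow's directory hash locally (assumes dirhash==0.2.1 is installed).
from pathlib import Path

from dirhash import dirhash  # same library the workflow installs with pip3

# Hash only the Python sources, like `dirhash supervisor -a sha256 --match "*.py"` in the workflow.
digest = dirhash("supervisor", "sha256", match=["*.py"])

# The workflow writes the digest next to the rootfs and then signs it:
#   cosign sign-blob --yes rootfs/supervisor.sha256 --bundle rootfs/supervisor.sha256.sig
Path("supervisor.sha256").write_text(f"{digest}\n")
print(digest)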
.github/workflows/ci.yaml (vendored, 81 lines changed)

@@ -10,7 +10,6 @@ on:
 env:
   DEFAULT_PYTHON: "3.11"
   PRE_COMMIT_HOME: ~/.cache/pre-commit
-  DEFAULT_CAS: v1.0.2

 concurrency:
   group: "${{ github.workflow }}-${{ github.ref }}"

@@ -26,15 +25,15 @@ jobs:
     name: Prepare Python dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Set up Python
         id: python
-        uses: actions/setup-python@v4.6.1
+        uses: actions/setup-python@v4.7.1
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: venv
           key: |

@@ -48,7 +47,7 @@ jobs:
           pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: ${{ env.PRE_COMMIT_HOME }}
           key: |

@@ -67,15 +66,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.6.1
+        uses: actions/setup-python@v4.7.1
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: venv
           key: |

@@ -96,7 +95,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"

@@ -111,15 +110,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.6.1
+        uses: actions/setup-python@v4.7.1
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: venv
           key: |

@@ -131,7 +130,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: ${{ env.PRE_COMMIT_HOME }}
           key: |

@@ -155,15 +154,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.6.1
+        uses: actions/setup-python@v4.7.1
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: venv
           key: |

@@ -187,15 +186,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.6.1
+        uses: actions/setup-python@v4.7.1
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: venv
           key: |

@@ -207,7 +206,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: ${{ env.PRE_COMMIT_HOME }}
           key: |

@@ -228,15 +227,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.6.1
+        uses: actions/setup-python@v4.7.1
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: venv
           key: |

@@ -248,7 +247,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: ${{ env.PRE_COMMIT_HOME }}
           key: |

@@ -272,15 +271,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.6.1
+        uses: actions/setup-python@v4.7.1
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: venv
           key: |

@@ -304,15 +303,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.6.1
+        uses: actions/setup-python@v4.7.1
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: venv
           key: |

@@ -324,7 +323,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: ${{ env.PRE_COMMIT_HOME }}
           key: |

@@ -345,19 +344,19 @@ jobs:
     name: Run tests Python ${{ needs.prepare.outputs.python-version }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.6.1
+        uses: actions/setup-python@v4.7.1
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Install CAS tools
-        uses: home-assistant/actions/helpers/cas@master
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@v3.1.2
         with:
-          version: ${{ env.DEFAULT_CAS }}
+          cosign-release: "v2.0.2"
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: venv
           key: |

@@ -392,7 +391,7 @@ jobs:
             -o console_output_style=count \
             tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v3.1.2
+        uses: actions/upload-artifact@v3.1.3
         with:
           name: coverage-${{ matrix.python-version }}
           path: .coverage

@@ -403,15 +402,15 @@ jobs:
     needs: ["pytest", "prepare"]
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.6.1
+        uses: actions/setup-python@v4.7.1
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v3.3.2
         with:
           path: venv
           key: |
.github/workflows/release-drafter.yml (vendored, 4 lines changed)

@@ -11,7 +11,7 @@ jobs:
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
         with:
           fetch-depth: 0

@@ -36,7 +36,7 @@ jobs:
          echo "::set-output name=version::$datepre.$newpost"

       - name: Run Release Drafter
-        uses: release-drafter/release-drafter@v5.23.0
+        uses: release-drafter/release-drafter@v5.24.0
         with:
           tag: ${{ steps.version.outputs.version }}
           name: ${{ steps.version.outputs.version }}
.github/workflows/sentry.yaml (vendored, 2 lines changed)

@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.1.0
       - name: Sentry Release
         uses: getsentry/action-release@v1.4.1
         env:
Dockerfile (18 lines changed)

@@ -7,7 +7,8 @@ ENV \
     CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1

 ARG \
-    CAS_VERSION
+    COSIGN_VERSION \
+    BUILD_ARCH

 # Install base
 WORKDIR /usr/src

@@ -21,19 +22,10 @@ RUN \
         libpulse \
         musl \
         openssl \
-    && apk add --no-cache --virtual .build-dependencies \
-        build-base \
-        go \
+        yaml \
     \
-    && git clone -b "v${CAS_VERSION}" --depth 1 \
-        https://github.com/codenotary/cas \
-    && cd cas \
-    && make cas \
-    && mv cas /usr/bin/cas \
-    \
-    && apk del .build-dependencies \
-    && rm -rf /root/go /root/.cache \
-    && rm -rf /usr/src/cas
+    && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
+    && chmod a+x /usr/bin/cosign

 # Install requirements
 COPY requirements.txt .
@@ -1,5 +1,4 @@
-image: homeassistant/{arch}-hassio-supervisor
-shadow_repository: ghcr.io/home-assistant
+image: ghcr.io/home-assistant/{arch}-hassio-supervisor
 build_from:
   aarch64: ghcr.io/home-assistant/aarch64-base-python:3.11-alpine3.16
   armhf: ghcr.io/home-assistant/armhf-base-python:3.11-alpine3.16

@@ -9,8 +8,11 @@ build_from:
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
+cosign:
+  base_identity: https://github.com/home-assistant/docker-base/.*
+  identity: https://github.com/home-assistant/supervisor/.*
 args:
-  CAS_VERSION: 1.0.2
+  COSIGN_VERSION: 2.0.2
 labels:
   io.hass.type: supervisor
   org.opencontainers.image.title: Home Assistant Supervisor
Submodule home-assistant-polymer updated: efa02c309b...9d457d52e8
@@ -1,26 +1,26 @@
 aiodns==3.0.0
-aiohttp==3.8.4
-async_timeout==4.0.2
+aiohttp==3.8.5
+async_timeout==4.0.3
 atomicwrites-homeassistant==1.4.1
 attrs==23.1.0
-awesomeversion==23.5.0
-brotli==1.0.9
+awesomeversion==23.8.0
+brotli==1.1.0
 ciso8601==2.3.0
 colorlog==6.7.0
 cpe==1.2.1
-cryptography==41.0.1
-debugpy==1.6.7
+cryptography==41.0.4
+debugpy==1.8.0
 deepmerge==1.1.0
 dirhash==0.2.1
 docker==6.1.3
-faust-cchardet==2.1.18
-gitpython==3.1.31
+faust-cchardet==2.1.19
+gitpython==3.1.37
 jinja2==3.1.2
 pulsectl==23.5.2
 pyudev==0.24.1
-ruamel.yaml==0.17.21
+PyYAML==6.0.1
 securetar==2023.3.0
-sentry-sdk==1.25.1
+sentry-sdk==1.31.0
 voluptuous==0.13.1
-dbus-fast==1.86.0
-typing_extensions==4.6.3
+dbus-fast==2.10.0
+typing_extensions==4.8.0
@@ -1,16 +1,16 @@
-black==23.3.0
-coverage==7.2.7
+black==23.9.1
+coverage==7.3.2
 flake8-docstrings==1.7.0
-flake8==6.0.0
-pre-commit==3.3.3
+flake8==6.1.0
+pre-commit==3.4.0
 pydocstyle==6.3.0
-pylint==2.17.4
-pytest-aiohttp==1.0.4
+pylint==3.0.0
+pytest-aiohttp==1.0.5
 pytest-asyncio==0.18.3
 pytest-cov==4.1.0
 pytest-timeout==2.1.0
-pytest==7.3.2
-pyupgrade==3.6.0
-time-machine==2.9.0
-typing_extensions==4.6.3
-urllib3==2.0.3
+pytest==7.4.2
+pyupgrade==3.14.0
+time-machine==2.13.0
+typing_extensions==4.8.0
+urllib3==2.0.6
@@ -1,4 +0,0 @@
------BEGIN PUBLIC KEY-----
-MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE03LvYuz79GTJx4uKp3w6NrSe5JZI
-iBtgzzYi0YQYtZO/r+xFpgDJEa0gLHkXtl94fpqrFiN89In83lzaszbZtA==
------END PUBLIC KEY-----
@@ -1,8 +0,0 @@
-{
-    "currentcontext": {
-        "LcHost": "cas.codenotary.com",
-        "LcPort": "443"
-    },
-    "schemaversion": 3,
-    "users": null
-}
@@ -1,5 +1,6 @@
 """Init file for Supervisor add-ons."""
 import asyncio
+from collections.abc import Awaitable
 from contextlib import suppress
 import logging
 import tarfile

@@ -104,9 +105,13 @@ class AddonManager(CoreSysAttributes):

         # Start Add-ons sequential
         # avoid issue on slow IO
+        # Config.wait_boot is deprecated. Until addons update with healthchecks,
+        # add a sleep task for it to keep the same minimum amount of wait time
+        wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
         for addon in tasks:
             try:
-                await addon.start()
+                if start_task := await addon.start():
+                    wait_boot.append(start_task)
             except AddonsError as err:
                 # Check if there is an system/user issue
                 if check_exception_chain(

@@ -121,7 +126,8 @@ class AddonManager(CoreSysAttributes):

                 _LOGGER.warning("Can't start Add-on %s", addon.slug)

-        await asyncio.sleep(self.sys_config.wait_boot)
+        # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
+        await asyncio.gather(*wait_boot, return_exceptions=True)

     async def shutdown(self, stage: AddonStartup) -> None:
         """Shutdown addons."""

@@ -146,11 +152,14 @@ class AddonManager(CoreSysAttributes):
             capture_exception(err)

     @Job(
+        name="addon_manager_install",
         conditions=ADDON_UPDATE_CONDITIONS,
         on_condition=AddonsJobError,
     )
     async def install(self, slug: str) -> None:
         """Install an add-on."""
+        self.sys_jobs.current.reference = slug
+
         if slug in self.local:
             raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
         store = self.store.get(slug)

@@ -241,11 +250,20 @@ class AddonManager(CoreSysAttributes):
         _LOGGER.info("Add-on '%s' successfully removed", slug)

     @Job(
+        name="addon_manager_update",
         conditions=ADDON_UPDATE_CONDITIONS,
         on_condition=AddonsJobError,
     )
-    async def update(self, slug: str, backup: bool | None = False) -> None:
-        """Update add-on."""
+    async def update(
+        self, slug: str, backup: bool | None = False
+    ) -> Awaitable[None] | None:
+        """Update add-on.
+
+        Returns a coroutine that completes when addon has state 'started' (see addon.start)
+        if addon is started after update. Else nothing is returned.
+        """
+        self.sys_jobs.current.reference = slug
+
         if slug not in self.local:
             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
         addon = self.local[slug]

@@ -288,10 +306,14 @@ class AddonManager(CoreSysAttributes):
             await addon.install_apparmor()

         # restore state
-        if last_state == AddonState.STARTED:
+        return (
             await addon.start()
+            if last_state in [AddonState.STARTED, AddonState.STARTUP]
+            else None
+        )

     @Job(
+        name="addon_manager_rebuild",
         conditions=[
             JobCondition.FREE_SPACE,
             JobCondition.INTERNET_HOST,

@@ -299,8 +321,14 @@ class AddonManager(CoreSysAttributes):
         ],
         on_condition=AddonsJobError,
     )
-    async def rebuild(self, slug: str) -> None:
-        """Perform a rebuild of local build add-on."""
+    async def rebuild(self, slug: str) -> Awaitable[None] | None:
+        """Perform a rebuild of local build add-on.
+
+        Returns a coroutine that completes when addon has state 'started' (see addon.start)
+        if addon is started after rebuild. Else nothing is returned.
+        """
+        self.sys_jobs.current.reference = slug
+
         if slug not in self.local:
             raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
         addon = self.local[slug]

@@ -333,10 +361,14 @@ class AddonManager(CoreSysAttributes):
         _LOGGER.info("Add-on '%s' successfully rebuilt", slug)

         # restore state
-        if last_state == AddonState.STARTED:
+        return (
             await addon.start()
+            if last_state in [AddonState.STARTED, AddonState.STARTUP]
+            else None
+        )

     @Job(
+        name="addon_manager_restore",
         conditions=[
             JobCondition.FREE_SPACE,
             JobCondition.INTERNET_HOST,

@@ -344,16 +376,26 @@ class AddonManager(CoreSysAttributes):
         ],
         on_condition=AddonsJobError,
     )
-    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
-        """Restore state of an add-on."""
+    async def restore(
+        self, slug: str, tar_file: tarfile.TarFile
+    ) -> Awaitable[None] | None:
+        """Restore state of an add-on.
+
+        Returns a coroutine that completes when addon has state 'started' (see addon.start)
+        if addon is started after restore. Else nothing is returned.
+        """
+        self.sys_jobs.current.reference = slug
+
         if slug not in self.local:
             _LOGGER.debug("Add-on %s is not local available for restore", slug)
             addon = Addon(self.coresys, slug)
+            had_ingress = False
         else:
             _LOGGER.debug("Add-on %s is local available for restore", slug)
             addon = self.local[slug]
+            had_ingress = addon.ingress_panel

-        await addon.restore(tar_file)
+        wait_for_start = await addon.restore(tar_file)

         # Check if new
         if slug not in self.local:

@@ -361,12 +403,17 @@ class AddonManager(CoreSysAttributes):
             self.local[slug] = addon

         # Update ingress
-        if addon.with_ingress:
+        if had_ingress != addon.ingress_panel:
             await self.sys_ingress.reload()
             with suppress(HomeAssistantAPIError):
                 await self.sys_ingress.update_hass_panel(addon)

-    @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST])
+        return wait_for_start
+
+    @Job(
+        name="addon_manager_repair",
+        conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
+    )
     async def repair(self) -> None:
         """Repair local add-ons."""
         needs_repair: list[Addon] = []

@@ -404,6 +451,7 @@ class AddonManager(CoreSysAttributes):
     async def sync_dns(self) -> None:
         """Sync add-ons DNS names."""
         # Update hosts
+        add_host_coros: list[Awaitable[None]] = []
         for addon in self.installed:
             try:
                 if not await addon.instance.is_running():

@@ -418,10 +466,14 @@ class AddonManager(CoreSysAttributes):
                 )
                 capture_exception(err)
             else:
-                self.sys_plugins.dns.add_host(
-                    ipv4=addon.ip_address, names=[addon.hostname], write=False
+                add_host_coros.append(
+                    self.sys_plugins.dns.add_host(
+                        ipv4=addon.ip_address, names=[addon.hostname], write=False
+                    )
                 )

+        await asyncio.gather(*add_host_coros)
+
         # Write hosts files
         with suppress(CoreDNSError):
-            self.sys_plugins.dns.write_hosts()
+            await self.sys_plugins.dns.write_hosts()
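The AddonManager hunks above replace the fixed post-start sleep with a list of per-add-on startup awaitables that are awaited together, plus the same pattern for DNS host registration. A minimal, self-contained sketch of that collect-then-gather pattern follows; the names and timings are illustrative stand-ins, not the Supervisor API.

# Sketch of the wait_boot pattern: gather startup waiters concurrently and
# swallow their exceptions, since per-addon errors are reported elsewhere.
import asyncio
from collections.abc import Awaitable


async def start_addon(name: str) -> Awaitable[None] | None:
    """Illustrative stand-in for Addon.start(): returns a 'wait until started' awaitable."""
    print(f"starting {name}")
    return asyncio.sleep(0.1)  # pretend this resolves when the addon reports started


async def boot(addons: list[str], wait_boot_seconds: float) -> None:
    # Keep the legacy minimum wait time alongside the per-addon waiters.
    wait_boot: list[Awaitable[None]] = [asyncio.sleep(wait_boot_seconds)]
    for name in addons:
        if start_task := await start_addon(name):
            wait_boot.append(start_task)
    # return_exceptions=True: one failing waiter must not abort the others.
    await asyncio.gather(*wait_boot, return_exceptions=True)


asyncio.run(boot(["a", "b"], 0.2))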
@@ -99,6 +99,7 @@ RE_WATCHDOG = re.compile(
 )

 WATCHDOG_TIMEOUT = aiohttp.ClientTimeout(total=10)
+STARTUP_TIMEOUT = 120

 _OPTIONS_MERGER: Final = Merger(
     type_strategies=[(dict, ["merge"])],

@@ -106,6 +107,14 @@ _OPTIONS_MERGER: Final = Merger(
     type_conflict_strategies=["override"],
 )

+# Backups just need to know if an addon was running or not
+# Map other addon states to those two
+_MAP_ADDON_STATE = {
+    AddonState.STARTUP: AddonState.STARTED,
+    AddonState.ERROR: AddonState.STOPPED,
+    AddonState.UNKNOWN: AddonState.STOPPED,
+}
+

 class Addon(AddonModel):
     """Hold data for add-on inside Supervisor."""

@@ -119,54 +128,8 @@ class Addon(AddonModel):
             self.sys_hardware.helper.last_boot != self.sys_config.last_boot
         )
         self._listeners: list[EventListener] = []
-
-        @Job(
-            name=f"addon_{slug}_restart_after_problem",
-            limit=JobExecutionLimit.THROTTLE_RATE_LIMIT,
-            throttle_period=WATCHDOG_THROTTLE_PERIOD,
-            throttle_max_calls=WATCHDOG_THROTTLE_MAX_CALLS,
-            on_condition=AddonsJobError,
-        )
-        async def restart_after_problem(addon: Addon, state: ContainerState):
-            """Restart unhealthy or failed addon."""
-            attempts = 0
-            while await addon.instance.current_state() == state:
-                if not addon.in_progress:
-                    _LOGGER.warning(
-                        "Watchdog found addon %s is %s, restarting...",
-                        addon.name,
-                        state.value,
-                    )
-                    try:
-                        if state == ContainerState.FAILED:
-                            # Ensure failed container is removed before attempting reanimation
-                            if attempts == 0:
-                                with suppress(DockerError):
-                                    await addon.instance.stop(remove_container=True)
-
-                            await addon.start()
-                        else:
-                            await addon.restart()
-                    except AddonsError as err:
-                        attempts = attempts + 1
-                        _LOGGER.error(
-                            "Watchdog restart of addon %s failed!", addon.name
-                        )
-                        capture_exception(err)
-                    else:
-                        break
-
-                if attempts >= WATCHDOG_MAX_ATTEMPTS:
-                    _LOGGER.critical(
-                        "Watchdog cannot restart addon %s, failed all %s attempts",
-                        addon.name,
-                        attempts,
-                    )
-                    break
-
-                await asyncio.sleep(WATCHDOG_RETRY_SECONDS)
-
-        self._restart_after_problem = restart_after_problem
+        self._startup_event = asyncio.Event()
+        self._startup_task: asyncio.Task | None = None

     def __repr__(self) -> str:
         """Return internal representation."""

@@ -182,7 +145,13 @@ class Addon(AddonModel):
         """Set the add-on into new state."""
         if self._state == new_state:
             return
+        old_state = self._state
         self._state = new_state
+
+        # Signal listeners about addon state change
+        if new_state == AddonState.STARTED or old_state == AddonState.STARTUP:
+            self._startup_event.set()
+
         self.sys_homeassistant.websocket.send_message(
             {
                 ATTR_TYPE: WSType.SUPERVISOR_EVENT,

@@ -592,6 +561,12 @@ class Addon(AddonModel):

     async def unload(self) -> None:
         """Unload add-on and remove data."""
+        if self._startup_task:
+            # If we were waiting on startup, cancel that and let the task finish before proceeding
+            self._startup_task.cancel(f"Removing add-on {self.name} from system")
+            with suppress(asyncio.CancelledError):
+                await self._startup_task
+
         for listener in self._listeners:
             self.sys_bus.remove_listener(listener)

@@ -680,11 +655,32 @@ class Addon(AddonModel):
             return False
         return True

-    async def start(self) -> None:
-        """Set options and start add-on."""
+    async def _wait_for_startup(self) -> None:
+        """Wait for startup event to be set with timeout."""
+        try:
+            self._startup_task = self.sys_create_task(self._startup_event.wait())
+            await asyncio.wait_for(self._startup_task, STARTUP_TIMEOUT)
+        except asyncio.TimeoutError:
+            _LOGGER.warning(
+                "Timeout while waiting for addon %s to start, took more then %s seconds",
+                self.name,
+                STARTUP_TIMEOUT,
+            )
+        except asyncio.CancelledError as err:
+            _LOGGER.info("Wait for addon startup task cancelled due to: %s", err)
+        finally:
+            self._startup_task = None
+
+    async def start(self) -> Awaitable[None]:
+        """Set options and start add-on.
+
+        Returns a coroutine that completes when addon has state 'started'.
+        For addons with a healthcheck, that is when they become healthy or unhealthy.
+        Addons without a healthcheck have state 'started' immediately.
+        """
         if await self.instance.is_running():
             _LOGGER.warning("%s is already running!", self.slug)
-            return
+            return self._wait_for_startup()

         # Access Token
         self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)

@@ -698,12 +694,15 @@ class Addon(AddonModel):
         self.write_pulse()

         # Start Add-on
+        self._startup_event.clear()
         try:
             await self.instance.run()
         except DockerError as err:
             self.state = AddonState.ERROR
             raise AddonsError() from err

+        return self._wait_for_startup()
+
     async def stop(self) -> None:
         """Stop add-on."""
         self._manual_stop = True
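The new `_wait_for_startup` helper above is built on a plain `asyncio.Event` that the state setter fires when the add-on reaches the started state, bounded by `STARTUP_TIMEOUT`. A stripped-down sketch of the same event-plus-timeout mechanism, outside the Supervisor's class structure and with hypothetical names:

# Sketch: wait for a startup signal with a timeout, mirroring Addon._wait_for_startup().
import asyncio

STARTUP_TIMEOUT = 120  # seconds, same constant the diff introduces


class FakeAddon:
    def __init__(self) -> None:
        self._startup_event = asyncio.Event()

    def mark_started(self) -> None:
        # In the Supervisor this happens in the state setter when the
        # add-on reaches STARTED (or leaves STARTUP).
        self._startup_event.set()

    async def wait_for_startup(self) -> None:
        try:
            await asyncio.wait_for(self._startup_event.wait(), STARTUP_TIMEOUT)
        except asyncio.TimeoutError:
            print("timeout waiting for startup")


async def main() -> None:
    addon = FakeAddon()
    asyncio.get_running_loop().call_later(0.1, addon.mark_started)
    await addon.wait_for_startup()


asyncio.run(main())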
@@ -713,11 +712,14 @@ class Addon(AddonModel):
             self.state = AddonState.ERROR
             raise AddonsError() from err

-    async def restart(self) -> None:
-        """Restart add-on."""
+    async def restart(self) -> Awaitable[None]:
+        """Restart add-on.
+
+        Returns a coroutine that completes when addon has state 'started' (see start).
+        """
         with suppress(AddonsError):
             await self.stop()
-        await self.start()
+        return await self.start()

     def logs(self) -> Awaitable[bytes]:
         """Return add-ons log output.

@@ -741,10 +743,7 @@ class Addon(AddonModel):
             raise AddonsError() from err

     async def write_stdin(self, data) -> None:
-        """Write data to add-on stdin.
-
-        Return a coroutine.
-        """
+        """Write data to add-on stdin."""
         if not self.with_stdin:
             raise AddonsNotSupportedError(
                 f"Add-on {self.slug} does not support writing to stdin!", _LOGGER.error

@@ -772,9 +771,50 @@ class Addon(AddonModel):
                 _LOGGER.error,
             ) from err

-    async def backup(self, tar_file: tarfile.TarFile) -> None:
-        """Backup state of an add-on."""
-        is_running = await self.is_running()
+    @Job(name="addon_begin_backup")
+    async def begin_backup(self) -> bool:
+        """Execute pre commands or stop addon if necessary.
+
+        Returns value of `is_running`. Caller should not call `end_backup` if return is false.
+        """
+        if not await self.is_running():
+            return False
+
+        if self.backup_mode == AddonBackupMode.COLD:
+            _LOGGER.info("Shutdown add-on %s for cold backup", self.slug)
+            try:
+                await self.instance.stop()
+            except DockerError as err:
+                raise AddonsError() from err
+
+        elif self.backup_pre is not None:
+            await self._backup_command(self.backup_pre)
+
+        return True
+
+    @Job(name="addon_end_backup")
+    async def end_backup(self) -> Awaitable[None] | None:
+        """Execute post commands or restart addon if necessary.
+
+        Returns a coroutine that completes when addon has state 'started' (see start)
+        for cold backup. Else nothing is returned.
+        """
+        if self.backup_mode is AddonBackupMode.COLD:
+            _LOGGER.info("Starting add-on %s again", self.slug)
+            return await self.start()
+
+        if self.backup_post is not None:
+            await self._backup_command(self.backup_post)
+        return None
+
+    @Job(name="addon_backup")
+    async def backup(self, tar_file: tarfile.TarFile) -> Awaitable[None] | None:
+        """Backup state of an add-on.
+
+        Returns a coroutine that completes when addon has state 'started' (see start)
+        for cold backup. Else nothing is returned.
+        """
+        wait_for_start: Awaitable[None] | None = None
+
         with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
             temp_path = Path(temp)

@@ -790,7 +830,7 @@ class Addon(AddonModel):
                 ATTR_USER: self.persist,
                 ATTR_SYSTEM: self.data,
                 ATTR_VERSION: self.version,
-                ATTR_STATE: self.state,
+                ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
             }

             # Store local configs/state

@@ -826,16 +866,7 @@ class Addon(AddonModel):
                     arcname="data",
                 )

-            if (
-                is_running
-                and self.backup_mode == AddonBackupMode.HOT
-                and self.backup_pre is not None
-            ):
-                await self._backup_command(self.backup_pre)
-            elif is_running and self.backup_mode == AddonBackupMode.COLD:
-                _LOGGER.info("Shutdown add-on %s for cold backup", self.slug)
-                await self.instance.stop()
+            is_running = await self.begin_backup()

             try:
                 _LOGGER.info("Building backup for add-on %s", self.slug)
                 await self.sys_run_in_executor(_write_tarfile)

@@ -844,20 +875,19 @@ class Addon(AddonModel):
                     f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
                 ) from err
             finally:
-                if (
-                    is_running
-                    and self.backup_mode == AddonBackupMode.HOT
-                    and self.backup_post is not None
-                ):
-                    await self._backup_command(self.backup_post)
-                elif is_running and self.backup_mode is AddonBackupMode.COLD:
-                    _LOGGER.info("Starting add-on %s again", self.slug)
-                    await self.start()
+                if is_running:
+                    wait_for_start = await self.end_backup()

         _LOGGER.info("Finish backup for addon %s", self.slug)
+        return wait_for_start

-    async def restore(self, tar_file: tarfile.TarFile) -> None:
-        """Restore state of an add-on."""
+    async def restore(self, tar_file: tarfile.TarFile) -> Awaitable[None] | None:
+        """Restore state of an add-on.
+
+        Returns a coroutine that completes when addon has state 'started' (see start)
+        if addon is started after restore. Else nothing is returned.
+        """
+        wait_for_start: Awaitable[None] | None = None
         with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
             # extract backup
             def _extract_tarfile():

@@ -901,6 +931,11 @@ class Addon(AddonModel):
                 self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
             )

+            # Stop it first if its running
+            if await self.instance.is_running():
+                with suppress(DockerError):
+                    await self.instance.stop()
+
             # Check version / restore image
             version = data[ATTR_VERSION]
             if not await self.instance.exists():

@@ -918,9 +953,6 @@ class Addon(AddonModel):
                     _LOGGER.info("Restore/Update of image for addon %s", self.slug)
                     with suppress(DockerError):
                         await self.instance.update(version, restore_image)
-            else:
-                with suppress(DockerError):
-                    await self.instance.stop()

             # Restore data
             def _restore_data():

@@ -958,9 +990,10 @@ class Addon(AddonModel):

             # Run add-on
             if data[ATTR_STATE] == AddonState.STARTED:
-                return await self.start()
+                wait_for_start = await self.start()

         _LOGGER.info("Finished restore for add-on %s", self.slug)
+        return wait_for_start

     def check_trust(self) -> Awaitable[None]:
         """Calculate Addon docker content trust.

@@ -969,17 +1002,64 @@ class Addon(AddonModel):
         """
         return self.instance.check_trust()

+    @Job(
+        name="addon_restart_after_problem",
+        limit=JobExecutionLimit.GROUP_THROTTLE_RATE_LIMIT,
+        throttle_period=WATCHDOG_THROTTLE_PERIOD,
+        throttle_max_calls=WATCHDOG_THROTTLE_MAX_CALLS,
+        on_condition=AddonsJobError,
+    )
+    async def _restart_after_problem(self, state: ContainerState):
+        """Restart unhealthy or failed addon."""
+        attempts = 0
+        while await self.instance.current_state() == state:
+            if not self.in_progress:
+                _LOGGER.warning(
+                    "Watchdog found addon %s is %s, restarting...",
+                    self.name,
+                    state,
+                )
+                try:
+                    if state == ContainerState.FAILED:
+                        # Ensure failed container is removed before attempting reanimation
+                        if attempts == 0:
+                            with suppress(DockerError):
+                                await self.instance.stop(remove_container=True)
+
+                        await (await self.start())
+                    else:
+                        await (await self.restart())
+                except AddonsError as err:
+                    attempts = attempts + 1
+                    _LOGGER.error("Watchdog restart of addon %s failed!", self.name)
+                    capture_exception(err)
+                else:
+                    break
+
+            if attempts >= WATCHDOG_MAX_ATTEMPTS:
+                _LOGGER.critical(
+                    "Watchdog cannot restart addon %s, failed all %s attempts",
+                    self.name,
+                    attempts,
+                )
+                break
+
+            await asyncio.sleep(WATCHDOG_RETRY_SECONDS)
+
     async def container_state_changed(self, event: DockerContainerStateEvent) -> None:
         """Set addon state from container state."""
         if event.name != self.instance.name:
             return

-        if event.state in [
-            ContainerState.RUNNING,
+        if event.state == ContainerState.RUNNING:
+            self._manual_stop = False
+            self.state = (
+                AddonState.STARTUP if self.instance.healthcheck else AddonState.STARTED
+            )
+        elif event.state in [
             ContainerState.HEALTHY,
             ContainerState.UNHEALTHY,
         ]:
-            self._manual_stop = False
             self.state = AddonState.STARTED
         elif event.state == ContainerState.STOPPED:
             self.state = AddonState.STOPPED

@@ -1000,4 +1080,4 @@ class Addon(AddonModel):
             ContainerState.STOPPED,
             ContainerState.UNHEALTHY,
|
||||||
]:
|
]:
|
||||||
await self._restart_after_problem(self, event.state)
|
await self._restart_after_problem(event.state)
|
||||||
|
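
The refactor above changes the calling convention: start() and restart() now hand back an optional awaitable that resolves once the add-on reaches the 'started' state, which is why the watchdog does `await (await self.start())`. A minimal, self-contained sketch of that pattern; ToyAddon and _wait_for_started are made-up names, not Supervisor code:

import asyncio
from collections.abc import Awaitable


class ToyAddon:
    """Toy stand-in: start() returns quickly and hands back a second awaitable
    that completes once the add-on reports the 'started' state."""

    def __init__(self) -> None:
        self._running = asyncio.Event()

    async def start(self) -> Awaitable[None]:
        # Pretend the container comes up a little later.
        asyncio.get_running_loop().call_later(0.1, self._running.set)

        async def _wait_for_started() -> None:
            await self._running.wait()

        return _wait_for_started()


async def main() -> None:
    addon = ToyAddon()
    wait_for_start = await addon.start()  # returns as soon as startup is kicked off
    await wait_for_start                  # resolves when the add-on is 'started'
    print("add-on reports started")


asyncio.run(main())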
@@ -1,11 +1,11 @@
 """Add-on static data."""
 from datetime import timedelta
-from enum import Enum
+from enum import StrEnum

 from ..jobs.const import JobCondition


-class AddonBackupMode(str, Enum):
+class AddonBackupMode(StrEnum):
     """Backup mode of an Add-on."""

     HOT = "hot"
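
The hunk above swaps the `(str, Enum)` mixin for `enum.StrEnum` (available since Python 3.11). The practical difference is how members stringify; a quick stdlib-only illustration, with throwaway class names:

from enum import Enum, StrEnum  # StrEnum requires Python 3.11+


class OldMode(str, Enum):
    HOT = "hot"


class NewMode(StrEnum):
    HOT = "hot"


# Both compare equal to the raw string, so schema checks keep working.
assert OldMode.HOT == "hot"
assert NewMode.HOT == "hot"

# Only StrEnum stringifies to the plain value, which is what log lines
# and JSON payloads usually want without an explicit .value.
print(str(OldMode.HOT))  # OldMode.HOT
print(str(NewMode.HOT))  # hot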
@@ -1,5 +1,6 @@
 """Init file for Supervisor add-ons."""
 from abc import ABC, abstractmethod
+from collections import defaultdict
 from collections.abc import Awaitable, Callable
 from contextlib import suppress
 import logging
@@ -79,9 +80,11 @@ from ..const import (
     AddonStage,
     AddonStartup,
 )
-from ..coresys import CoreSys, CoreSysAttributes
+from ..coresys import CoreSys
 from ..docker.const import Capabilities
 from ..exceptions import AddonsNotSupportedError
+from ..jobs.const import JOB_GROUP_ADDON
+from ..jobs.job_group import JobGroup
 from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode
 from .options import AddonOptions, UiOptions
 from .validate import RE_SERVICE, RE_VOLUME
@@ -91,12 +94,14 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 Data = dict[str, Any]


-class AddonModel(CoreSysAttributes, ABC):
+class AddonModel(JobGroup, ABC):
     """Add-on Data layout."""

     def __init__(self, coresys: CoreSys, slug: str):
         """Initialize data holder."""
-        self.coresys: CoreSys = coresys
+        super().__init__(
+            coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
+        )
         self.slug: str = slug

     @property
@@ -673,10 +678,10 @@ class AddonModel(CoreSysAttributes, ABC):
         """Uninstall this add-on."""
         return self.sys_addons.uninstall(self.slug)

-    def update(self, backup: bool | None = False) -> Awaitable[None]:
+    def update(self, backup: bool | None = False) -> Awaitable[Awaitable[None] | None]:
         """Update this add-on."""
         return self.sys_addons.update(self.slug, backup=backup)

-    def rebuild(self) -> Awaitable[None]:
+    def rebuild(self) -> Awaitable[Awaitable[None] | None]:
         """Rebuild this add-on."""
         return self.sys_addons.rebuild(self.slug)
@@ -143,6 +143,8 @@ RE_MACHINE = re.compile(
     r"|raspberrypi3"
     r"|raspberrypi4-64"
     r"|raspberrypi4"
+    r"|yellow"
+    r"|green"
     r"|tinker"
     r")$"
 )
@@ -175,6 +177,20 @@ def _warn_addon_config(config: dict[str, Any]):
             name,
         )

+    invalid_services: list[str] = []
+    for service in config.get(ATTR_DISCOVERY, []):
+        try:
+            valid_discovery_service(service)
+        except vol.Invalid:
+            invalid_services.append(service)
+
+    if invalid_services:
+        _LOGGER.warning(
+            "Add-on lists the following unknown services for discovery: %s. Please report this to the maintainer of %s",
+            ", ".join(invalid_services),
+            name,
+        )
+
     return config


@@ -196,9 +212,9 @@ def _migrate_addon_config(protocol=False):
                 name,
             )
             if value == "before":
-                config[ATTR_STARTUP] = AddonStartup.SERVICES.value
+                config[ATTR_STARTUP] = AddonStartup.SERVICES
             elif value == "after":
-                config[ATTR_STARTUP] = AddonStartup.APPLICATION.value
+                config[ATTR_STARTUP] = AddonStartup.APPLICATION

     # UART 2021-01-20
     if "auto_uart" in config:
@@ -313,7 +329,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
         vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
         vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
         vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
-        vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
+        vol.Optional(ATTR_DISCOVERY): [str],
        vol.Optional(ATTR_BACKUP_EXCLUDE): [str],
        vol.Optional(ATTR_BACKUP_PRE): str,
        vol.Optional(ATTR_BACKUP_POST): str,
@@ -186,6 +186,8 @@ class RestAPI(CoreSysAttributes):
         # Boards endpoints
         self.webapp.add_routes(
             [
+                web.get("/os/boards/green", api_os.boards_green_info),
+                web.post("/os/boards/green", api_os.boards_green_options),
                 web.get("/os/boards/yellow", api_os.boards_yellow_info),
                 web.post("/os/boards/yellow", api_os.boards_yellow_options),
                 web.get("/os/boards/{board}", api_os.boards_other_info),
@@ -485,6 +487,8 @@ class RestAPI(CoreSysAttributes):
                 web.get("/backups/info", api_backups.info),
                 web.post("/backups/options", api_backups.options),
                 web.post("/backups/reload", api_backups.reload),
+                web.post("/backups/freeze", api_backups.freeze),
+                web.post("/backups/thaw", api_backups.thaw),
                 web.post("/backups/new/full", api_backups.backup_full),
                 web.post("/backups/new/partial", api_backups.backup_partial),
                 web.post("/backups/new/upload", api_backups.upload),
@@ -391,10 +391,11 @@ class APIAddons(CoreSysAttributes):
         return asyncio.shield(addon.uninstall())

     @api_process
-    def start(self, request: web.Request) -> Awaitable[None]:
+    async def start(self, request: web.Request) -> None:
         """Start add-on."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.start())
+        if start_task := await asyncio.shield(addon.start()):
+            await start_task

     @api_process
     def stop(self, request: web.Request) -> Awaitable[None]:
@@ -403,16 +404,18 @@ class APIAddons(CoreSysAttributes):
         return asyncio.shield(addon.stop())

     @api_process
-    def restart(self, request: web.Request) -> Awaitable[None]:
+    async def restart(self, request: web.Request) -> None:
         """Restart add-on."""
         addon: Addon = self._extract_addon(request)
-        return asyncio.shield(addon.restart())
+        if start_task := await asyncio.shield(addon.restart()):
+            await start_task

     @api_process
-    def rebuild(self, request: web.Request) -> Awaitable[None]:
+    async def rebuild(self, request: web.Request) -> None:
         """Rebuild local build add-on."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.rebuild())
+        if start_task := await asyncio.shield(addon.rebuild()):
+            await start_task

     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request: web.Request) -> Awaitable[bytes]:
@@ -1,11 +1,11 @@
 """Init file for Supervisor Audio RESTful API."""
 import asyncio
 from collections.abc import Awaitable
+from dataclasses import asdict
 import logging
 from typing import Any

 from aiohttp import web
-import attr
 import voluptuous as vol

 from ..const import (
@@ -76,15 +76,11 @@ class APIAudio(CoreSysAttributes):
             ATTR_UPDATE_AVAILABLE: self.sys_plugins.audio.need_update,
             ATTR_HOST: str(self.sys_docker.network.audio),
             ATTR_AUDIO: {
-                ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards],
-                ATTR_INPUT: [
-                    attr.asdict(stream) for stream in self.sys_host.sound.inputs
-                ],
-                ATTR_OUTPUT: [
-                    attr.asdict(stream) for stream in self.sys_host.sound.outputs
-                ],
+                ATTR_CARD: [asdict(card) for card in self.sys_host.sound.cards],
+                ATTR_INPUT: [asdict(stream) for stream in self.sys_host.sound.inputs],
+                ATTR_OUTPUT: [asdict(stream) for stream in self.sys_host.sound.outputs],
                 ATTR_APPLICATION: [
-                    attr.asdict(stream) for stream in self.sys_host.sound.applications
+                    asdict(stream) for stream in self.sys_host.sound.applications
                 ],
             },
         }
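
The audio API hunk above replaces `attr.asdict` with the stdlib `dataclasses.asdict`, which implies the host sound models are now plain dataclasses. A tiny illustration with a made-up AudioCard type (not the Supervisor model):

from dataclasses import asdict, dataclass


@dataclass
class AudioCard:
    index: int
    name: str
    driver: str


card = AudioCard(0, "Built-in Audio", "snd_hda_intel")
print(asdict(card))
# {'index': 0, 'name': 'Built-in Audio', 'driver': 'snd_hda_intel'}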
@@ -28,6 +28,7 @@ from ..const import (
     ATTR_SIZE,
     ATTR_SLUG,
     ATTR_SUPERVISOR_VERSION,
+    ATTR_TIMEOUT,
     ATTR_TYPE,
     ATTR_VERSION,
 )
@@ -80,6 +81,12 @@ SCHEMA_OPTIONS = vol.Schema(
     }
 )

+SCHEMA_FREEZE = vol.Schema(
+    {
+        vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
+    }
+)
+

 class APIBackups(CoreSysAttributes):
     """Handle RESTful API for backups functions."""
@@ -142,7 +149,7 @@ class APIBackups(CoreSysAttributes):
         self.sys_backups.save_data()

     @api_process
-    async def reload(self, request):
+    async def reload(self, _):
         """Reload backup list."""
         await asyncio.shield(self.sys_backups.reload())
         return True
@@ -233,6 +240,17 @@ class APIBackups(CoreSysAttributes):

         return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))

+    @api_process
+    async def freeze(self, request):
+        """Initiate manual freeze for external backup."""
+        body = await api_validate(SCHEMA_FREEZE, request)
+        await asyncio.shield(self.sys_backups.freeze_all(**body))
+
+    @api_process
+    async def thaw(self, request):
+        """Begin thaw after manual freeze."""
+        await self.sys_backups.thaw_all()
+
     @api_process
     async def remove(self, request):
         """Remove a backup."""
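
The new freeze/thaw handlers above pair with the `/backups/freeze` and `/backups/thaw` routes registered earlier. A rough client-side sketch of how an external backup tool might drive them; the base URL, the bearer-token header, and the `"timeout"` JSON key (assumed to be the string behind ATTR_TIMEOUT) are assumptions, not taken from this diff:

import asyncio

import aiohttp

SUPERVISOR_URL = "http://supervisor"  # assumption: the add-on internal hostname
TOKEN = "<SUPERVISOR_TOKEN>"          # assumption: token handed to add-ons


async def freeze_then_thaw() -> None:
    headers = {"Authorization": f"Bearer {TOKEN}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        # Ask Supervisor to quiesce things; timeout is the safety limit in seconds (assumed).
        await session.post(f"{SUPERVISOR_URL}/backups/freeze", json={"timeout": 300})
        # ... take the external snapshot here ...
        await session.post(f"{SUPERVISOR_URL}/backups/thaw")


asyncio.run(freeze_then_thaw())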
@@ -23,7 +23,6 @@ ATTR_CONNECTION_BUS = "connection_bus"
 ATTR_DATA_DISK = "data_disk"
 ATTR_DEVICE = "device"
 ATTR_DEV_PATH = "dev_path"
-ATTR_DISK_LED = "disk_led"
 ATTR_DISKS = "disks"
 ATTR_DRIVES = "drives"
 ATTR_DT_SYNCHRONIZED = "dt_synchronized"
@@ -31,8 +30,8 @@ ATTR_DT_UTC = "dt_utc"
 ATTR_EJECTABLE = "ejectable"
 ATTR_FALLBACK = "fallback"
 ATTR_FILESYSTEMS = "filesystems"
-ATTR_HEARTBEAT_LED = "heartbeat_led"
 ATTR_IDENTIFIERS = "identifiers"
+ATTR_JOBS = "jobs"
 ATTR_LLMNR = "llmnr"
 ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
 ATTR_MDNS = "mdns"
@@ -40,7 +39,6 @@ ATTR_MODEL = "model"
 ATTR_MOUNTS = "mounts"
 ATTR_MOUNT_POINTS = "mount_points"
 ATTR_PANEL_PATH = "panel_path"
-ATTR_POWER_LED = "power_led"
 ATTR_REMOVABLE = "removable"
 ATTR_REVISION = "revision"
 ATTR_SEAT = "seat"
@@ -48,6 +46,7 @@ ATTR_SIGNED = "signed"
 ATTR_STARTUP_TIME = "startup_time"
 ATTR_SUBSYSTEM = "subsystem"
 ATTR_SYSFS = "sysfs"
+ATTR_SYSTEM_HEALTH_LED = "system_health_led"
 ATTR_TIME_DETECTED = "time_detected"
 ATTR_UPDATE_TYPE = "update_type"
 ATTR_USE_NTP = "use_ntp"
@@ -1,6 +1,9 @@
 """Init file for Supervisor network RESTful API."""
+import logging
+
 import voluptuous as vol

+from ..addons.addon import Addon
 from ..const import (
     ATTR_ADDON,
     ATTR_CONFIG,
@@ -9,15 +12,18 @@ from ..const import (
     ATTR_SERVICES,
     ATTR_UUID,
     REQUEST_FROM,
+    AddonState,
 )
 from ..coresys import CoreSysAttributes
 from ..discovery.validate import valid_discovery_service
 from ..exceptions import APIError, APIForbidden
 from .utils import api_process, api_validate, require_home_assistant

+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
 SCHEMA_DISCOVERY = vol.Schema(
     {
-        vol.Required(ATTR_SERVICE): valid_discovery_service,
+        vol.Required(ATTR_SERVICE): str,
         vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
     }
 )
@@ -36,19 +42,19 @@ class APIDiscovery(CoreSysAttributes):
     @api_process
     @require_home_assistant
     async def list(self, request):
-        """Show register services."""
+        """Show registered and available services."""

         # Get available discovery
-        discovery = []
-        for message in self.sys_discovery.list_messages:
-            discovery.append(
-                {
-                    ATTR_ADDON: message.addon,
-                    ATTR_SERVICE: message.service,
-                    ATTR_UUID: message.uuid,
-                    ATTR_CONFIG: message.config,
-                }
-            )
+        discovery = [
+            {
+                ATTR_ADDON: message.addon,
+                ATTR_SERVICE: message.service,
+                ATTR_UUID: message.uuid,
+                ATTR_CONFIG: message.config,
+            }
+            for message in self.sys_discovery.list_messages
+            if (addon := self.sys_addons.get(message.addon, local_only=True))
+            and addon.state == AddonState.STARTED
+        ]

         # Get available services/add-ons
         services = {}
@@ -62,11 +68,28 @@ class APIDiscovery(CoreSysAttributes):
     async def set_discovery(self, request):
         """Write data into a discovery pipeline."""
         body = await api_validate(SCHEMA_DISCOVERY, request)
-        addon = request[REQUEST_FROM]
+        addon: Addon = request[REQUEST_FROM]
+        service = body[ATTR_SERVICE]
+
+        try:
+            valid_discovery_service(service)
+        except vol.Invalid:
+            _LOGGER.warning(
+                "Received discovery message for unknown service %s from addon %s. Please report this to the maintainer of the add-on",
+                service,
+                addon.name,
+            )

         # Access?
         if body[ATTR_SERVICE] not in addon.discovery:
-            raise APIForbidden("Can't use discovery!")
+            _LOGGER.error(
+                "Add-on %s attempted to send discovery for service %s which is not listed in its config. Please report this to the maintainer of the add-on",
+                addon.name,
+                service,
+            )
+            raise APIForbidden(
+                "Add-ons must list services they provide via discovery in their config!"
+            )

         # Process discovery message
         message = self.sys_discovery.send(addon, **body)
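
The discovery changes above move service-name checking from a hard schema failure to a soft warning via `valid_discovery_service`. A small stand-alone sketch of that collect-and-warn pattern with voluptuous; KNOWN_SERVICES is a stand-in validator, not the Supervisor one:

import logging

import voluptuous as vol

_LOGGER = logging.getLogger(__name__)

# Stand-in validator; the real one lives in supervisor.discovery.validate.
KNOWN_SERVICES = vol.In(["mqtt", "adguard", "esphome"])


def check_services(services: list[str]) -> None:
    """Warn, do not fail, on unknown discovery services (same shape as the hunk above)."""
    invalid: list[str] = []
    for service in services:
        try:
            KNOWN_SERVICES(service)
        except vol.Invalid:
            invalid.append(service)
    if invalid:
        _LOGGER.warning("Unknown discovery services: %s", ", ".join(invalid))


logging.basicConfig(level=logging.INFO)
check_services(["mqtt", "definitely_not_a_service"])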
@@ -21,11 +21,18 @@ from ..const import (
     ATTR_ICON,
     ATTR_PANELS,
     ATTR_SESSION,
+    ATTR_SESSION_DATA_USER_ID,
     ATTR_TITLE,
+    HEADER_REMOTE_USER_DISPLAY_NAME,
+    HEADER_REMOTE_USER_ID,
+    HEADER_REMOTE_USER_NAME,
     HEADER_TOKEN,
     HEADER_TOKEN_OLD,
+    IngressSessionData,
+    IngressSessionDataUser,
 )
 from ..coresys import CoreSysAttributes
+from ..exceptions import HomeAssistantAPIError
 from .const import COOKIE_INGRESS
 from .utils import api_process, api_validate, require_home_assistant

@@ -33,10 +40,23 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)

 VALIDATE_SESSION_DATA = vol.Schema({ATTR_SESSION: str})

+"""Expected optional payload of create session request"""
+SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema(
+    {
+        vol.Optional(ATTR_SESSION_DATA_USER_ID): str,
+    }
+)
+

 class APIIngress(CoreSysAttributes):
     """Ingress view to handle add-on webui routing."""

+    _list_of_users: list[IngressSessionDataUser]
+
+    def __init__(self) -> None:
+        """Initialize APIIngress."""
+        self._list_of_users = []
+
     def _extract_addon(self, request: web.Request) -> Addon:
         """Return addon, throw an exception it it doesn't exist."""
         token = request.match_info.get("token")
@@ -71,7 +91,19 @@ class APIIngress(CoreSysAttributes):
     @require_home_assistant
     async def create_session(self, request: web.Request) -> dict[str, Any]:
         """Create a new session."""
-        session = self.sys_ingress.create_session()
+        schema_ingress_config_session_data = await api_validate(
+            SCHEMA_INGRESS_CREATE_SESSION_DATA, request
+        )
+        data: IngressSessionData | None = None
+
+        if ATTR_SESSION_DATA_USER_ID in schema_ingress_config_session_data:
+            user = await self._find_user_by_id(
+                schema_ingress_config_session_data[ATTR_SESSION_DATA_USER_ID]
+            )
+            if user:
+                data = IngressSessionData(user)
+
+        session = self.sys_ingress.create_session(data)
         return {ATTR_SESSION: session}

     @api_process
@@ -99,13 +131,14 @@ class APIIngress(CoreSysAttributes):
         # Process requests
         addon = self._extract_addon(request)
         path = request.match_info.get("path")
+        session_data = self.sys_ingress.get_session_data(session)
         try:
             # Websocket
             if _is_websocket(request):
-                return await self._handle_websocket(request, addon, path)
+                return await self._handle_websocket(request, addon, path, session_data)

             # Request
-            return await self._handle_request(request, addon, path)
+            return await self._handle_request(request, addon, path, session_data)

         except aiohttp.ClientError as err:
             _LOGGER.error("Ingress error: %s", err)
@@ -113,7 +146,11 @@ class APIIngress(CoreSysAttributes):
         raise HTTPBadGateway()

     async def _handle_websocket(
-        self, request: web.Request, addon: Addon, path: str
+        self,
+        request: web.Request,
+        addon: Addon,
+        path: str,
+        session_data: IngressSessionData | None,
     ) -> web.WebSocketResponse:
         """Ingress route for websocket."""
         if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
@@ -131,7 +168,7 @@ class APIIngress(CoreSysAttributes):

         # Preparing
         url = self._create_url(addon, path)
-        source_header = _init_header(request, addon)
+        source_header = _init_header(request, addon, session_data)

         # Support GET query
         if request.query_string:
@@ -157,11 +194,15 @@ class APIIngress(CoreSysAttributes):
         return ws_server

     async def _handle_request(
-        self, request: web.Request, addon: Addon, path: str
+        self,
+        request: web.Request,
+        addon: Addon,
+        path: str,
+        session_data: IngressSessionData | None,
     ) -> web.Response | web.StreamResponse:
         """Ingress route for request."""
         url = self._create_url(addon, path)
-        source_header = _init_header(request, addon)
+        source_header = _init_header(request, addon, session_data)

         # Passing the raw stream breaks requests for some webservers
         # since we just need it for POST requests really, for all other methods
@@ -217,11 +258,35 @@ class APIIngress(CoreSysAttributes):

         return response

+    async def _find_user_by_id(self, user_id: str) -> IngressSessionDataUser | None:
+        """Find user object by the user's ID."""
+        try:
+            list_of_users = await self.sys_homeassistant.get_users()
+        except (HomeAssistantAPIError, TypeError) as err:
+            _LOGGER.error(
+                "%s error occurred while requesting list of users: %s", type(err), err
+            )
+            return None
+
+        if list_of_users is not None:
+            self._list_of_users = list_of_users
+
+        return next((user for user in self._list_of_users if user.id == user_id), None)
+

-def _init_header(request: web.Request, addon: str) -> CIMultiDict | dict[str, str]:
+def _init_header(
+    request: web.Request, addon: Addon, session_data: IngressSessionData | None
+) -> CIMultiDict | dict[str, str]:
     """Create initial header."""
     headers = {}

+    if session_data is not None:
+        headers[HEADER_REMOTE_USER_ID] = session_data.user.id
+        if session_data.user.username is not None:
+            headers[HEADER_REMOTE_USER_NAME] = session_data.user.username
+        if session_data.user.display_name is not None:
+            headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name
+
     # filter flags
     for name, value in request.headers.items():
         if name in (
@@ -234,6 +299,9 @@ def _init_header(request: web.Request, addon: str) -> CIMultiDict | dict[str, str]:
             hdrs.SEC_WEBSOCKET_KEY,
             istr(HEADER_TOKEN),
             istr(HEADER_TOKEN_OLD),
+            istr(HEADER_REMOTE_USER_ID),
+            istr(HEADER_REMOTE_USER_NAME),
+            istr(HEADER_REMOTE_USER_DISPLAY_NAME),
         ):
             continue
         headers[name] = value
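
With the ingress changes above, the proxy forwards the session's user to the add-on through the HEADER_REMOTE_USER_* headers whenever session data is present. A stand-alone sketch of that header-building step; the header strings and the User dataclass are assumptions mirroring the constants imported above, not the Supervisor definitions:

from dataclasses import dataclass

# Assumed header names mirroring the HEADER_REMOTE_USER_* constants above.
HEADER_REMOTE_USER_ID = "X-Remote-User-Id"
HEADER_REMOTE_USER_NAME = "X-Remote-User-Name"
HEADER_REMOTE_USER_DISPLAY_NAME = "X-Remote-User-Display-Name"


@dataclass
class User:
    id: str
    username: str | None = None
    display_name: str | None = None


def user_headers(user: User | None) -> dict[str, str]:
    """Build the forwarded-identity headers the same way _init_header now does."""
    headers: dict[str, str] = {}
    if user is None:
        return headers
    headers[HEADER_REMOTE_USER_ID] = user.id
    if user.username is not None:
        headers[HEADER_REMOTE_USER_NAME] = user.username
    if user.display_name is not None:
        headers[HEADER_REMOTE_USER_DISPLAY_NAME] = user.display_name
    return headers


print(user_headers(User("a1b2c3", "jdoe", "Jane Doe")))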
@@ -6,7 +6,9 @@ from aiohttp import web
 import voluptuous as vol

 from ..coresys import CoreSysAttributes
+from ..jobs import SupervisorJob
 from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
+from .const import ATTR_JOBS
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -19,11 +21,45 @@ SCHEMA_OPTIONS = vol.Schema(
 class APIJobs(CoreSysAttributes):
     """Handle RESTful API for OS functions."""

+    def _list_jobs(self) -> list[dict[str, Any]]:
+        """Return current job tree."""
+        jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
+        for job in self.sys_jobs.jobs:
+            if job.internal:
+                continue
+
+            if job.parent_id not in jobs_by_parent:
+                jobs_by_parent[job.parent_id] = [job]
+            else:
+                jobs_by_parent[job.parent_id].append(job)
+
+        job_list: list[dict[str, Any]] = []
+        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = [
+            (job_list, job) for job in jobs_by_parent.get(None, [])
+        ]
+
+        while queue:
+            (current_list, current_job) = queue.pop(0)
+            child_jobs: list[dict[str, Any]] = []
+
+            # We remove parent_id and instead use that info to represent jobs as a tree
+            job_dict = current_job.as_dict() | {"child_jobs": child_jobs}
+            job_dict.pop("parent_id")
+            current_list.append(job_dict)
+
+            if current_job.uuid in jobs_by_parent:
+                queue.extend(
+                    [(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
+                )
+
+        return job_list
+
     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return JobManager information."""
         return {
             ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions,
+            ATTR_JOBS: self._list_jobs(),
         }

     @api_process
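
`_list_jobs` above turns the flat job list into a tree by grouping on parent_id and walking a queue from the roots. The same idea with plain dicts, so it can be run outside the Supervisor; the field names are reduced to uuid/parent_id/name and the sample jobs are invented:

from typing import Any

jobs = [
    {"uuid": "a", "parent_id": None, "name": "backup_manager_full_backup"},
    {"uuid": "b", "parent_id": "a", "name": "addon_backup"},
    {"uuid": "c", "parent_id": "a", "name": "home_assistant_backup"},
]


def list_jobs(flat: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Group by parent_id, then walk from the roots, attaching child_jobs lists."""
    by_parent: dict[str | None, list[dict[str, Any]]] = {}
    for job in flat:
        by_parent.setdefault(job["parent_id"], []).append(job)

    tree: list[dict[str, Any]] = []
    queue = [(tree, job) for job in by_parent.get(None, [])]
    while queue:
        current_list, job = queue.pop(0)
        children: list[dict[str, Any]] = []
        node = {k: v for k, v in job.items() if k != "parent_id"}
        node["child_jobs"] = children
        current_list.append(node)
        queue.extend((children, child) for child in by_parent.get(job["uuid"], []))
    return tree


print(list_jobs(jobs))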
@@ -195,7 +195,7 @@ class SecurityMiddleware(CoreSysAttributes):
             CoreState.FREEZE,
         ):
             return api_return_error(
-                message=f"System is not ready with state: {self.sys_core.state.value}"
+                message=f"System is not ready with state: {self.sys_core.state}"
             )

         return await handler(request)
@@ -1,11 +1,11 @@
 """REST API for network."""
 import asyncio
 from collections.abc import Awaitable
+from dataclasses import replace
 from ipaddress import ip_address, ip_interface
 from typing import Any

 from aiohttp import web
-import attr
 import voluptuous as vol

 from ..const import (
@@ -43,8 +43,7 @@ from ..const import (
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, HostNetworkNotFound
-from ..host.const import AuthMethod, InterfaceType, WifiMode
-from ..host.network import (
+from ..host.configuration import (
     AccessPoint,
     Interface,
     InterfaceMethod,
@@ -52,6 +51,7 @@ from ..host.network import (
     VlanConfig,
     WifiConfig,
 )
+from ..host.const import AuthMethod, InterfaceType, WifiMode
 from .utils import api_process, api_validate

 _SCHEMA_IP_CONFIG = vol.Schema(
@@ -121,6 +121,7 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
         ATTR_ENABLED: interface.enabled,
         ATTR_CONNECTED: interface.connected,
         ATTR_PRIMARY: interface.primary,
+        ATTR_MAC: interface.mac,
         ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
         ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
         ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
@@ -196,19 +197,19 @@ class APINetwork(CoreSysAttributes):
         # Apply config
         for key, config in body.items():
             if key == ATTR_IPV4:
-                interface.ipv4 = attr.evolve(
+                interface.ipv4 = replace(
                     interface.ipv4
                     or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
                     **config,
                 )
             elif key == ATTR_IPV6:
-                interface.ipv6 = attr.evolve(
+                interface.ipv6 = replace(
                     interface.ipv6
                     or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
                     **config,
                 )
             elif key == ATTR_WIFI:
-                interface.wifi = attr.evolve(
+                interface.wifi = replace(
                     interface.wifi
                     or WifiConfig(
                         WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
@@ -276,6 +277,8 @@ class APINetwork(CoreSysAttributes):
         )

         vlan_interface = Interface(
+            "",
+            "",
             "",
             True,
             True,
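
The network API hunks above drop attrs in favour of dataclasses, so partial updates use `dataclasses.replace` instead of `attr.evolve`. A minimal demo with a made-up IpConfig (not the Supervisor model):

from dataclasses import dataclass, replace
from ipaddress import IPv4Address


@dataclass(frozen=True)
class IpConfig:  # stand-in, not supervisor.host.configuration.IpConfig
    method: str
    address: list[str]
    gateway: IPv4Address | None


current = IpConfig("static", ["192.168.1.5/24"], IPv4Address("192.168.1.1"))
# Apply only the keys present in the request body; everything else is kept.
updated = replace(current, address=["192.168.1.10/24"])
print(updated)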
@@ -8,11 +8,15 @@ from aiohttp import web
 import voluptuous as vol

 from ..const import (
+    ATTR_ACTIVITY_LED,
     ATTR_BOARD,
     ATTR_BOOT,
     ATTR_DEVICES,
+    ATTR_DISK_LED,
+    ATTR_HEARTBEAT_LED,
     ATTR_ID,
     ATTR_NAME,
+    ATTR_POWER_LED,
     ATTR_SERIAL,
     ATTR_SIZE,
     ATTR_UPDATE_AVAILABLE,
@@ -27,21 +31,19 @@ from .const import (
     ATTR_DATA_DISK,
     ATTR_DEV_PATH,
     ATTR_DEVICE,
-    ATTR_DISK_LED,
     ATTR_DISKS,
-    ATTR_HEARTBEAT_LED,
     ATTR_MODEL,
-    ATTR_POWER_LED,
+    ATTR_SYSTEM_HEALTH_LED,
     ATTR_VENDOR,
 )
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

+# pylint: disable=no-value-for-parameter
 SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
 SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})

-# pylint: disable=no-value-for-parameter
 SCHEMA_YELLOW_OPTIONS = vol.Schema(
     {
         vol.Optional(ATTR_DISK_LED): vol.Boolean(),
@@ -49,6 +51,14 @@ SCHEMA_YELLOW_OPTIONS = vol.Schema(
         vol.Optional(ATTR_POWER_LED): vol.Boolean(),
     }
 )
+SCHEMA_GREEN_OPTIONS = vol.Schema(
+    {
+        vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(),
+        vol.Optional(ATTR_POWER_LED): vol.Boolean(),
+        vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
+    }
+)
+# pylint: enable=no-value-for-parameter


 class APIOS(CoreSysAttributes):
@@ -105,6 +115,31 @@ class APIOS(CoreSysAttributes):
             ],
         }

+    @api_process
+    async def boards_green_info(self, request: web.Request) -> dict[str, Any]:
+        """Get green board settings."""
+        return {
+            ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led,
+            ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led,
+            ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led,
+        }
+
+    @api_process
+    async def boards_green_options(self, request: web.Request) -> None:
+        """Update green board settings."""
+        body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
+
+        if ATTR_ACTIVITY_LED in body:
+            self.sys_dbus.agent.board.green.activity_led = body[ATTR_ACTIVITY_LED]
+
+        if ATTR_POWER_LED in body:
+            self.sys_dbus.agent.board.green.power_led = body[ATTR_POWER_LED]
+
+        if ATTR_SYSTEM_HEALTH_LED in body:
+            self.sys_dbus.agent.board.green.user_led = body[ATTR_SYSTEM_HEALTH_LED]
+
+        self.sys_dbus.agent.board.green.save_data()
+
     @api_process
     async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
         """Get yellow board settings."""
@@ -128,6 +163,7 @@ class APIOS(CoreSysAttributes):
         if ATTR_POWER_LED in body:
             self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]

+        self.sys_dbus.agent.board.yellow.save_data()
         self.sys_resolution.create_issue(
             IssueType.REBOOT_REQUIRED,
             ContextType.SYSTEM,
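
Together with the `/os/boards/green` routes registered earlier, the handlers above expose the Home Assistant Green LEDs over the Supervisor API. A hedged client sketch; the base URL, the token header, and the `"activity_led"`/`"power_led"` JSON keys are assumptions (only `system_health_led` appears verbatim in this compare):

import asyncio

import aiohttp

SUPERVISOR_URL = "http://supervisor"  # assumption
TOKEN = "<SUPERVISOR_TOKEN>"          # assumption


async def dim_green_leds() -> None:
    headers = {"Authorization": f"Bearer {TOKEN}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get(f"{SUPERVISOR_URL}/os/boards/green") as resp:
            print(await resp.json())  # current LED settings
        await session.post(
            f"{SUPERVISOR_URL}/os/boards/green",
            json={
                "activity_led": False,       # assumed key for ATTR_ACTIVITY_LED
                "power_led": False,          # assumed key for ATTR_POWER_LED
                "system_health_led": False,  # value shown in the api const hunk above
            },
        )


asyncio.run(dim_green_leds())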
@@ -1 +1 @@
|
|||||||
!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-NoHhvMA3Ku8.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-G81gb268sps.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-NoHhvMA3Ku8.js")}}()
|
!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()
|
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
2
supervisor/api/panel/frontend_es5/1036-G1AUvfK_ULU.js
Normal file
2
supervisor/api/panel/frontend_es5/1036-G1AUvfK_ULU.js
Normal file
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1036-G1AUvfK_ULU.js.gz
Normal file
BIN
supervisor/api/panel/frontend_es5/1036-G1AUvfK_ULU.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230601.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230601.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}
|
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}
|
2
supervisor/api/panel/frontend_es5/1074-djfpWNdWsA8.js
Normal file
2
supervisor/api/panel/frontend_es5/1074-djfpWNdWsA8.js
Normal file
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1074-djfpWNdWsA8.js.gz
Normal file
BIN
supervisor/api/panel/frontend_es5/1074-djfpWNdWsA8.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1116-xNyDWQHsExg.js.gz
Normal file
BIN
supervisor/api/panel/frontend_es5/1116-xNyDWQHsExg.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1193--qnpEuA6qSY.js.gz
Normal file
BIN
supervisor/api/panel/frontend_es5/1193--qnpEuA6qSY.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
2
supervisor/api/panel/frontend_es5/1265-yCkoy0FMl6o.js
Normal file
2
supervisor/api/panel/frontend_es5/1265-yCkoy0FMl6o.js
Normal file
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1265-yCkoy0FMl6o.js.gz
Normal file
BIN
supervisor/api/panel/frontend_es5/1265-yCkoy0FMl6o.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1281-On4tZThCfZs.js.gz
Normal file
BIN
supervisor/api/panel/frontend_es5/1281-On4tZThCfZs.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
2
supervisor/api/panel/frontend_es5/1402-6WKUruvoXtM.js
Normal file
2
supervisor/api/panel/frontend_es5/1402-6WKUruvoXtM.js
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
!function(){"use strict";var n,t,e={14595:function(n,t,e){e(58556);var r,i,o=e(93217),u=e(422),a=e(62173),s=function(n,t,e){if("input"===n){if("type"===t&&"checkbox"===e||"checked"===t||"disabled"===t)return;return""}},c={renderMarkdown:function(n,t){var e,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign(Object.assign({},(0,a.getDefaultWhiteList)()),{},{input:["type","disabled","checked"],"ha-icon":["icon"],"ha-svg-icon":["path"],"ha-alert":["alert-type","title"]})),o.allowSvg?(i||(i=Object.assign(Object.assign({},r),{},{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=i):e=r,(0,a.filterXSS)((0,u.TU)(n,t),{whiteList:e,onTagAttr:s})}};(0,o.Jj)(c)}},r={};function i(n){var t=r[n];if(void 0!==t)return t.exports;var o=r[n]={exports:{}};return e[n](o,o.exports,i),o.exports}i.m=e,i.x=function(){var n=i.O(void 0,[9191,215],(function(){return i(14595)}));return n=i.O(n)},n=[],i.O=function(t,e,r,o){if(!e){var u=1/0;for(f=0;f<n.length;f++){e=n[f][0],r=n[f][1],o=n[f][2];for(var a=!0,s=0;s<e.length;s++)(!1&o||u>=o)&&Object.keys(i.O).every((function(n){return i.O[n](e[s])}))?e.splice(s--,1):(a=!1,o<u&&(u=o));if(a){n.splice(f--,1);var c=r();void 0!==c&&(t=c)}}return t}o=o||0;for(var f=n.length;f>0&&n[f-1][2]>o;f--)n[f]=n[f-1];n[f]=[e,r,o]},i.n=function(n){var t=n&&n.__esModule?function(){return n.default}:function(){return n};return i.d(t,{a:t}),t},i.d=function(n,t){for(var e in t)i.o(t,e)&&!i.o(n,e)&&Object.defineProperty(n,e,{enumerable:!0,get:t[e]})},i.f={},i.e=function(n){return Promise.all(Object.keys(i.f).reduce((function(t,e){return i.f[e](n,t),t}),[]))},i.u=function(n){return n+"-"+{215:"FPZmDYZTPdk",9191:"37260H-osZ4"}[n]+".js"},i.o=function(n,t){return Object.prototype.hasOwnProperty.call(n,t)},i.p="/api/hassio/app/frontend_es5/",function(){var n={1402:1};i.f.i=function(t,e){n[t]||importScripts(i.p+i.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=t.push.bind(t);t.push=function(t){var r=t[0],o=t[1],u=t[2];for(var a in o)i.o(o,a)&&(i.m[a]=o[a]);for(u&&u(i);r.length;)n[r.pop()]=1;e(t)}}(),t=i.x,i.x=function(){return Promise.all([i.e(9191),i.e(215)]).then(t)};i.x()}();
|
||||||
|
//# sourceMappingURL=1402-6WKUruvoXtM.js.map
|
BIN
supervisor/api/panel/frontend_es5/1402-6WKUruvoXtM.js.gz
Normal file
BIN
supervisor/api/panel/frontend_es5/1402-6WKUruvoXtM.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
(source map change: chunk "4971-bQWIUsuRYEk.js", built from https://raw.githubusercontent.com/home-assistant/frontend/20230601.0/src/resources/markdown_worker.ts, replaced by chunk "1402-6WKUruvoXtM.js", built from https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/resources/markdown-worker.ts)
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
2
supervisor/api/panel/frontend_es5/1601-w9Tpb2p6Eog.js
Normal file
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1601-w9Tpb2p6Eog.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1838-_4LQjq4VcpM.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/184-GFdCAdhSahg.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/19-D0tvRrMhJ24.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1927-qgtda9tVF5c.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1985-We0XP5osZmE.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
2
supervisor/api/panel/frontend_es5/2094-jp3V24jtsr8.js
Normal file
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/2094-jp3V24jtsr8.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff