mirror of
https://github.com/home-assistant/supervisor.git
synced 2025-08-17 13:09:22 +00:00
Compare commits
157 Commits
faster_bac
...
2024.04.3
Author | SHA1 | Date | |
---|---|---|---|
![]() |
2a622a929d | ||
![]() |
ca8eeaa68c | ||
![]() |
d1b8ac1249 | ||
![]() |
3f629c4d60 | ||
![]() |
3fa910e68b | ||
![]() |
e3cf2989c9 | ||
![]() |
136b2f402d | ||
![]() |
8d18d2d9c6 | ||
![]() |
f18213361a | ||
![]() |
18d9d32bca | ||
![]() |
1246e429c9 | ||
![]() |
77bc46bc37 | ||
![]() |
ce16963c94 | ||
![]() |
a70e8cfe58 | ||
![]() |
ba922a1aaa | ||
![]() |
b09230a884 | ||
![]() |
f1cb9ca08e | ||
![]() |
06513e88c6 | ||
![]() |
b4a79bd068 | ||
![]() |
dfd8fe84e0 | ||
![]() |
4857c2e243 | ||
![]() |
7d384f6160 | ||
![]() |
672a7621f9 | ||
![]() |
f0e2fb3f57 | ||
![]() |
8c3a520512 | ||
![]() |
22e50d56db | ||
![]() |
a0735f3585 | ||
![]() |
50a2e8fde3 | ||
![]() |
55ed63cc79 | ||
![]() |
97e9dfff3f | ||
![]() |
501c9579fb | ||
![]() |
f9aedadee6 | ||
![]() |
c3c17b2bc3 | ||
![]() |
a894c4589e | ||
![]() |
56a8a1b5a1 | ||
![]() |
be3f7a6c37 | ||
![]() |
906e400ab7 | ||
![]() |
a9265afd4c | ||
![]() |
d26058ac80 | ||
![]() |
ebd1f30606 | ||
![]() |
c78e077649 | ||
![]() |
07619223b0 | ||
![]() |
25c326ec6c | ||
![]() |
df167b94c2 | ||
![]() |
3730908881 | ||
![]() |
975dc1bc11 | ||
![]() |
31409f0c32 | ||
![]() |
b19273227b | ||
![]() |
f89179fb03 | ||
![]() |
90c971f9f1 | ||
![]() |
d685780a4a | ||
![]() |
b6bc8b7b7c | ||
![]() |
92daba898f | ||
![]() |
138843591e | ||
![]() |
0814552b2a | ||
![]() |
0e0fadd72d | ||
![]() |
5426bd4392 | ||
![]() |
3520a65099 | ||
![]() |
b15a5c2c87 | ||
![]() |
a8af04ff82 | ||
![]() |
2148de45a0 | ||
![]() |
c4143dacee | ||
![]() |
a8025e77b3 | ||
![]() |
dd1e76be93 | ||
![]() |
36f997959a | ||
![]() |
c1faed163a | ||
![]() |
9ca927dbe7 | ||
![]() |
02c6011818 | ||
![]() |
2e96b16396 | ||
![]() |
53b8de6c1c | ||
![]() |
daea9f893c | ||
![]() |
d1b5b1734c | ||
![]() |
74a5899626 | ||
![]() |
202ebf6d4e | ||
![]() |
2c7b417e25 | ||
![]() |
bb5e138134 | ||
![]() |
3a2c3e2f84 | ||
![]() |
d5be0c34ac | ||
![]() |
ea5431ef2b | ||
![]() |
9c4cdcd11f | ||
![]() |
e5ef6333e4 | ||
![]() |
98779a48b1 | ||
![]() |
9d4848ee77 | ||
![]() |
5126820619 | ||
![]() |
8b5c808e8c | ||
![]() |
9c75996c40 | ||
![]() |
d524778e42 | ||
![]() |
52d4bc660e | ||
![]() |
8884696a6c | ||
![]() |
d493ccde28 | ||
![]() |
1ececaaaa2 | ||
![]() |
91b48ad432 | ||
![]() |
f3fe40a19f | ||
![]() |
cf4b29c425 | ||
![]() |
4344e14a9d | ||
![]() |
df935ec423 | ||
![]() |
e7f9f7504e | ||
![]() |
5721b2353a | ||
![]() |
c9de846d0e | ||
![]() |
a598108c26 | ||
![]() |
5467aa399d | ||
![]() |
da052b074a | ||
![]() |
90c035edd0 | ||
![]() |
fc4eb44a24 | ||
![]() |
a71111b378 | ||
![]() |
52e0c7e484 | ||
![]() |
e32970f191 | ||
![]() |
897cc36017 | ||
![]() |
d79c575860 | ||
![]() |
1f19f84edd | ||
![]() |
27c37b8b84 | ||
![]() |
06a5dd3153 | ||
![]() |
b5bf270d22 | ||
![]() |
8e71d69a64 | ||
![]() |
06edb6f8a8 | ||
![]() |
dca82ec0a1 | ||
![]() |
9c82ce4103 | ||
![]() |
8a23a9eb1b | ||
![]() |
e1b7e515df | ||
![]() |
c8ff335ed7 | ||
![]() |
5736da8ab7 | ||
![]() |
060bba4dce | ||
![]() |
4c573991d2 | ||
![]() |
7fd6dce55f | ||
![]() |
1861d756e9 | ||
![]() |
c36c041f5e | ||
![]() |
c3d877bdd2 | ||
![]() |
1242030d4a | ||
![]() |
1626e74608 | ||
![]() |
b1b913777f | ||
![]() |
190894010c | ||
![]() |
765265723c | ||
![]() |
7e20502379 | ||
![]() |
366fc30e9d | ||
![]() |
aa91788a69 | ||
![]() |
375789b019 | ||
![]() |
140b769a42 | ||
![]() |
88d718271d | ||
![]() |
6ed26cdd1f | ||
![]() |
d1851fa607 | ||
![]() |
e846157c52 | ||
![]() |
e190bb4c1a | ||
![]() |
137fbe7acd | ||
![]() |
9ccdb2ae3a | ||
![]() |
f5f7515744 | ||
![]() |
ddadbec7e3 | ||
![]() |
d24543e103 | ||
![]() |
f80c4c9565 | ||
![]() |
480b383782 | ||
![]() |
d3efd4c24b | ||
![]() |
67a0acffa2 | ||
![]() |
41b07da399 | ||
![]() |
a6ce55d5b5 | ||
![]() |
98c01fe1b3 | ||
![]() |
51df986222 | ||
![]() |
9c625f93a5 | ||
![]() |
7101d47e2e |
@@ -10,11 +10,13 @@
|
|||||||
"customizations": {
|
"customizations": {
|
||||||
"vscode": {
|
"vscode": {
|
||||||
"extensions": [
|
"extensions": [
|
||||||
"ms-python.python",
|
"charliermarsh.ruff",
|
||||||
"ms-python.pylint",
|
"ms-python.pylint",
|
||||||
"ms-python.vscode-pylance",
|
"ms-python.vscode-pylance",
|
||||||
"visualstudioexptteam.vscodeintellicode",
|
"visualstudioexptteam.vscodeintellicode",
|
||||||
"esbenp.prettier-vscode"
|
"redhat.vscode-yaml",
|
||||||
|
"esbenp.prettier-vscode",
|
||||||
|
"GitHub.vscode-pull-request-github"
|
||||||
],
|
],
|
||||||
"settings": {
|
"settings": {
|
||||||
"terminal.integrated.profiles.linux": {
|
"terminal.integrated.profiles.linux": {
|
||||||
@@ -28,9 +30,9 @@
|
|||||||
"editor.formatOnType": true,
|
"editor.formatOnType": true,
|
||||||
"files.trimTrailingWhitespace": true,
|
"files.trimTrailingWhitespace": true,
|
||||||
"python.pythonPath": "/usr/local/bin/python3",
|
"python.pythonPath": "/usr/local/bin/python3",
|
||||||
"python.formatting.provider": "black",
|
"[python]": {
|
||||||
"python.formatting.blackArgs": ["--target-version", "py312"],
|
"editor.defaultFormatter": "charliermarsh.ruff"
|
||||||
"python.formatting.blackPath": "/usr/local/bin/black"
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
2
.github/PULL_REQUEST_TEMPLATE.md
vendored
2
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -52,7 +52,7 @@
|
|||||||
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
|
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
|
||||||
- [ ] There is no commented out code in this PR.
|
- [ ] There is no commented out code in this PR.
|
||||||
- [ ] I have followed the [development checklist][dev-checklist]
|
- [ ] I have followed the [development checklist][dev-checklist]
|
||||||
- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
|
- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
|
||||||
- [ ] Tests have been added to verify that the new code works.
|
- [ ] Tests have been added to verify that the new code works.
|
||||||
|
|
||||||
If API endpoints of add-on configuration are added/changed:
|
If API endpoints of add-on configuration are added/changed:
|
||||||
|
22
.github/workflows/builder.yml
vendored
22
.github/workflows/builder.yml
vendored
@@ -53,7 +53,7 @@ jobs:
|
|||||||
requirements: ${{ steps.requirements.outputs.changed }}
|
requirements: ${{ steps.requirements.outputs.changed }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout the repository
|
- name: Checkout the repository
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
@@ -92,7 +92,7 @@ jobs:
|
|||||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout the repository
|
- name: Checkout the repository
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
@@ -125,20 +125,20 @@ jobs:
|
|||||||
|
|
||||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
if: needs.init.outputs.publish == 'true'
|
if: needs.init.outputs.publish == 'true'
|
||||||
uses: actions/setup-python@v5.0.0
|
uses: actions/setup-python@v5.1.0
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
|
||||||
- name: Install Cosign
|
- name: Install Cosign
|
||||||
if: needs.init.outputs.publish == 'true'
|
if: needs.init.outputs.publish == 'true'
|
||||||
uses: sigstore/cosign-installer@v3.3.0
|
uses: sigstore/cosign-installer@v3.5.0
|
||||||
with:
|
with:
|
||||||
cosign-release: "v2.0.2"
|
cosign-release: "v2.2.3"
|
||||||
|
|
||||||
- name: Install dirhash and calc hash
|
- name: Install dirhash and calc hash
|
||||||
if: needs.init.outputs.publish == 'true'
|
if: needs.init.outputs.publish == 'true'
|
||||||
run: |
|
run: |
|
||||||
pip3 install dirhash
|
pip3 install setuptools dirhash
|
||||||
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
|
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
|
||||||
echo "${dir_hash}" > rootfs/supervisor.sha256
|
echo "${dir_hash}" > rootfs/supervisor.sha256
|
||||||
|
|
||||||
@@ -149,7 +149,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Login to GitHub Container Registry
|
- name: Login to GitHub Container Registry
|
||||||
if: needs.init.outputs.publish == 'true'
|
if: needs.init.outputs.publish == 'true'
|
||||||
uses: docker/login-action@v3.0.0
|
uses: docker/login-action@v3.1.0
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.repository_owner }}
|
username: ${{ github.repository_owner }}
|
||||||
@@ -160,7 +160,7 @@ jobs:
|
|||||||
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
|
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Build supervisor
|
- name: Build supervisor
|
||||||
uses: home-assistant/builder@2024.01.0
|
uses: home-assistant/builder@2024.03.5
|
||||||
with:
|
with:
|
||||||
args: |
|
args: |
|
||||||
$BUILD_ARGS \
|
$BUILD_ARGS \
|
||||||
@@ -178,7 +178,7 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout the repository
|
- name: Checkout the repository
|
||||||
if: needs.init.outputs.publish == 'true'
|
if: needs.init.outputs.publish == 'true'
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
|
|
||||||
- name: Initialize git
|
- name: Initialize git
|
||||||
if: needs.init.outputs.publish == 'true'
|
if: needs.init.outputs.publish == 'true'
|
||||||
@@ -203,11 +203,11 @@ jobs:
|
|||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout the repository
|
- name: Checkout the repository
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
|
|
||||||
- name: Build the Supervisor
|
- name: Build the Supervisor
|
||||||
if: needs.init.outputs.publish != 'true'
|
if: needs.init.outputs.publish != 'true'
|
||||||
uses: home-assistant/builder@2024.01.0
|
uses: home-assistant/builder@2024.03.5
|
||||||
with:
|
with:
|
||||||
args: |
|
args: |
|
||||||
--test \
|
--test \
|
||||||
|
239
.github/workflows/ci.yaml
vendored
239
.github/workflows/ci.yaml
vendored
@@ -25,15 +25,15 @@ jobs:
|
|||||||
name: Prepare Python dependencies
|
name: Prepare Python dependencies
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
id: python
|
id: python
|
||||||
uses: actions/setup-python@v5.0.0
|
uses: actions/setup-python@v5.1.0
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
- name: Restore Python virtual environment
|
- name: Restore Python virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache@v3.3.3
|
uses: actions/cache@v4.0.2
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
key: |
|
key: |
|
||||||
@@ -47,7 +47,7 @@ jobs:
|
|||||||
pip install -r requirements.txt -r requirements_tests.txt
|
pip install -r requirements.txt -r requirements_tests.txt
|
||||||
- name: Restore pre-commit environment from cache
|
- name: Restore pre-commit environment from cache
|
||||||
id: cache-precommit
|
id: cache-precommit
|
||||||
uses: actions/cache@v3.3.3
|
uses: actions/cache@v4.0.2
|
||||||
with:
|
with:
|
||||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||||
lookup-only: true
|
lookup-only: true
|
||||||
@@ -61,21 +61,21 @@ jobs:
|
|||||||
. venv/bin/activate
|
. venv/bin/activate
|
||||||
pre-commit install-hooks
|
pre-commit install-hooks
|
||||||
|
|
||||||
lint-black:
|
lint-ruff-format:
|
||||||
name: Check black
|
name: Check ruff-format
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: prepare
|
needs: prepare
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||||
uses: actions/setup-python@v5.0.0
|
uses: actions/setup-python@v5.1.0
|
||||||
id: python
|
id: python
|
||||||
with:
|
with:
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
python-version: ${{ needs.prepare.outputs.python-version }}
|
||||||
- name: Restore Python virtual environment
|
- name: Restore Python virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache@v3.3.3
|
uses: actions/cache@v4.0.2
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
key: |
|
key: |
|
||||||
@@ -85,10 +85,67 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
exit 1
|
exit 1
|
||||||
- name: Run black
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v4.0.2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run ruff-format
|
||||||
run: |
|
run: |
|
||||||
. venv/bin/activate
|
. venv/bin/activate
|
||||||
black --target-version py312 --check supervisor tests setup.py
|
pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
|
||||||
|
env:
|
||||||
|
RUFF_OUTPUT_FORMAT: github
|
||||||
|
|
||||||
|
lint-ruff:
|
||||||
|
name: Check ruff
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v4.1.4
|
||||||
|
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||||
|
uses: actions/setup-python@v5.1.0
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ needs.prepare.outputs.python-version }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v4.0.2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v4.0.2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run ruff
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
|
||||||
|
env:
|
||||||
|
RUFF_OUTPUT_FORMAT: github
|
||||||
|
|
||||||
lint-dockerfile:
|
lint-dockerfile:
|
||||||
name: Check Dockerfile
|
name: Check Dockerfile
|
||||||
@@ -96,7 +153,7 @@ jobs:
|
|||||||
needs: prepare
|
needs: prepare
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
- name: Register hadolint problem matcher
|
- name: Register hadolint problem matcher
|
||||||
run: |
|
run: |
|
||||||
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
|
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
|
||||||
@@ -111,15 +168,15 @@ jobs:
|
|||||||
needs: prepare
|
needs: prepare
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||||
uses: actions/setup-python@v5.0.0
|
uses: actions/setup-python@v5.1.0
|
||||||
id: python
|
id: python
|
||||||
with:
|
with:
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
python-version: ${{ needs.prepare.outputs.python-version }}
|
||||||
- name: Restore Python virtual environment
|
- name: Restore Python virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache@v3.3.3
|
uses: actions/cache@v4.0.2
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
key: |
|
key: |
|
||||||
@@ -131,7 +188,7 @@ jobs:
|
|||||||
exit 1
|
exit 1
|
||||||
- name: Restore pre-commit environment from cache
|
- name: Restore pre-commit environment from cache
|
||||||
id: cache-precommit
|
id: cache-precommit
|
||||||
uses: actions/cache@v3.3.3
|
uses: actions/cache@v4.0.2
|
||||||
with:
|
with:
|
||||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||||
key: |
|
key: |
|
||||||
@@ -149,94 +206,21 @@ jobs:
|
|||||||
. venv/bin/activate
|
. venv/bin/activate
|
||||||
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
|
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
|
||||||
|
|
||||||
lint-flake8:
|
|
||||||
name: Check flake8
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: prepare
|
|
||||||
steps:
|
|
||||||
- name: Check out code from GitHub
|
|
||||||
uses: actions/checkout@v4.1.1
|
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
|
||||||
uses: actions/setup-python@v5.0.0
|
|
||||||
id: python
|
|
||||||
with:
|
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
|
||||||
- name: Restore Python virtual environment
|
|
||||||
id: cache-venv
|
|
||||||
uses: actions/cache@v3.3.3
|
|
||||||
with:
|
|
||||||
path: venv
|
|
||||||
key: |
|
|
||||||
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
|
||||||
- name: Fail job if Python cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Register flake8 problem matcher
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/flake8.json"
|
|
||||||
- name: Run flake8
|
|
||||||
run: |
|
|
||||||
. venv/bin/activate
|
|
||||||
flake8 supervisor tests
|
|
||||||
|
|
||||||
lint-isort:
|
|
||||||
name: Check isort
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: prepare
|
|
||||||
steps:
|
|
||||||
- name: Check out code from GitHub
|
|
||||||
uses: actions/checkout@v4.1.1
|
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
|
||||||
uses: actions/setup-python@v5.0.0
|
|
||||||
id: python
|
|
||||||
with:
|
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
|
||||||
- name: Restore Python virtual environment
|
|
||||||
id: cache-venv
|
|
||||||
uses: actions/cache@v3.3.3
|
|
||||||
with:
|
|
||||||
path: venv
|
|
||||||
key: |
|
|
||||||
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
|
||||||
- name: Fail job if Python cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Restore pre-commit environment from cache
|
|
||||||
id: cache-precommit
|
|
||||||
uses: actions/cache@v3.3.3
|
|
||||||
with:
|
|
||||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
|
||||||
key: |
|
|
||||||
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
|
||||||
- name: Fail job if cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Run isort
|
|
||||||
run: |
|
|
||||||
. venv/bin/activate
|
|
||||||
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
|
|
||||||
|
|
||||||
lint-json:
|
lint-json:
|
||||||
name: Check JSON
|
name: Check JSON
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: prepare
|
needs: prepare
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||||
uses: actions/setup-python@v5.0.0
|
uses: actions/setup-python@v5.1.0
|
||||||
id: python
|
id: python
|
||||||
with:
|
with:
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
python-version: ${{ needs.prepare.outputs.python-version }}
|
||||||
- name: Restore Python virtual environment
|
- name: Restore Python virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache@v3.3.3
|
uses: actions/cache@v4.0.2
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
key: |
|
key: |
|
||||||
@@ -248,7 +232,7 @@ jobs:
|
|||||||
exit 1
|
exit 1
|
||||||
- name: Restore pre-commit environment from cache
|
- name: Restore pre-commit environment from cache
|
||||||
id: cache-precommit
|
id: cache-precommit
|
||||||
uses: actions/cache@v3.3.3
|
uses: actions/cache@v4.0.2
|
||||||
with:
|
with:
|
||||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||||
key: |
|
key: |
|
||||||
@@ -272,15 +256,15 @@ jobs:
|
|||||||
needs: prepare
|
needs: prepare
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||||
uses: actions/setup-python@v5.0.0
|
uses: actions/setup-python@v5.1.0
|
||||||
id: python
|
id: python
|
||||||
with:
|
with:
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
python-version: ${{ needs.prepare.outputs.python-version }}
|
||||||
- name: Restore Python virtual environment
|
- name: Restore Python virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache@v3.3.3
|
uses: actions/cache@v4.0.2
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
key: |
|
key: |
|
||||||
@@ -298,66 +282,25 @@ jobs:
|
|||||||
. venv/bin/activate
|
. venv/bin/activate
|
||||||
pylint supervisor tests
|
pylint supervisor tests
|
||||||
|
|
||||||
lint-pyupgrade:
|
|
||||||
name: Check pyupgrade
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: prepare
|
|
||||||
steps:
|
|
||||||
- name: Check out code from GitHub
|
|
||||||
uses: actions/checkout@v4.1.1
|
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
|
||||||
uses: actions/setup-python@v5.0.0
|
|
||||||
id: python
|
|
||||||
with:
|
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
|
||||||
- name: Restore Python virtual environment
|
|
||||||
id: cache-venv
|
|
||||||
uses: actions/cache@v3.3.3
|
|
||||||
with:
|
|
||||||
path: venv
|
|
||||||
key: |
|
|
||||||
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
|
||||||
- name: Fail job if Python cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Restore pre-commit environment from cache
|
|
||||||
id: cache-precommit
|
|
||||||
uses: actions/cache@v3.3.3
|
|
||||||
with:
|
|
||||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
|
||||||
key: |
|
|
||||||
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
|
||||||
- name: Fail job if cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Run pyupgrade
|
|
||||||
run: |
|
|
||||||
. venv/bin/activate
|
|
||||||
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
|
|
||||||
|
|
||||||
pytest:
|
pytest:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: prepare
|
needs: prepare
|
||||||
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
|
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||||
uses: actions/setup-python@v5.0.0
|
uses: actions/setup-python@v5.1.0
|
||||||
id: python
|
id: python
|
||||||
with:
|
with:
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
python-version: ${{ needs.prepare.outputs.python-version }}
|
||||||
- name: Install Cosign
|
- name: Install Cosign
|
||||||
uses: sigstore/cosign-installer@v3.3.0
|
uses: sigstore/cosign-installer@v3.5.0
|
||||||
with:
|
with:
|
||||||
cosign-release: "v2.0.2"
|
cosign-release: "v2.2.3"
|
||||||
- name: Restore Python virtual environment
|
- name: Restore Python virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache@v3.3.3
|
uses: actions/cache@v4.0.2
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
key: |
|
key: |
|
||||||
@@ -392,7 +335,7 @@ jobs:
|
|||||||
-o console_output_style=count \
|
-o console_output_style=count \
|
||||||
tests
|
tests
|
||||||
- name: Upload coverage artifact
|
- name: Upload coverage artifact
|
||||||
uses: actions/upload-artifact@v4.0.0
|
uses: actions/upload-artifact@v4.3.3
|
||||||
with:
|
with:
|
||||||
name: coverage-${{ matrix.python-version }}
|
name: coverage-${{ matrix.python-version }}
|
||||||
path: .coverage
|
path: .coverage
|
||||||
@@ -403,15 +346,15 @@ jobs:
|
|||||||
needs: ["pytest", "prepare"]
|
needs: ["pytest", "prepare"]
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||||
uses: actions/setup-python@v5.0.0
|
uses: actions/setup-python@v5.1.0
|
||||||
id: python
|
id: python
|
||||||
with:
|
with:
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
python-version: ${{ needs.prepare.outputs.python-version }}
|
||||||
- name: Restore Python virtual environment
|
- name: Restore Python virtual environment
|
||||||
id: cache-venv
|
id: cache-venv
|
||||||
uses: actions/cache@v3.3.3
|
uses: actions/cache@v4.0.2
|
||||||
with:
|
with:
|
||||||
path: venv
|
path: venv
|
||||||
key: |
|
key: |
|
||||||
@@ -422,7 +365,7 @@ jobs:
|
|||||||
echo "Failed to restore Python virtual environment from cache"
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
exit 1
|
exit 1
|
||||||
- name: Download all coverage artifacts
|
- name: Download all coverage artifacts
|
||||||
uses: actions/download-artifact@v4.1.1
|
uses: actions/download-artifact@v4.1.7
|
||||||
- name: Combine coverage results
|
- name: Combine coverage results
|
||||||
run: |
|
run: |
|
||||||
. venv/bin/activate
|
. venv/bin/activate
|
||||||
@@ -430,4 +373,4 @@ jobs:
|
|||||||
coverage report
|
coverage report
|
||||||
coverage xml
|
coverage xml
|
||||||
- name: Upload coverage to Codecov
|
- name: Upload coverage to Codecov
|
||||||
uses: codecov/codecov-action@v3.1.4
|
uses: codecov/codecov-action@v4.3.0
|
||||||
|
30
.github/workflows/matchers/flake8.json
vendored
30
.github/workflows/matchers/flake8.json
vendored
@@ -1,30 +0,0 @@
|
|||||||
{
|
|
||||||
"problemMatcher": [
|
|
||||||
{
|
|
||||||
"owner": "flake8-error",
|
|
||||||
"severity": "error",
|
|
||||||
"pattern": [
|
|
||||||
{
|
|
||||||
"regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
|
|
||||||
"file": 1,
|
|
||||||
"line": 2,
|
|
||||||
"column": 3,
|
|
||||||
"message": 4
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"owner": "flake8-warning",
|
|
||||||
"severity": "warning",
|
|
||||||
"pattern": [
|
|
||||||
{
|
|
||||||
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
|
|
||||||
"file": 1,
|
|
||||||
"line": 2,
|
|
||||||
"column": 3,
|
|
||||||
"message": 4
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
4
.github/workflows/release-drafter.yml
vendored
4
.github/workflows/release-drafter.yml
vendored
@@ -11,7 +11,7 @@ jobs:
|
|||||||
name: Release Drafter
|
name: Release Drafter
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout the repository
|
- name: Checkout the repository
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
@@ -36,7 +36,7 @@ jobs:
|
|||||||
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
|
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
- name: Run Release Drafter
|
- name: Run Release Drafter
|
||||||
uses: release-drafter/release-drafter@v5.25.0
|
uses: release-drafter/release-drafter@v6.0.0
|
||||||
with:
|
with:
|
||||||
tag: ${{ steps.version.outputs.version }}
|
tag: ${{ steps.version.outputs.version }}
|
||||||
name: ${{ steps.version.outputs.version }}
|
name: ${{ steps.version.outputs.version }}
|
||||||
|
4
.github/workflows/sentry.yaml
vendored
4
.github/workflows/sentry.yaml
vendored
@@ -10,9 +10,9 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code from GitHub
|
- name: Check out code from GitHub
|
||||||
uses: actions/checkout@v4.1.1
|
uses: actions/checkout@v4.1.4
|
||||||
- name: Sentry Release
|
- name: Sentry Release
|
||||||
uses: getsentry/action-release@v1.6.0
|
uses: getsentry/action-release@v1.7.0
|
||||||
env:
|
env:
|
||||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||||
|
@@ -1,34 +1,15 @@
|
|||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/psf/black
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: 23.12.1
|
rev: v0.2.1
|
||||||
hooks:
|
hooks:
|
||||||
- id: black
|
- id: ruff
|
||||||
args:
|
args:
|
||||||
- --safe
|
- --fix
|
||||||
- --quiet
|
- id: ruff-format
|
||||||
- --target-version
|
|
||||||
- py312
|
|
||||||
files: ^((supervisor|tests)/.+)?[^/]+\.py$
|
files: ^((supervisor|tests)/.+)?[^/]+\.py$
|
||||||
- repo: https://github.com/PyCQA/flake8
|
|
||||||
rev: 7.0.0
|
|
||||||
hooks:
|
|
||||||
- id: flake8
|
|
||||||
additional_dependencies:
|
|
||||||
- flake8-docstrings==1.7.0
|
|
||||||
- pydocstyle==6.3.0
|
|
||||||
files: ^(supervisor|script|tests)/.+\.py$
|
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
rev: v4.5.0
|
rev: v4.5.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: check-executables-have-shebangs
|
- id: check-executables-have-shebangs
|
||||||
stages: [manual]
|
stages: [manual]
|
||||||
- id: check-json
|
- id: check-json
|
||||||
- repo: https://github.com/PyCQA/isort
|
|
||||||
rev: 5.13.2
|
|
||||||
hooks:
|
|
||||||
- id: isort
|
|
||||||
- repo: https://github.com/asottile/pyupgrade
|
|
||||||
rev: v3.15.0
|
|
||||||
hooks:
|
|
||||||
- id: pyupgrade
|
|
||||||
args: [--py312-plus]
|
|
||||||
|
18
.vscode/tasks.json
vendored
18
.vscode/tasks.json
vendored
@@ -58,9 +58,23 @@
|
|||||||
"problemMatcher": []
|
"problemMatcher": []
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"label": "Flake8",
|
"label": "Ruff Check",
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "flake8 supervisor tests",
|
"command": "ruff check --fix supervisor tests",
|
||||||
|
"group": {
|
||||||
|
"kind": "test",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Ruff Format",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "ruff format supervisor tests",
|
||||||
"group": {
|
"group": {
|
||||||
"kind": "test",
|
"kind": "test",
|
||||||
"isDefault": true
|
"isDefault": true
|
||||||
|
12
build.yaml
12
build.yaml
@@ -1,10 +1,10 @@
|
|||||||
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
|
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
|
||||||
build_from:
|
build_from:
|
||||||
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.18
|
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.19
|
||||||
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.18
|
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.19
|
||||||
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.18
|
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.19
|
||||||
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.18
|
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.19
|
||||||
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.18
|
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.19
|
||||||
codenotary:
|
codenotary:
|
||||||
signer: notary@home-assistant.io
|
signer: notary@home-assistant.io
|
||||||
base_image: notary@home-assistant.io
|
base_image: notary@home-assistant.io
|
||||||
@@ -12,7 +12,7 @@ cosign:
|
|||||||
base_identity: https://github.com/home-assistant/docker-base/.*
|
base_identity: https://github.com/home-assistant/docker-base/.*
|
||||||
identity: https://github.com/home-assistant/supervisor/.*
|
identity: https://github.com/home-assistant/supervisor/.*
|
||||||
args:
|
args:
|
||||||
COSIGN_VERSION: 2.0.2
|
COSIGN_VERSION: 2.2.3
|
||||||
labels:
|
labels:
|
||||||
io.hass.type: supervisor
|
io.hass.type: supervisor
|
||||||
org.opencontainers.image.title: Home Assistant Supervisor
|
org.opencontainers.image.title: Home Assistant Supervisor
|
||||||
|
287
pyproject.toml
287
pyproject.toml
@@ -44,7 +44,7 @@ good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]
|
|||||||
|
|
||||||
[tool.pylint."MESSAGES CONTROL"]
|
[tool.pylint."MESSAGES CONTROL"]
|
||||||
# Reasons disabled:
|
# Reasons disabled:
|
||||||
# format - handled by black
|
# format - handled by ruff
|
||||||
# abstract-method - with intro of async there are always methods missing
|
# abstract-method - with intro of async there are always methods missing
|
||||||
# cyclic-import - doesn't test if both import on load
|
# cyclic-import - doesn't test if both import on load
|
||||||
# duplicate-code - unavoidable
|
# duplicate-code - unavoidable
|
||||||
@@ -71,6 +71,136 @@ disable = [
|
|||||||
"too-many-statements",
|
"too-many-statements",
|
||||||
"unused-argument",
|
"unused-argument",
|
||||||
"consider-using-with",
|
"consider-using-with",
|
||||||
|
|
||||||
|
# Handled by ruff
|
||||||
|
# Ref: <https://github.com/astral-sh/ruff/issues/970>
|
||||||
|
"await-outside-async", # PLE1142
|
||||||
|
"bad-str-strip-call", # PLE1310
|
||||||
|
"bad-string-format-type", # PLE1307
|
||||||
|
"bidirectional-unicode", # PLE2502
|
||||||
|
"continue-in-finally", # PLE0116
|
||||||
|
"duplicate-bases", # PLE0241
|
||||||
|
"format-needs-mapping", # F502
|
||||||
|
"function-redefined", # F811
|
||||||
|
# Needed because ruff does not understand type of __all__ generated by a function
|
||||||
|
# "invalid-all-format", # PLE0605
|
||||||
|
"invalid-all-object", # PLE0604
|
||||||
|
"invalid-character-backspace", # PLE2510
|
||||||
|
"invalid-character-esc", # PLE2513
|
||||||
|
"invalid-character-nul", # PLE2514
|
||||||
|
"invalid-character-sub", # PLE2512
|
||||||
|
"invalid-character-zero-width-space", # PLE2515
|
||||||
|
"logging-too-few-args", # PLE1206
|
||||||
|
"logging-too-many-args", # PLE1205
|
||||||
|
"missing-format-string-key", # F524
|
||||||
|
"mixed-format-string", # F506
|
||||||
|
"no-method-argument", # N805
|
||||||
|
"no-self-argument", # N805
|
||||||
|
"nonexistent-operator", # B002
|
||||||
|
"nonlocal-without-binding", # PLE0117
|
||||||
|
"not-in-loop", # F701, F702
|
||||||
|
"notimplemented-raised", # F901
|
||||||
|
"return-in-init", # PLE0101
|
||||||
|
"return-outside-function", # F706
|
||||||
|
"syntax-error", # E999
|
||||||
|
"too-few-format-args", # F524
|
||||||
|
"too-many-format-args", # F522
|
||||||
|
"too-many-star-expressions", # F622
|
||||||
|
"truncated-format-string", # F501
|
||||||
|
"undefined-all-variable", # F822
|
||||||
|
"undefined-variable", # F821
|
||||||
|
"used-prior-global-declaration", # PLE0118
|
||||||
|
"yield-inside-async-function", # PLE1700
|
||||||
|
"yield-outside-function", # F704
|
||||||
|
"anomalous-backslash-in-string", # W605
|
||||||
|
"assert-on-string-literal", # PLW0129
|
||||||
|
"assert-on-tuple", # F631
|
||||||
|
"bad-format-string", # W1302, F
|
||||||
|
"bad-format-string-key", # W1300, F
|
||||||
|
"bare-except", # E722
|
||||||
|
"binary-op-exception", # PLW0711
|
||||||
|
"cell-var-from-loop", # B023
|
||||||
|
# "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
|
||||||
|
"duplicate-except", # B014
|
||||||
|
"duplicate-key", # F601
|
||||||
|
"duplicate-string-formatting-argument", # F
|
||||||
|
"duplicate-value", # F
|
||||||
|
"eval-used", # PGH001
|
||||||
|
"exec-used", # S102
|
||||||
|
# "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
|
||||||
|
"f-string-without-interpolation", # F541
|
||||||
|
"forgotten-debug-statement", # T100
|
||||||
|
"format-string-without-interpolation", # F
|
||||||
|
# "global-statement", # PLW0603, ruff catches new occurrences, needs more work
|
||||||
|
"global-variable-not-assigned", # PLW0602
|
||||||
|
"implicit-str-concat", # ISC001
|
||||||
|
"import-self", # PLW0406
|
||||||
|
"inconsistent-quotes", # Q000
|
||||||
|
"invalid-envvar-default", # PLW1508
|
||||||
|
"keyword-arg-before-vararg", # B026
|
||||||
|
"logging-format-interpolation", # G
|
||||||
|
"logging-fstring-interpolation", # G
|
||||||
|
"logging-not-lazy", # G
|
||||||
|
"misplaced-future", # F404
|
||||||
|
"named-expr-without-context", # PLW0131
|
||||||
|
"nested-min-max", # PLW3301
|
||||||
|
# "pointless-statement", # B018, ruff catches new occurrences, needs more work
|
||||||
|
"raise-missing-from", # TRY200
|
||||||
|
# "redefined-builtin", # A001, ruff is way more stricter, needs work
|
||||||
|
"try-except-raise", # TRY302
|
||||||
|
"unused-argument", # ARG001, we don't use it
|
||||||
|
"unused-format-string-argument", #F507
|
||||||
|
"unused-format-string-key", # F504
|
||||||
|
"unused-import", # F401
|
||||||
|
"unused-variable", # F841
|
||||||
|
"useless-else-on-loop", # PLW0120
|
||||||
|
"wildcard-import", # F403
|
||||||
|
"bad-classmethod-argument", # N804
|
||||||
|
"consider-iterating-dictionary", # SIM118
|
||||||
|
"empty-docstring", # D419
|
||||||
|
"invalid-name", # N815
|
||||||
|
"line-too-long", # E501, disabled globally
|
||||||
|
"missing-class-docstring", # D101
|
||||||
|
"missing-final-newline", # W292
|
||||||
|
"missing-function-docstring", # D103
|
||||||
|
"missing-module-docstring", # D100
|
||||||
|
"multiple-imports", #E401
|
||||||
|
"singleton-comparison", # E711, E712
|
||||||
|
"subprocess-run-check", # PLW1510
|
||||||
|
"superfluous-parens", # UP034
|
||||||
|
"ungrouped-imports", # I001
|
||||||
|
"unidiomatic-typecheck", # E721
|
||||||
|
"unnecessary-direct-lambda-call", # PLC3002
|
||||||
|
"unnecessary-lambda-assignment", # PLC3001
|
||||||
|
"unneeded-not", # SIM208
|
||||||
|
"useless-import-alias", # PLC0414
|
||||||
|
"wrong-import-order", # I001
|
||||||
|
"wrong-import-position", # E402
|
||||||
|
"comparison-of-constants", # PLR0133
|
||||||
|
"comparison-with-itself", # PLR0124
|
||||||
|
# "consider-alternative-union-syntax", # UP007, typing extension
|
||||||
|
"consider-merging-isinstance", # PLR1701
|
||||||
|
# "consider-using-alias", # UP006, typing extension
|
||||||
|
"consider-using-dict-comprehension", # C402
|
||||||
|
"consider-using-generator", # C417
|
||||||
|
"consider-using-get", # SIM401
|
||||||
|
"consider-using-set-comprehension", # C401
|
||||||
|
"consider-using-sys-exit", # PLR1722
|
||||||
|
"consider-using-ternary", # SIM108
|
||||||
|
"literal-comparison", # F632
|
||||||
|
"property-with-parameters", # PLR0206
|
||||||
|
"super-with-arguments", # UP008
|
||||||
|
"too-many-branches", # PLR0912
|
||||||
|
"too-many-return-statements", # PLR0911
|
||||||
|
"too-many-statements", # PLR0915
|
||||||
|
"trailing-comma-tuple", # COM818
|
||||||
|
"unnecessary-comprehension", # C416
|
||||||
|
"use-a-generator", # C417
|
||||||
|
"use-dict-literal", # C406
|
||||||
|
"use-list-literal", # C405
|
||||||
|
"useless-object-inheritance", # UP004
|
||||||
|
"useless-return", # PLR1711
|
||||||
|
# "no-self-use", # PLR6301 # Optional plugin, not enabled
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.pylint.REPORTS]
|
[tool.pylint.REPORTS]
|
||||||
@@ -97,16 +227,145 @@ filterwarnings = [
|
|||||||
"ignore::pytest.PytestUnraisableExceptionWarning",
|
"ignore::pytest.PytestUnraisableExceptionWarning",
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.isort]
|
[tool.ruff]
|
||||||
multi_line_output = 3
|
select = [
|
||||||
include_trailing_comma = true
|
"B002", # Python does not support the unary prefix increment
|
||||||
force_grid_wrap = 0
|
"B007", # Loop control variable {name} not used within loop body
|
||||||
line_length = 88
|
"B014", # Exception handler with duplicate exception
|
||||||
indent = " "
|
"B023", # Function definition does not bind loop variable {name}
|
||||||
force_sort_within_sections = true
|
"B026", # Star-arg unpacking after a keyword argument is strongly discouraged
|
||||||
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
|
"C", # complexity
|
||||||
default_section = "THIRDPARTY"
|
"COM818", # Trailing comma on bare tuple prohibited
|
||||||
forced_separate = "tests"
|
"D", # docstrings
|
||||||
combine_as_imports = true
|
"DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
|
||||||
use_parentheses = true
|
"DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
|
||||||
known_first_party = ["supervisor", "tests"]
|
"E", # pycodestyle
|
||||||
|
"F", # pyflakes/autoflake
|
||||||
|
"G", # flake8-logging-format
|
||||||
|
"I", # isort
|
||||||
|
"ICN001", # import concentions; {name} should be imported as {asname}
|
||||||
|
"N804", # First argument of a class method should be named cls
|
||||||
|
"N805", # First argument of a method should be named self
|
||||||
|
"N815", # Variable {name} in class scope should not be mixedCase
|
||||||
|
"PGH001", # No builtin eval() allowed
|
||||||
|
"PGH004", # Use specific rule codes when using noqa
|
||||||
|
"PLC0414", # Useless import alias. Import alias does not rename original package.
|
||||||
|
"PLC", # pylint
|
||||||
|
"PLE", # pylint
|
||||||
|
"PLR", # pylint
|
||||||
|
"PLW", # pylint
|
||||||
|
"Q000", # Double quotes found but single quotes preferred
|
||||||
|
"RUF006", # Store a reference to the return value of asyncio.create_task
|
||||||
|
"S102", # Use of exec detected
|
||||||
|
"S103", # bad-file-permissions
|
||||||
|
"S108", # hardcoded-temp-file
|
||||||
|
"S306", # suspicious-mktemp-usage
|
||||||
|
"S307", # suspicious-eval-usage
|
||||||
|
"S313", # suspicious-xmlc-element-tree-usage
|
||||||
|
"S314", # suspicious-xml-element-tree-usage
|
||||||
|
"S315", # suspicious-xml-expat-reader-usage
|
||||||
|
"S316", # suspicious-xml-expat-builder-usage
|
||||||
|
"S317", # suspicious-xml-sax-usage
|
||||||
|
"S318", # suspicious-xml-mini-dom-usage
|
||||||
|
"S319", # suspicious-xml-pull-dom-usage
|
||||||
|
"S320", # suspicious-xmle-tree-usage
|
||||||
|
"S601", # paramiko-call
|
||||||
|
"S602", # subprocess-popen-with-shell-equals-true
|
||||||
|
"S604", # call-with-shell-equals-true
|
||||||
|
"S608", # hardcoded-sql-expression
|
||||||
|
"S609", # unix-command-wildcard-injection
|
||||||
|
"SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
|
||||||
|
"SIM117", # Merge with-statements that use the same scope
|
||||||
|
"SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
|
||||||
|
"SIM201", # Use {left} != {right} instead of not {left} == {right}
|
||||||
|
"SIM208", # Use {expr} instead of not (not {expr})
|
||||||
|
"SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
|
||||||
|
"SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
|
||||||
|
"SIM401", # Use get from dict with default instead of an if block
|
||||||
|
"T100", # Trace found: {name} used
|
||||||
|
"T20", # flake8-print
|
||||||
|
"TID251", # Banned imports
|
||||||
|
"TRY004", # Prefer TypeError exception for invalid type
|
||||||
|
"TRY200", # Use raise from to specify exception cause
|
||||||
|
"TRY302", # Remove exception handler; error is immediately re-raised
|
||||||
|
"UP", # pyupgrade
|
||||||
|
"W", # pycodestyle
|
||||||
|
]
|
||||||
|
|
||||||
|
ignore = [
|
||||||
|
"D202", # No blank lines allowed after function docstring
|
||||||
|
"D203", # 1 blank line required before class docstring
|
||||||
|
"D213", # Multi-line docstring summary should start at the second line
|
||||||
|
"D406", # Section name should end with a newline
|
||||||
|
"D407", # Section name underlining
|
||||||
|
"E501", # line too long
|
||||||
|
"E731", # do not assign a lambda expression, use a def
|
||||||
|
|
||||||
|
# Ignore ignored, as the rule is now back in preview/nursery, which cannot
|
||||||
|
# be ignored anymore without warnings.
|
||||||
|
# https://github.com/astral-sh/ruff/issues/7491
|
||||||
|
# "PLC1901", # Lots of false positives
|
||||||
|
|
||||||
|
# False positives https://github.com/astral-sh/ruff/issues/5386
|
||||||
|
"PLC0208", # Use a sequence type instead of a `set` when iterating over values
|
||||||
|
"PLR0911", # Too many return statements ({returns} > {max_returns})
|
||||||
|
"PLR0912", # Too many branches ({branches} > {max_branches})
|
||||||
|
"PLR0913", # Too many arguments to function call ({c_args} > {max_args})
|
||||||
|
"PLR0915", # Too many statements ({statements} > {max_statements})
|
||||||
|
"PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
|
||||||
|
"PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
|
||||||
|
"UP006", # keep type annotation style as is
|
||||||
|
"UP007", # keep type annotation style as is
|
||||||
|
# Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
|
||||||
|
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
|
||||||
|
|
||||||
|
# May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
|
||||||
|
"W191",
|
||||||
|
"E111",
|
||||||
|
"E114",
|
||||||
|
"E117",
|
||||||
|
"D206",
|
||||||
|
"D300",
|
||||||
|
"Q000",
|
||||||
|
"Q001",
|
||||||
|
"Q002",
|
||||||
|
"Q003",
|
||||||
|
"COM812",
|
||||||
|
"COM819",
|
||||||
|
"ISC001",
|
||||||
|
"ISC002",
|
||||||
|
|
||||||
|
# Disabled because ruff does not understand type of __all__ generated by a function
|
||||||
|
"PLE0605",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff.flake8-import-conventions.extend-aliases]
|
||||||
|
voluptuous = "vol"
|
||||||
|
|
||||||
|
[tool.ruff.flake8-pytest-style]
|
||||||
|
fixture-parentheses = false
|
||||||
|
|
||||||
|
[tool.ruff.flake8-tidy-imports.banned-api]
|
||||||
|
"pytz".msg = "use zoneinfo instead"
|
||||||
|
|
||||||
|
[tool.ruff.isort]
|
||||||
|
force-sort-within-sections = true
|
||||||
|
section-order = [
|
||||||
|
"future",
|
||||||
|
"standard-library",
|
||||||
|
"third-party",
|
||||||
|
"first-party",
|
||||||
|
"local-folder",
|
||||||
|
]
|
||||||
|
forced-separate = ["tests"]
|
||||||
|
known-first-party = ["supervisor", "tests"]
|
||||||
|
combine-as-imports = true
|
||||||
|
split-on-trailing-comma = false
|
||||||
|
|
||||||
|
[tool.ruff.per-file-ignores]
|
||||||
|
|
||||||
|
# DBus Service Mocks must use typing and names understood by dbus-fast
|
||||||
|
"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
|
||||||
|
|
||||||
|
[tool.ruff.mccabe]
|
||||||
|
max-complexity = 25
|
||||||
|
@@ -1,28 +1,29 @@
|
|||||||
aiodns==3.1.1
|
aiodns==3.2.0
|
||||||
aiohttp==3.9.1
|
aiohttp==3.9.5
|
||||||
aiohttp-fast-url-dispatcher==0.3.0
|
aiohttp-fast-url-dispatcher==0.3.0
|
||||||
async_timeout==4.0.3
|
|
||||||
atomicwrites-homeassistant==1.4.1
|
atomicwrites-homeassistant==1.4.1
|
||||||
attrs==23.2.0
|
attrs==23.2.0
|
||||||
awesomeversion==23.11.0
|
awesomeversion==24.2.0
|
||||||
brotli==1.1.0
|
brotli==1.1.0
|
||||||
ciso8601==2.3.1
|
ciso8601==2.3.1
|
||||||
colorlog==6.8.0
|
colorlog==6.8.2
|
||||||
cpe==1.2.1
|
cpe==1.2.1
|
||||||
cryptography==41.0.7
|
cryptography==42.0.5
|
||||||
debugpy==1.8.0
|
debugpy==1.8.1
|
||||||
deepmerge==1.1.1
|
deepmerge==1.1.1
|
||||||
dirhash==0.2.1
|
dirhash==0.4.0
|
||||||
docker==7.0.0
|
docker==7.0.0
|
||||||
faust-cchardet==2.1.19
|
faust-cchardet==2.1.19
|
||||||
gitpython==3.1.41
|
gitpython==3.1.43
|
||||||
jinja2==3.1.3
|
jinja2==3.1.3
|
||||||
orjson==3.9.10
|
orjson==3.10.1
|
||||||
pulsectl==23.5.2
|
pulsectl==24.4.0
|
||||||
pyudev==0.24.1
|
pyudev==0.24.1
|
||||||
PyYAML==6.0.1
|
PyYAML==6.0.1
|
||||||
securetar==2023.12.0
|
securetar==2024.2.1
|
||||||
sentry-sdk==1.39.2
|
sentry-sdk==1.45.0
|
||||||
voluptuous==0.14.1
|
setuptools==69.5.1
|
||||||
dbus-fast==2.21.0
|
voluptuous==0.14.2
|
||||||
typing_extensions==4.9.0
|
dbus-fast==2.21.1
|
||||||
|
typing_extensions==4.11.0
|
||||||
|
zlib-fast==0.2.0
|
||||||
|
@@ -1,16 +1,12 @@
|
|||||||
black==23.12.1
|
coverage==7.5.0
|
||||||
coverage==7.4.0
|
pre-commit==3.7.0
|
||||||
flake8-docstrings==1.7.0
|
pylint==3.1.0
|
||||||
flake8==7.0.0
|
|
||||||
pre-commit==3.6.0
|
|
||||||
pydocstyle==6.3.0
|
|
||||||
pylint==3.0.3
|
|
||||||
pytest-aiohttp==1.0.5
|
pytest-aiohttp==1.0.5
|
||||||
pytest-asyncio==0.23.3
|
pytest-asyncio==0.23.5
|
||||||
pytest-cov==4.1.0
|
pytest-cov==5.0.0
|
||||||
pytest-timeout==2.2.0
|
pytest-timeout==2.3.1
|
||||||
pytest==7.4.4
|
pytest==8.1.1
|
||||||
pyupgrade==3.15.0
|
ruff==0.4.1
|
||||||
time-machine==2.13.0
|
time-machine==2.14.1
|
||||||
typing_extensions==4.9.0
|
typing_extensions==4.11.0
|
||||||
urllib3==2.1.0
|
urllib3==2.2.1
|
||||||
|
17
setup.cfg
17
setup.cfg
@@ -1,17 +0,0 @@
|
|||||||
[flake8]
|
|
||||||
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
|
|
||||||
doctests = True
|
|
||||||
max-line-length = 88
|
|
||||||
# E501: line too long
|
|
||||||
# W503: Line break occurred before a binary operator
|
|
||||||
# E203: Whitespace before ':'
|
|
||||||
# D202 No blank lines allowed after function docstring
|
|
||||||
# W504 line break after binary operator
|
|
||||||
ignore =
|
|
||||||
E501,
|
|
||||||
W503,
|
|
||||||
E203,
|
|
||||||
D202,
|
|
||||||
W504
|
|
||||||
per-file-ignores =
|
|
||||||
tests/dbus_service_mocks/*.py: F821,F722
|
|
@@ -5,8 +5,15 @@ import logging
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from supervisor import bootstrap
|
import zlib_fast
|
||||||
from supervisor.utils.logging import activate_log_queue_handler
|
|
||||||
|
# Enable fast zlib before importing supervisor
|
||||||
|
zlib_fast.enable()
|
||||||
|
|
||||||
|
from supervisor import bootstrap # pylint: disable=wrong-import-position # noqa: E402
|
||||||
|
from supervisor.utils.logging import ( # pylint: disable=wrong-import-position # noqa: E402
|
||||||
|
activate_log_queue_handler,
|
||||||
|
)
|
||||||
|
|
||||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -3,6 +3,7 @@ import asyncio
 from collections.abc import Awaitable
 from contextlib import suppress
 from copy import deepcopy
+from datetime import datetime
 import errno
 from ipaddress import IPv4Address
 import logging
@@ -15,11 +16,14 @@ from tempfile import TemporaryDirectory
 from typing import Any, Final

 import aiohttp
+from awesomeversion import AwesomeVersionCompareException
 from deepmerge import Merger
 from securetar import atomic_contents_add, secure_path
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

+from supervisor.utils.dt import utc_from_timestamp
+
 from ..bus import EventListener
 from ..const import (
     ATTR_ACCESS_TOKEN,
@@ -46,6 +50,7 @@ from ..const import (
     ATTR_USER,
     ATTR_UUID,
     ATTR_VERSION,
+    ATTR_VERSION_TIMESTAMP,
     ATTR_WATCHDOG,
     DNS_SUFFIX,
     AddonBoot,
@@ -175,6 +180,9 @@ class Addon(AddonModel):

     async def load(self) -> None:
         """Async initialize of object."""
+        if self.is_detached:
+            await super().refresh_path_cache()
+
         self._listeners.append(
             self.sys_bus.register_event(
                 BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed
@@ -187,9 +195,20 @@ class Addon(AddonModel):
         )

         await self._check_ingress_port()
-        with suppress(DockerError):
+        default_image = self._image(self.data)
+        try:
             await self.instance.attach(version=self.version)
+
+            # Ensure we are using correct image for this system
+            await self.instance.check_image(self.version, default_image, self.arch)
+        except DockerError:
+            _LOGGER.info("No %s addon Docker image %s found", self.slug, self.image)
+            with suppress(DockerError):
+                await self.instance.install(self.version, default_image, arch=self.arch)
+
+        self.persist[ATTR_IMAGE] = default_image
+        self.save_persist()

     @property
     def ip_address(self) -> IPv4Address:
         """Return IP of add-on instance."""
@@ -225,6 +244,34 @@ class Addon(AddonModel):
         """Return True if add-on is detached."""
         return self.slug not in self.sys_store.data.addons

+    @property
+    def with_icon(self) -> bool:
+        """Return True if an icon exists."""
+        if self.is_detached:
+            return super().with_icon
+        return self.addon_store.with_icon
+
+    @property
+    def with_logo(self) -> bool:
+        """Return True if a logo exists."""
+        if self.is_detached:
+            return super().with_logo
+        return self.addon_store.with_logo
+
+    @property
+    def with_changelog(self) -> bool:
+        """Return True if a changelog exists."""
+        if self.is_detached:
+            return super().with_changelog
+        return self.addon_store.with_changelog
+
+    @property
+    def with_documentation(self) -> bool:
+        """Return True if a documentation exists."""
+        if self.is_detached:
+            return super().with_documentation
+        return self.addon_store.with_documentation
+
     @property
     def available(self) -> bool:
         """Return True if this add-on is available on this platform."""
@@ -279,6 +326,28 @@ class Addon(AddonModel):
         """Set auto update."""
         self.persist[ATTR_AUTO_UPDATE] = value

+    @property
+    def auto_update_available(self) -> bool:
+        """Return if it is safe to auto update addon."""
+        if not self.need_update or not self.auto_update:
+            return False
+
+        for version in self.breaking_versions:
+            try:
+                # Must update to latest so if true update crosses a breaking version
+                if self.version < version:
+                    return False
+            except AwesomeVersionCompareException:
+                # If version scheme changed, we may get compare exception
+                # If latest version >= breaking version then assume update will
+                # cross it as the version scheme changes
+                # If both versions have compare exception, ignore as its in the past
+                with suppress(AwesomeVersionCompareException):
+                    if self.latest_version >= version:
+                        return False
+
+        return True
+
     @property
     def watchdog(self) -> bool:
         """Return True if watchdog is enable."""
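Note: the auto_update_available property added above only allows an automatic update when it would not cross any declared breaking version. Below is a standalone sketch of that comparison logic using the awesomeversion package the diff imports; the function name and version strings are made up for illustration.

from awesomeversion import AwesomeVersion, AwesomeVersionCompareException

def safe_to_auto_update(installed: str, latest: str, breaking: list[str]) -> bool:
    """Return True when an auto update would not cross a breaking version."""
    for version in breaking:
        try:
            # Updates always go to the latest release, so crossing a breaking
            # version means the installed version is still below it.
            if AwesomeVersion(installed) < AwesomeVersion(version):
                return False
        except AwesomeVersionCompareException:
            # Version scheme changed; fall back to comparing against latest.
            try:
                if AwesomeVersion(latest) >= AwesomeVersion(version):
                    return False
            except AwesomeVersionCompareException:
                pass  # Both comparisons failed: treat the entry as historic.
    return True

print(safe_to_auto_update("2.1.0", "2.2.0", ["2.0.0"]))  # True: already past the breaking release
print(safe_to_auto_update("1.9.0", "2.1.0", ["2.0.0"]))  # False: the update would cross 2.0.0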
@@ -321,6 +390,11 @@ class Addon(AddonModel):
         """Return version of add-on."""
         return self.data_store[ATTR_VERSION]

+    @property
+    def latest_version_timestamp(self) -> datetime:
+        """Return when latest version was first seen."""
+        return utc_from_timestamp(self.data_store[ATTR_VERSION_TIMESTAMP])
+
     @property
     def protected(self) -> bool:
         """Return if add-on is in protected mode."""
@@ -655,7 +729,7 @@ class Addon(AddonModel):
         limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
     )
-    async def uninstall(self) -> None:
+    async def uninstall(self, *, remove_config: bool) -> None:
         """Uninstall and cleanup this addon."""
         try:
             await self.instance.remove()
@@ -666,6 +740,10 @@ class Addon(AddonModel):

         await self.unload()

+        # Remove config if present and requested
+        if self.addon_config_used and remove_config:
+            await remove_data(self.path_config)
+
         # Cleanup audio settings
         if self.path_pulse.exists():
             with suppress(OSError):
@@ -770,6 +848,7 @@ class Addon(AddonModel):
                 raise AddonsError() from err

             self.sys_addons.data.update(self.addon_store)
+            await self._check_ingress_port()
             _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)

         finally:
@@ -1221,7 +1300,7 @@ class Addon(AddonModel):
                 _LOGGER.info("Restore/Update of image for addon %s", self.slug)
                 with suppress(DockerError):
                     await self.instance.update(version, restore_image, self.arch)
-            self._check_ingress_port()
+            await self._check_ingress_port()

             # Restore data and config
             def _restore_data():
@@ -1362,3 +1441,9 @@ class Addon(AddonModel):
             ContainerState.UNHEALTHY,
         ]:
             await self._restart_after_problem(event.state)
+
+    def refresh_path_cache(self) -> Awaitable[None]:
+        """Refresh cache of existing paths."""
+        if self.is_detached:
+            return super().refresh_path_cache()
+        return self.addon_store.refresh_path_cache()
@@ -102,11 +102,11 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         except HassioArchNotFound:
             return False

-    def get_docker_args(self, version: AwesomeVersion):
+    def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
         """Create a dict with Docker build arguments."""
         args = {
             "path": str(self.addon.path_location),
-            "tag": f"{self.addon.image}:{version!s}",
+            "tag": f"{image or self.addon.image}:{version!s}",
             "dockerfile": str(self.dockerfile),
             "pull": True,
             "forcerm": not self.sys_dev,
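Note: get_docker_args now accepts an optional image override that only affects the build tag. A minimal sketch of that behaviour with invented image names; the rest of the build arguments are unchanged.

from awesomeversion import AwesomeVersion

def build_tag(default_image: str, version: AwesomeVersion, image: str | None = None) -> str:
    # Mirrors the f-string in the diff: prefer the override when given.
    return f"{image or default_image}:{version!s}"

version = AwesomeVersion("1.2.3")
print(build_tag("ghcr.io/example/addon-amd64", version))                      # default image
print(build_tag("ghcr.io/example/addon-amd64", version, "local/addon-test"))  # explicit override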
@@ -28,6 +28,7 @@ class MappingType(StrEnum):


 ATTR_BACKUP = "backup"
+ATTR_BREAKING_VERSIONS = "breaking_versions"
 ATTR_CODENOTARY = "codenotary"
 ATTR_READ_ONLY = "read_only"
 ATTR_PATH = "path"
@@ -77,15 +77,20 @@ class AddonManager(CoreSysAttributes):

     async def load(self) -> None:
         """Start up add-on management."""
-        tasks = []
+        # Refresh cache for all store addons
+        tasks: list[Awaitable[None]] = [
+            store.refresh_path_cache() for store in self.store.values()
+        ]
+
+        # Load all installed addons
         for slug in self.data.system:
             addon = self.local[slug] = Addon(self.coresys, slug)
-            tasks.append(self.sys_create_task(addon.load()))
+            tasks.append(addon.load())

         # Run initial tasks
-        _LOGGER.info("Found %d installed add-ons", len(tasks))
+        _LOGGER.info("Found %d installed add-ons", len(self.data.system))
         if tasks:
-            await asyncio.wait(tasks)
+            await asyncio.gather(*tasks)

         # Sync DNS
         await self.sync_dns()
@@ -173,13 +178,13 @@ class AddonManager(CoreSysAttributes):

         _LOGGER.info("Add-on '%s' successfully installed", slug)

-    async def uninstall(self, slug: str) -> None:
+    async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
         """Remove an add-on."""
         if slug not in self.local:
             _LOGGER.warning("Add-on %s is not installed", slug)
             return

-        await self.local[slug].uninstall()
+        await self.local[slug].uninstall(remove_config=remove_config)

         _LOGGER.info("Add-on '%s' successfully removed", slug)
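Note: load() now collects plain coroutine objects (store cache refreshes plus installed add-on loads) and awaits them all with asyncio.gather, instead of wrapping each in a task for asyncio.wait. A self-contained sketch of the same pattern with dummy coroutines:

import asyncio

async def refresh(name: str) -> None:
    await asyncio.sleep(0)  # placeholder for real I/O
    print(f"refreshed {name}")

async def main() -> None:
    tasks = [refresh(f"store-{i}") for i in range(2)]      # store cache refreshes
    tasks.extend(refresh(f"addon-{i}") for i in range(3))  # installed add-on loads
    await asyncio.gather(*tasks)  # gather accepts coroutines directly

asyncio.run(main())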
@@ -1,14 +1,17 @@
 """Init file for Supervisor add-ons."""
 from abc import ABC, abstractmethod
 from collections import defaultdict
-from collections.abc import Callable
+from collections.abc import Awaitable, Callable
 from contextlib import suppress
+from datetime import datetime
 import logging
 from pathlib import Path
 from typing import Any

 from awesomeversion import AwesomeVersion, AwesomeVersionException

+from supervisor.utils.dt import utc_from_timestamp
+
 from ..const import (
     ATTR_ADVANCED,
     ATTR_APPARMOR,
@@ -71,6 +74,7 @@ from ..const import (
     ATTR_URL,
     ATTR_USB,
     ATTR_VERSION,
+    ATTR_VERSION_TIMESTAMP,
     ATTR_VIDEO,
     ATTR_WATCHDOG,
     ATTR_WEBUI,
@@ -90,6 +94,7 @@ from ..utils import version_is_new_enough
 from .configuration import FolderMapping
 from .const import (
     ATTR_BACKUP,
+    ATTR_BREAKING_VERSIONS,
     ATTR_CODENOTARY,
     ATTR_PATH,
     ATTR_READ_ONLY,
@@ -113,6 +118,10 @@ class AddonModel(JobGroup, ABC):
             coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
         )
         self.slug: str = slug
+        self._path_icon_exists: bool = False
+        self._path_logo_exists: bool = False
+        self._path_changelog_exists: bool = False
+        self._path_documentation_exists: bool = False

     @property
     @abstractmethod
@@ -221,6 +230,11 @@ class AddonModel(JobGroup, ABC):
         """Return latest version of add-on."""
         return self.data[ATTR_VERSION]

+    @property
+    def latest_version_timestamp(self) -> datetime:
+        """Return when latest version was first seen."""
+        return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
+
     @property
     def version(self) -> AwesomeVersion:
         """Return version of add-on."""
@@ -501,22 +515,22 @@ class AddonModel(JobGroup, ABC):
     @property
     def with_icon(self) -> bool:
         """Return True if an icon exists."""
-        return self.path_icon.exists()
+        return self._path_icon_exists

     @property
     def with_logo(self) -> bool:
         """Return True if a logo exists."""
-        return self.path_logo.exists()
+        return self._path_logo_exists

     @property
     def with_changelog(self) -> bool:
         """Return True if a changelog exists."""
-        return self.path_changelog.exists()
+        return self._path_changelog_exists

     @property
     def with_documentation(self) -> bool:
         """Return True if a documentation exists."""
-        return self.path_documentation.exists()
+        return self._path_documentation_exists

     @property
     def supported_arch(self) -> list[str]:
@@ -620,6 +634,22 @@ class AddonModel(JobGroup, ABC):
         """Return Signer email address for CAS."""
         return self.data.get(ATTR_CODENOTARY)

+    @property
+    def breaking_versions(self) -> list[AwesomeVersion]:
+        """Return breaking versions of addon."""
+        return self.data[ATTR_BREAKING_VERSIONS]
+
+    def refresh_path_cache(self) -> Awaitable[None]:
+        """Refresh cache of existing paths."""
+
+        def check_paths():
+            self._path_icon_exists = self.path_icon.exists()
+            self._path_logo_exists = self.path_logo.exists()
+            self._path_changelog_exists = self.path_changelog.exists()
+            self._path_documentation_exists = self.path_documentation.exists()
+
+        return self.sys_run_in_executor(check_paths)
+
     def validate_availability(self) -> None:
         """Validate if addon is available for current system."""
         return self._validate_availability(self.data, logger=_LOGGER.error)
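Note: refresh_path_cache above moves the blocking Path.exists() checks into an executor once and caches the results, so the with_icon/with_logo/... properties no longer touch the filesystem on every access. A simplified sketch of that idea; the class and file names here are illustrative, not Supervisor's:

import asyncio
from pathlib import Path

class PathCache:
    def __init__(self, base: Path) -> None:
        self._base = base
        self._icon_exists = False

    async def refresh(self) -> None:
        def check_paths() -> None:
            # Blocking filesystem access runs in a worker thread.
            self._icon_exists = (self._base / "icon.png").exists()

        await asyncio.get_running_loop().run_in_executor(None, check_paths)

    @property
    def with_icon(self) -> bool:
        return self._icon_exists  # cached, no filesystem call

async def main() -> None:
    cache = PathCache(Path("."))
    await cache.refresh()
    print(cache.with_icon)

asyncio.run(main())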
@@ -99,7 +99,6 @@ from ..const import (
     AddonStartup,
     AddonState,
 )
-from ..discovery.validate import valid_discovery_service
 from ..docker.const import Capabilities
 from ..validate import (
     docker_image,
@@ -112,6 +111,7 @@ from ..validate import (
 )
 from .const import (
     ATTR_BACKUP,
+    ATTR_BREAKING_VERSIONS,
     ATTR_CODENOTARY,
     ATTR_PATH,
     ATTR_READ_ONLY,
@@ -189,20 +189,6 @@ def _warn_addon_config(config: dict[str, Any]):
             name,
         )

-    invalid_services: list[str] = []
-    for service in config.get(ATTR_DISCOVERY, []):
-        try:
-            valid_discovery_service(service)
-        except vol.Invalid:
-            invalid_services.append(service)
-
-    if invalid_services:
-        _LOGGER.warning(
-            "Add-on lists the following unknown services for discovery: %s. Please report this to the maintainer of %s",
-            ", ".join(invalid_services),
-            name,
-        )
-
     return config


@@ -422,6 +408,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
             vol.Coerce(int), vol.Range(min=10, max=300)
         ),
         vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
+        vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
     },
     extra=vol.REMOVE_EXTRA,
 )
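Note: the schema change above adds an optional breaking_versions list (default empty) of version tags to the add-on config schema. A cut-down, self-contained stand-in showing how voluptuous handles the default and the coercion; version_tag here is a simplified substitute for Supervisor's validator:

from awesomeversion import AwesomeVersion
import voluptuous as vol

version_tag = vol.Coerce(AwesomeVersion)  # simplified stand-in

SCHEMA = vol.Schema(
    {vol.Optional("breaking_versions", default=list): [version_tag]},
    extra=vol.REMOVE_EXTRA,
)

print(SCHEMA({}))                                # {'breaking_versions': []}
print(SCHEMA({"breaking_versions": ["2.0.0"]}))  # entries coerced to AwesomeVersion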
@@ -9,12 +9,14 @@ from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispa

 from ..const import AddonState
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import APIAddonNotInstalled
+from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
+from ..utils.sentry import capture_exception
 from .addons import APIAddons
 from .audio import APIAudio
 from .auth import APIAuth
 from .backups import APIBackups
 from .cli import APICli
+from .const import CONTENT_TYPE_TEXT
 from .discovery import APIDiscovery
 from .dns import APICoreDNS
 from .docker import APIDocker
@@ -36,7 +38,7 @@ from .security import APISecurity
 from .services import APIServices
 from .store import APIStore
 from .supervisor import APISupervisor
-from .utils import api_process
+from .utils import api_process, api_process_raw

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -71,8 +73,14 @@ class RestAPI(CoreSysAttributes):
         self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
         self._site: web.TCPSite | None = None

+        # share single host API handler for reuse in logging endpoints
+        self._api_host: APIHost | None = None
+
     async def load(self) -> None:
         """Register REST API Calls."""
+        self._api_host = APIHost()
+        self._api_host.coresys = self.coresys
+
         self._register_addons()
         self._register_audio()
         self._register_auth()
@@ -102,10 +110,41 @@ class RestAPI(CoreSysAttributes):

         await self.start()

+    def _register_advanced_logs(self, path: str, syslog_identifier: str):
+        """Register logs endpoint for a given path, returning logs for single syslog identifier."""
+
+        self.webapp.add_routes(
+            [
+                web.get(
+                    f"{path}/logs",
+                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
+                ),
+                web.get(
+                    f"{path}/logs/follow",
+                    partial(
+                        self._api_host.advanced_logs,
+                        identifier=syslog_identifier,
+                        follow=True,
+                    ),
+                ),
+                web.get(
+                    f"{path}/logs/boots/{{bootid}}",
+                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
+                ),
+                web.get(
+                    f"{path}/logs/boots/{{bootid}}/follow",
+                    partial(
+                        self._api_host.advanced_logs,
+                        identifier=syslog_identifier,
+                        follow=True,
+                    ),
+                ),
+            ]
+        )
+
     def _register_host(self) -> None:
         """Register hostcontrol functions."""
-        api_host = APIHost()
-        api_host.coresys = self.coresys
+        api_host = self._api_host

         self.webapp.add_routes(
             [
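Note: _register_advanced_logs above reuses one host-API handler for several log routes by pre-binding keyword arguments with functools.partial. A minimal aiohttp sketch of that registration pattern; the handler body, paths and port are invented for illustration:

from functools import partial

from aiohttp import web

async def advanced_logs(request: web.Request, identifier: str, follow: bool = False) -> web.Response:
    return web.Response(text=f"logs for {identifier}, follow={follow}")

app = web.Application()
app.add_routes(
    [
        web.get("/demo/logs", partial(advanced_logs, identifier="demo")),
        web.get("/demo/logs/follow", partial(advanced_logs, identifier="demo", follow=True)),
    ]
)

if __name__ == "__main__":
    web.run_app(app, port=8099)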
@@ -182,6 +221,8 @@ class RestAPI(CoreSysAttributes):
                 web.post("/os/config/sync", api_os.config_sync),
                 web.post("/os/datadisk/move", api_os.migrate_data),
                 web.get("/os/datadisk/list", api_os.list_data),
+                web.post("/os/datadisk/wipe", api_os.wipe_data),
+                web.post("/os/boot-slot", api_os.set_boot_slot),
             ]
         )

@@ -219,6 +260,8 @@ class RestAPI(CoreSysAttributes):
                 web.get("/jobs/info", api_jobs.info),
                 web.post("/jobs/options", api_jobs.options),
                 web.post("/jobs/reset", api_jobs.reset),
+                web.get("/jobs/{uuid}", api_jobs.job_info),
+                web.delete("/jobs/{uuid}", api_jobs.remove_job),
             ]
         )

@@ -257,11 +300,11 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/multicast/info", api_multicast.info),
                 web.get("/multicast/stats", api_multicast.stats),
-                web.get("/multicast/logs", api_multicast.logs),
                 web.post("/multicast/update", api_multicast.update),
                 web.post("/multicast/restart", api_multicast.restart),
             ]
         )
+        self._register_advanced_logs("/multicast", "hassio_multicast")

     def _register_hardware(self) -> None:
         """Register hardware functions."""
@@ -334,6 +377,7 @@ class RestAPI(CoreSysAttributes):
                 web.post("/auth", api_auth.auth),
                 web.post("/auth/reset", api_auth.reset),
                 web.delete("/auth/cache", api_auth.cache),
+                web.get("/auth/list", api_auth.list_users),
             ]
         )

@@ -347,7 +391,6 @@ class RestAPI(CoreSysAttributes):
                 web.get("/supervisor/ping", api_supervisor.ping),
                 web.get("/supervisor/info", api_supervisor.info),
                 web.get("/supervisor/stats", api_supervisor.stats),
-                web.get("/supervisor/logs", api_supervisor.logs),
                 web.post("/supervisor/update", api_supervisor.update),
                 web.post("/supervisor/reload", api_supervisor.reload),
                 web.post("/supervisor/restart", api_supervisor.restart),
@@ -356,6 +399,38 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        async def get_supervisor_logs(*args, **kwargs):
+            try:
+                return await self._api_host.advanced_logs_handler(
+                    *args, identifier="hassio_supervisor", **kwargs
+                )
+            except Exception as err:  # pylint: disable=broad-exception-caught
+                # Supervisor logs are critical, so catch everything, log the exception
+                # and try to return Docker container logs as the fallback
+                _LOGGER.exception(
+                    "Failed to get supervisor logs using advanced_logs API"
+                )
+                if not isinstance(err, HostNotSupportedError):
+                    # No need to capture HostNotSupportedError to Sentry, the cause
+                    # is known and reported to the user using the resolution center.
+                    capture_exception(err)
+                return await api_supervisor.logs(*args, **kwargs)
+
+        self.webapp.add_routes(
+            [
+                web.get("/supervisor/logs", get_supervisor_logs),
+                web.get(
+                    "/supervisor/logs/follow",
+                    partial(get_supervisor_logs, follow=True),
+                ),
+                web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
+                web.get(
+                    "/supervisor/logs/boots/{bootid}/follow",
+                    partial(get_supervisor_logs, follow=True),
+                ),
+            ]
+        )
+
     def _register_homeassistant(self) -> None:
         """Register Home Assistant functions."""
         api_hass = APIHomeAssistant()
@@ -364,7 +439,6 @@ class RestAPI(CoreSysAttributes):
         self.webapp.add_routes(
             [
                 web.get("/core/info", api_hass.info),
-                web.get("/core/logs", api_hass.logs),
                 web.get("/core/stats", api_hass.stats),
                 web.post("/core/options", api_hass.options),
                 web.post("/core/update", api_hass.update),
@@ -376,11 +450,12 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/core", "homeassistant")
+
         # Reroute from legacy
         self.webapp.add_routes(
             [
                 web.get("/homeassistant/info", api_hass.info),
-                web.get("/homeassistant/logs", api_hass.logs),
                 web.get("/homeassistant/stats", api_hass.stats),
                 web.post("/homeassistant/options", api_hass.options),
                 web.post("/homeassistant/restart", api_hass.restart),
@@ -392,6 +467,8 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/homeassistant", "homeassistant")
+
     def _register_proxy(self) -> None:
         """Register Home Assistant API Proxy."""
         api_proxy = APIProxy()
@@ -438,13 +515,33 @@ class RestAPI(CoreSysAttributes):
                 ),
                 web.get("/addons/{addon}/options/config", api_addons.options_config),
                 web.post("/addons/{addon}/rebuild", api_addons.rebuild),
-                web.get("/addons/{addon}/logs", api_addons.logs),
                 web.post("/addons/{addon}/stdin", api_addons.stdin),
                 web.post("/addons/{addon}/security", api_addons.security),
                 web.get("/addons/{addon}/stats", api_addons.stats),
             ]
         )

+        @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
+        async def get_addon_logs(request, *args, **kwargs):
+            addon = api_addons.get_addon_for_request(request)
+            kwargs["identifier"] = f"addon_{addon.slug}"
+            return await self._api_host.advanced_logs(request, *args, **kwargs)
+
+        self.webapp.add_routes(
+            [
+                web.get("/addons/{addon}/logs", get_addon_logs),
+                web.get(
+                    "/addons/{addon}/logs/follow",
+                    partial(get_addon_logs, follow=True),
+                ),
+                web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
+                web.get(
+                    "/addons/{addon}/logs/boots/{bootid}/follow",
+                    partial(get_addon_logs, follow=True),
+                ),
+            ]
+        )
+
         # Legacy routing to support requests for not installed addons
         api_store = APIStore()
         api_store.coresys = self.coresys
@@ -542,7 +639,6 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/dns/info", api_dns.info),
                 web.get("/dns/stats", api_dns.stats),
-                web.get("/dns/logs", api_dns.logs),
                 web.post("/dns/update", api_dns.update),
                 web.post("/dns/options", api_dns.options),
                 web.post("/dns/restart", api_dns.restart),
@@ -550,18 +646,17 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/dns", "hassio_dns")
+
     def _register_audio(self) -> None:
         """Register Audio functions."""
         api_audio = APIAudio()
         api_audio.coresys = self.coresys
-        api_host = APIHost()
-        api_host.coresys = self.coresys

         self.webapp.add_routes(
             [
                 web.get("/audio/info", api_audio.info),
                 web.get("/audio/stats", api_audio.stats),
-                web.get("/audio/logs", api_audio.logs),
                 web.post("/audio/update", api_audio.update),
                 web.post("/audio/restart", api_audio.restart),
                 web.post("/audio/reload", api_audio.reload),
@@ -574,6 +669,8 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/audio", "hassio_audio")
+
     def _register_mounts(self) -> None:
         """Register mounts endpoints."""
         api_mounts = APIMounts()
@@ -106,8 +106,8 @@ from ..exceptions import (
     PwnedSecret,
 )
 from ..validate import docker_ports
-from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate, json_loads
+from .const import ATTR_REMOVE_CONFIG, ATTR_SIGNED
+from .utils import api_process, api_validate, json_loads

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -126,15 +126,19 @@ SCHEMA_OPTIONS = vol.Schema(
     }
 )

-# pylint: disable=no-value-for-parameter
 SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})

+SCHEMA_UNINSTALL = vol.Schema(
+    {vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
+)
+# pylint: enable=no-value-for-parameter
+

 class APIAddons(CoreSysAttributes):
     """Handle RESTful API for add-on functions."""

-    def _extract_addon(self, request: web.Request) -> Addon:
-        """Return addon, throw an exception it it doesn't exist."""
+    def get_addon_for_request(self, request: web.Request) -> Addon:
+        """Return addon, throw an exception if it doesn't exist."""
         addon_slug: str = request.match_info.get("addon")

         # Lookup itself
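Note: the new SCHEMA_UNINSTALL above makes remove_config an optional boolean defaulting to False, so an uninstall request with an empty body keeps the add-on's config folder. A small demonstration of how that body validates:

import voluptuous as vol

ATTR_REMOVE_CONFIG = "remove_config"

SCHEMA_UNINSTALL = vol.Schema(
    {vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
)

print(SCHEMA_UNINSTALL({}))                       # {'remove_config': False}
print(SCHEMA_UNINSTALL({"remove_config": True}))  # {'remove_config': True}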
@@ -187,7 +191,7 @@ class APIAddons(CoreSysAttributes):

     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return add-on information."""
-        addon: AnyAddon = self._extract_addon(request)
+        addon: AnyAddon = self.get_addon_for_request(request)

         data = {
             ATTR_NAME: addon.name,
@@ -268,7 +272,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def options(self, request: web.Request) -> None:
         """Store user options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)

         # Update secrets for validation
         await self.sys_homeassistant.secrets.reload()
@@ -303,7 +307,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def options_validate(self, request: web.Request) -> None:
         """Validate user options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}

         options = await request.json(loads=json_loads) or addon.options
@@ -345,7 +349,7 @@ class APIAddons(CoreSysAttributes):
         slug: str = request.match_info.get("addon")
         if slug != "self":
             raise APIForbidden("This can be only read by the Add-on itself!")
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)

         # Lookup/reload secrets
         await self.sys_homeassistant.secrets.reload()
@@ -357,7 +361,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def security(self, request: web.Request) -> None:
         """Store security options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

         if ATTR_PROTECTED in body:
@@ -369,7 +373,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def stats(self, request: web.Request) -> dict[str, Any]:
         """Return resource information."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)

         stats: DockerStats = await addon.stats()

@@ -385,48 +389,47 @@ class APIAddons(CoreSysAttributes):
         }

     @api_process
-    def uninstall(self, request: web.Request) -> Awaitable[None]:
+    async def uninstall(self, request: web.Request) -> Awaitable[None]:
         """Uninstall add-on."""
-        addon = self._extract_addon(request)
-        return asyncio.shield(self.sys_addons.uninstall(addon.slug))
+        addon = self.get_addon_for_request(request)
+        body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
+        return await asyncio.shield(
+            self.sys_addons.uninstall(
+                addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
+            )
+        )

     @api_process
     async def start(self, request: web.Request) -> None:
         """Start add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(addon.start()):
             await start_task

     @api_process
     def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         return asyncio.shield(addon.stop())

     @api_process
     async def restart(self, request: web.Request) -> None:
         """Restart add-on."""
-        addon: Addon = self._extract_addon(request)
+        addon: Addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(addon.restart()):
             await start_task

     @api_process
     async def rebuild(self, request: web.Request) -> None:
         """Rebuild local build add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
             await start_task

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return logs from add-on."""
-        addon = self._extract_addon(request)
-        return addon.logs()
-
     @api_process
     async def stdin(self, request: web.Request) -> None:
         """Write to stdin of add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if not addon.with_stdin:
             raise APIError(f"STDIN not supported the {addon.slug} add-on")

@@ -35,8 +35,7 @@ from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..host.sound import StreamType
 from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -111,11 +110,6 @@ class APIAudio(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.audio.update(version))

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Audio Docker logs."""
-        return self.sys_plugins.audio.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Audio plugin."""
@@ -1,6 +1,7 @@
 """Init file for Supervisor auth/SSO RESTful API."""
 import asyncio
 import logging
+from typing import Any

 from aiohttp import BasicAuth, web
 from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -8,11 +9,19 @@ from aiohttp.web_exceptions import HTTPUnauthorized
 import voluptuous as vol

 from ..addons.addon import Addon
-from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
+from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIForbidden
 from ..utils.json import json_loads
-from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
+from .const import (
+    ATTR_GROUP_IDS,
+    ATTR_IS_ACTIVE,
+    ATTR_IS_OWNER,
+    ATTR_LOCAL_ONLY,
+    ATTR_USERS,
+    CONTENT_TYPE_JSON,
+    CONTENT_TYPE_URL,
+)
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -90,3 +99,21 @@ class APIAuth(CoreSysAttributes):
     async def cache(self, request: web.Request) -> None:
         """Process cache reset request."""
         self.sys_auth.reset_data()
+
+    @api_process
+    async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
+        """List users on the Home Assistant instance."""
+        return {
+            ATTR_USERS: [
+                {
+                    ATTR_USERNAME: user[ATTR_USERNAME],
+                    ATTR_NAME: user[ATTR_NAME],
+                    ATTR_IS_OWNER: user[ATTR_IS_OWNER],
+                    ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
+                    ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
+                    ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
+                }
+                for user in await self.sys_auth.list_users()
+                if user[ATTR_USERNAME]
+            ]
+        }
@@ -1,5 +1,6 @@
 """Backups RESTful API."""
 import asyncio
+from collections.abc import Callable
 import errno
 import logging
 from pathlib import Path
@@ -11,6 +12,7 @@ from aiohttp import web
 from aiohttp.hdrs import CONTENT_DISPOSITION
 import voluptuous as vol

+from ..backups.backup import Backup
 from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
 from ..const import (
     ATTR_ADDONS,
@@ -33,12 +35,15 @@ from ..const import (
     ATTR_TIMEOUT,
     ATTR_TYPE,
     ATTR_VERSION,
+    BusEvent,
+    CoreState,
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
+from ..jobs import JobSchedulerOptions
 from ..mounts.const import MountUsage
 from ..resolution.const import UnhealthyReason
-from .const import CONTENT_TYPE_TAR
+from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -50,17 +55,21 @@ RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
 _ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]

 # pylint: disable=no-value-for-parameter
-SCHEMA_RESTORE_PARTIAL = vol.Schema(
+SCHEMA_RESTORE_FULL = vol.Schema(
     {
         vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
+        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
+    }
+)
+
+SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
+    {
         vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
         vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
         vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
     }
 )

-SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})
-
 SCHEMA_BACKUP_FULL = vol.Schema(
     {
         vol.Optional(ATTR_NAME): str,
@@ -68,6 +77,7 @@ SCHEMA_BACKUP_FULL = vol.Schema(
         vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
         vol.Optional(ATTR_LOCATON): vol.Maybe(str),
         vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
+        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
     }
 )

@@ -204,46 +214,109 @@ class APIBackups(CoreSysAttributes):

         return body

+    async def _background_backup_task(
+        self, backup_method: Callable, *args, **kwargs
+    ) -> tuple[asyncio.Task, str]:
+        """Start backup task in background and return task and job ID."""
+        event = asyncio.Event()
+        job, backup_task = self.sys_jobs.schedule_job(
+            backup_method, JobSchedulerOptions(), *args, **kwargs
+        )
+
+        async def release_on_freeze(new_state: CoreState):
+            if new_state == CoreState.FREEZE:
+                event.set()
+
+        # Wait for system to get into freeze state before returning
+        # If the backup fails validation it will raise before getting there
+        listener = self.sys_bus.register_event(
+            BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
+        )
+        try:
+            await asyncio.wait(
+                (
+                    backup_task,
+                    self.sys_create_task(event.wait()),
+                ),
+                return_when=asyncio.FIRST_COMPLETED,
+            )
+            return (backup_task, job.uuid)
+        finally:
+            self.sys_bus.remove_listener(listener)
+
     @api_process
     async def backup_full(self, request):
         """Create full backup."""
         body = await api_validate(SCHEMA_BACKUP_FULL, request)
-        backup = await asyncio.shield(
-            self.sys_backups.do_backup_full(**self._location_to_mount(body))
+        background = body.pop(ATTR_BACKGROUND)
+        backup_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_backup_full, **self._location_to_mount(body)
         )
+
+        if background and not backup_task.done():
+            return {ATTR_JOB_ID: job_id}
+
+        backup: Backup = await backup_task
         if backup:
-            return {ATTR_SLUG: backup.slug}
-        return False
+            return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
+        raise APIError(
+            f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )

     @api_process
     async def backup_partial(self, request):
         """Create a partial backup."""
         body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
-        backup = await asyncio.shield(
-            self.sys_backups.do_backup_partial(**self._location_to_mount(body))
+        background = body.pop(ATTR_BACKGROUND)
+        backup_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_backup_partial, **self._location_to_mount(body)
        )
+
+        if background and not backup_task.done():
+            return {ATTR_JOB_ID: job_id}
+
+        backup: Backup = await backup_task
         if backup:
-            return {ATTR_SLUG: backup.slug}
-        return False
+            return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
+        raise APIError(
+            f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )

     @api_process
     async def restore_full(self, request):
         """Full restore of a backup."""
         backup = self._extract_slug(request)
         body = await api_validate(SCHEMA_RESTORE_FULL, request)
+        background = body.pop(ATTR_BACKGROUND)
+        restore_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_restore_full, backup, **body
+        )

-        return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))
+        if background and not restore_task.done() or await restore_task:
+            return {ATTR_JOB_ID: job_id}
+        raise APIError(
+            f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )

     @api_process
     async def restore_partial(self, request):
         """Partial restore a backup."""
         backup = self._extract_slug(request)
         body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
+        background = body.pop(ATTR_BACKGROUND)
+        restore_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_restore_partial, backup, **body
+        )

-        return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
+        if background and not restore_task.done() or await restore_task:
+            return {ATTR_JOB_ID: job_id}
+        raise APIError(
+            f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )

     @api_process
     async def freeze(self, request):
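Note: the backup endpoints above schedule the work as a job, wait until it either finishes or reaches the freeze checkpoint, and answer immediately with a job id when the caller asked for background mode. A standalone asyncio sketch of that control flow; the names, timings and job id are invented for illustration:

import asyncio
import uuid

async def do_backup(checkpoint: asyncio.Event) -> str:
    checkpoint.set()          # comparable to the system entering FREEZE
    await asyncio.sleep(0.1)  # the long-running part of the backup
    return "slug-1234"

async def api_backup(background: bool) -> dict:
    checkpoint = asyncio.Event()
    job_id = uuid.uuid4().hex
    task = asyncio.create_task(do_backup(checkpoint))

    # Return as soon as the backup reaches its checkpoint or finishes early.
    await asyncio.wait(
        (task, asyncio.create_task(checkpoint.wait())),
        return_when=asyncio.FIRST_COMPLETED,
    )
    if background and not task.done():
        return {"job_id": job_id}
    return {"job_id": job_id, "slug": await task}

print(asyncio.run(api_backup(background=True)))
print(asyncio.run(api_backup(background=False)))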
@@ -1,11 +1,14 @@
 """Const for API."""

+from enum import StrEnum
+
 CONTENT_TYPE_BINARY = "application/octet-stream"
 CONTENT_TYPE_JSON = "application/json"
 CONTENT_TYPE_PNG = "image/png"
 CONTENT_TYPE_TAR = "application/tar"
 CONTENT_TYPE_TEXT = "text/plain"
 CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
+CONTENT_TYPE_X_LOG = "text/x-log"

 COOKIE_INGRESS = "ingress_session"

@@ -13,6 +16,9 @@ ATTR_AGENT_VERSION = "agent_version"
 ATTR_APPARMOR_VERSION = "apparmor_version"
 ATTR_ATTRIBUTES = "attributes"
 ATTR_AVAILABLE_UPDATES = "available_updates"
+ATTR_BACKGROUND = "background"
+ATTR_BOOT_SLOT = "boot_slot"
+ATTR_BOOT_SLOTS = "boot_slots"
 ATTR_BOOT_TIMESTAMP = "boot_timestamp"
 ATTR_BOOTS = "boots"
 ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
@@ -30,25 +36,42 @@ ATTR_DT_UTC = "dt_utc"
 ATTR_EJECTABLE = "ejectable"
 ATTR_FALLBACK = "fallback"
 ATTR_FILESYSTEMS = "filesystems"
+ATTR_GROUP_IDS = "group_ids"
 ATTR_IDENTIFIERS = "identifiers"
+ATTR_IS_ACTIVE = "is_active"
+ATTR_IS_OWNER = "is_owner"
+ATTR_JOB_ID = "job_id"
 ATTR_JOBS = "jobs"
 ATTR_LLMNR = "llmnr"
 ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
+ATTR_LOCAL_ONLY = "local_only"
 ATTR_MDNS = "mdns"
 ATTR_MODEL = "model"
 ATTR_MOUNTS = "mounts"
 ATTR_MOUNT_POINTS = "mount_points"
 ATTR_PANEL_PATH = "panel_path"
 ATTR_REMOVABLE = "removable"
+ATTR_REMOVE_CONFIG = "remove_config"
 ATTR_REVISION = "revision"
+ATTR_SAFE_MODE = "safe_mode"
 ATTR_SEAT = "seat"
 ATTR_SIGNED = "signed"
 ATTR_STARTUP_TIME = "startup_time"
+ATTR_STATUS = "status"
 ATTR_SUBSYSTEM = "subsystem"
 ATTR_SYSFS = "sysfs"
 ATTR_SYSTEM_HEALTH_LED = "system_health_led"
 ATTR_TIME_DETECTED = "time_detected"
 ATTR_UPDATE_TYPE = "update_type"
-ATTR_USE_NTP = "use_ntp"
 ATTR_USAGE = "usage"
+ATTR_USE_NTP = "use_ntp"
+ATTR_USERS = "users"
 ATTR_VENDOR = "vendor"
+ATTR_VIRTUALIZATION = "virtualization"
+
+
+class BootSlot(StrEnum):
+    """Boot slots used by HAOS."""
+
+    A = "A"
+    B = "B"
@@ -15,7 +15,6 @@ from ..const import (
     AddonState,
 )
 from ..coresys import CoreSysAttributes
-from ..discovery.validate import valid_discovery_service
 from ..exceptions import APIError, APIForbidden
 from .utils import api_process, api_validate, require_home_assistant

@@ -24,7 +23,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 SCHEMA_DISCOVERY = vol.Schema(
     {
         vol.Required(ATTR_SERVICE): str,
-        vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
+        vol.Required(ATTR_CONFIG): dict,
     }
 )

@@ -71,15 +70,6 @@ class APIDiscovery(CoreSysAttributes):
         addon: Addon = request[REQUEST_FROM]
         service = body[ATTR_SERVICE]

-        try:
-            valid_discovery_service(service)
-        except vol.Invalid:
-            _LOGGER.warning(
-                "Received discovery message for unknown service %s from addon %s. Please report this to the maintainer of the add-on",
-                service,
-                addon.name,
-            )
-
         # Access?
         if body[ATTR_SERVICE] not in addon.discovery:
             _LOGGER.error(
@@ -26,8 +26,8 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import dns_server_list, version_tag
-from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -105,11 +105,6 @@ class APICoreDNS(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.dns.update(version))

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return DNS Docker logs."""
-        return self.sys_plugins.dns.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart CoreDNS plugin."""
@@ -16,7 +16,7 @@ from ..const import (
     ATTR_SYSTEM,
 )
 from ..coresys import CoreSysAttributes
-from ..dbus.udisks2 import UDisks2
+from ..dbus.udisks2 import UDisks2Manager
 from ..dbus.udisks2.block import UDisks2Block
 from ..dbus.udisks2.drive import UDisks2Drive
 from ..hardware.data import Device
@@ -72,7 +72,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
     }


-def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]:
+def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
     """Return a dict with information of a disk to be used in the API."""
     return {
         ATTR_VENDOR: drive.vendor,
@@ -36,8 +36,8 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import docker_image, network_port, version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .const import ATTR_SAFE_MODE
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -63,6 +63,12 @@ SCHEMA_UPDATE = vol.Schema(
     }
 )

+SCHEMA_RESTART = vol.Schema(
+    {
+        vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
+    }
+)
+

 class APIHomeAssistant(CoreSysAttributes):
     """Handle RESTful API for Home Assistant functions."""
@@ -94,6 +100,9 @@ class APIHomeAssistant(CoreSysAttributes):

         if ATTR_IMAGE in body:
             self.sys_homeassistant.image = body[ATTR_IMAGE]
+            self.sys_homeassistant.override_image = (
+                self.sys_homeassistant.image != self.sys_homeassistant.default_image
+            )

         if ATTR_BOOT in body:
             self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -164,20 +173,19 @@ class APIHomeAssistant(CoreSysAttributes):
         return asyncio.shield(self.sys_homeassistant.core.start())

     @api_process
-    def restart(self, request: web.Request) -> Awaitable[None]:
+    async def restart(self, request: web.Request) -> None:
         """Restart Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.core.restart())
+        body = await api_validate(SCHEMA_RESTART, request)
+
+        await asyncio.shield(
+            self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
+        )

     @api_process
     def rebuild(self, request: web.Request) -> Awaitable[None]:
         """Rebuild Home Assistant."""
         return asyncio.shield(self.sys_homeassistant.core.rebuild())

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Home Assistant Docker logs."""
-        return self.sys_homeassistant.core.logs()
-
     @api_process
     async def check(self, request: web.Request) -> None:
         """Check configuration of Home Assistant."""
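The hunks above add a SCHEMA_RESTART body so the Home Assistant restart endpoint accepts a safe_mode flag. A minimal client-side sketch of calling it with that flag follows; the http://supervisor/core/restart endpoint path and the SUPERVISOR_TOKEN environment variable are assumptions about how add-ons normally reach the Supervisor API, not something this diff shows.

import asyncio
import os

import aiohttp


async def restart_core_in_safe_mode() -> None:
    """Ask the Supervisor to restart Home Assistant Core with safe_mode enabled."""
    headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        # The body is validated against SCHEMA_RESTART; safe_mode defaults to False.
        async with session.post(
            "http://supervisor/core/restart", json={"safe_mode": True}
        ) as resp:
            resp.raise_for_status()


if __name__ == "__main__":
    asyncio.run(restart_core_in_safe_mode())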
@@ -1,4 +1,5 @@
 """Init file for Supervisor host RESTful API."""
+
 import asyncio
 from contextlib import suppress
 import logging
@@ -28,7 +29,14 @@ from ..const import (
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, HostLogError
-from ..host.const import PARAM_BOOT_ID, PARAM_FOLLOW, PARAM_SYSLOG_IDENTIFIER
+from ..host.const import (
+    PARAM_BOOT_ID,
+    PARAM_FOLLOW,
+    PARAM_SYSLOG_IDENTIFIER,
+    LogFormat,
+    LogFormatter,
+)
+from ..utils.systemd_journal import journal_logs_reader
 from .const import (
     ATTR_AGENT_VERSION,
     ATTR_APPARMOR_VERSION,
@@ -42,9 +50,11 @@ from .const import (
     ATTR_LLMNR_HOSTNAME,
     ATTR_STARTUP_TIME,
     ATTR_USE_NTP,
+    ATTR_VIRTUALIZATION,
     CONTENT_TYPE_TEXT,
+    CONTENT_TYPE_X_LOG,
 )
-from .utils import api_process, api_validate
+from .utils import api_process, api_process_raw, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -65,6 +75,7 @@ class APIHost(CoreSysAttributes):
             ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
             ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
             ATTR_CHASSIS: self.sys_host.info.chassis,
+            ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
             ATTR_CPE: self.sys_host.info.cpe,
             ATTR_DEPLOYMENT: self.sys_host.info.deployment,
             ATTR_DISK_FREE: self.sys_host.info.free_space,
@@ -153,11 +164,11 @@ class APIHost(CoreSysAttributes):
             raise APIError() from err
         return possible_offset

-    @api_process
-    async def advanced_logs(
+    async def advanced_logs_handler(
         self, request: web.Request, identifier: str | None = None, follow: bool = False
     ) -> web.StreamResponse:
         """Return systemd-journald logs."""
+        log_formatter = LogFormatter.PLAIN
         params = {}
         if identifier:
             params[PARAM_SYSLOG_IDENTIFIER] = identifier
@@ -165,6 +176,8 @@ class APIHost(CoreSysAttributes):
             params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
         else:
             params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
+            # host logs should be always verbose, no matter what Accept header is used
+            log_formatter = LogFormatter.VERBOSE

         if BOOTID in request.match_info:
             params[PARAM_BOOT_ID] = await self._get_boot_id(
@@ -175,28 +188,40 @@ class APIHost(CoreSysAttributes):

         if ACCEPT in request.headers and request.headers[ACCEPT] not in [
             CONTENT_TYPE_TEXT,
+            CONTENT_TYPE_X_LOG,
             "*/*",
         ]:
             raise APIError(
-                "Invalid content type requested. Only text/plain supported for now."
+                "Invalid content type requested. Only text/plain and text/x-log "
+                "supported for now."
             )

+        if request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
+            log_formatter = LogFormatter.VERBOSE
+
         if RANGE in request.headers:
             range_header = request.headers.get(RANGE)
         else:
             range_header = f"entries=:-{DEFAULT_RANGE}:"

         async with self.sys_host.logs.journald_logs(
-            params=params, range_header=range_header
+            params=params, range_header=range_header, accept=LogFormat.JOURNAL
         ) as resp:
             try:
                 response = web.StreamResponse()
                 response.content_type = CONTENT_TYPE_TEXT
                 await response.prepare(request)
-                async for data in resp.content:
-                    await response.write(data)
+                async for line in journal_logs_reader(resp, log_formatter):
+                    await response.write(line.encode("utf-8") + b"\n")
             except ConnectionResetError as ex:
                 raise APIError(
                     "Connection reset when trying to fetch data from systemd-journald."
                 ) from ex
         return response
+
+    @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
+    async def advanced_logs(
+        self, request: web.Request, identifier: str | None = None, follow: bool = False
+    ) -> web.StreamResponse:
+        """Return systemd-journald logs. Wrapped as standard API handler."""
+        return await self.advanced_logs_handler(request, identifier, follow)
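The host-logs hunks above pick a log formatter from the request's Accept header and the requested identifier. A tiny standalone sketch of that negotiation, with plain strings instead of the LogFormatter enum; it approximates the handler's branches rather than reproducing them exactly.

CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_X_LOG = "text/x-log"
SUPPORTED = (CONTENT_TYPE_TEXT, CONTENT_TYPE_X_LOG, "*/*")


def pick_formatter(accept: str | None, identifier: str | None) -> str:
    """Return 'verbose' or 'plain', roughly the way advanced_logs_handler does."""
    if accept and accept not in SUPPORTED:
        raise ValueError("Only text/plain and text/x-log are supported")
    # No identifier means the default host identifiers: always verbose output.
    if identifier is None or accept == CONTENT_TYPE_X_LOG:
        return "verbose"
    return "plain"


assert pick_formatter("text/x-log", "NetworkManager") == "verbose"
assert pick_formatter("text/plain", "NetworkManager") == "plain"
assert pick_formatter(None, None) == "verbose"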
@@ -6,6 +6,7 @@ from aiohttp import web
 import voluptuous as vol

 from ..coresys import CoreSysAttributes
+from ..exceptions import APIError
 from ..jobs import SupervisorJob
 from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
 from .const import ATTR_JOBS
@@ -21,7 +22,7 @@ SCHEMA_OPTIONS = vol.Schema(
 class APIJobs(CoreSysAttributes):
     """Handle RESTful API for OS functions."""

-    def _list_jobs(self) -> list[dict[str, Any]]:
+    def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
         """Return current job tree."""
         jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
         for job in self.sys_jobs.jobs:
@@ -34,9 +35,11 @@ class APIJobs(CoreSysAttributes):
                 jobs_by_parent[job.parent_id].append(job)

         job_list: list[dict[str, Any]] = []
-        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = [
-            (job_list, job) for job in jobs_by_parent.get(None, [])
-        ]
+        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
+            [(job_list, start)]
+            if start
+            else [(job_list, job) for job in jobs_by_parent.get(None, [])]
+        )

         while queue:
             (current_list, current_job) = queue.pop(0)
@@ -78,3 +81,19 @@
     async def reset(self, request: web.Request) -> None:
         """Reset options for JobManager."""
         self.sys_jobs.reset_data()
+
+    @api_process
+    async def job_info(self, request: web.Request) -> dict[str, Any]:
+        """Get details of a job by ID."""
+        job = self.sys_jobs.get_job(request.match_info.get("uuid"))
+        return self._list_jobs(job)[0]
+
+    @api_process
+    async def remove_job(self, request: web.Request) -> None:
+        """Remove a completed job."""
+        job = self.sys_jobs.get_job(request.match_info.get("uuid"))
+
+        if not job.done:
+            raise APIError(f"Job {job.uuid} is not done!")
+
+        self.sys_jobs.remove_job(job)
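The _list_jobs change above switches from always walking the root jobs to optionally starting the tree at a single job, which is what the new job_info handler relies on. A small self-contained sketch of the same breadth-first walk over a flat parent_id list; plain dicts stand in for SupervisorJob, and the record contents are made up for illustration.

from typing import Any

# Flat records: each job knows only its parent's id, like SupervisorJob.parent_id.
JOBS = [
    {"uuid": "a", "parent_id": None, "name": "backup_manager_full_backup"},
    {"uuid": "b", "parent_id": "a", "name": "backup_store_addons"},
    {"uuid": "c", "parent_id": "b", "name": "backup_addon_save"},
]


def list_jobs(start: dict[str, Any] | None = None) -> list[dict[str, Any]]:
    """Return the job tree, optionally rooted at a single job."""
    by_parent: dict[str | None, list[dict[str, Any]]] = {}
    for job in JOBS:
        by_parent.setdefault(job["parent_id"], []).append(job)

    job_list: list[dict[str, Any]] = []
    queue = [(job_list, start)] if start else [(job_list, j) for j in by_parent.get(None, [])]

    while queue:
        current_list, job = queue.pop(0)
        entry = {"name": job["name"], "child_jobs": []}
        current_list.append(entry)
        # Children are queued so the walk stays iterative instead of recursive.
        queue.extend((entry["child_jobs"], child) for child in by_parent.get(job["uuid"], []))
    return job_list


print(list_jobs())         # whole tree
print(list_jobs(JOBS[1]))  # subtree rooted at job "b"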
@@ -103,6 +103,8 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
         r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
         r"|/audio/.+"
         r"|/auth/cache"
+        r"|/available_updates"
+        r"|/backups.*"
         r"|/cli/.+"
         r"|/core/.+"
         r"|/dns/.+"
@@ -112,16 +114,17 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
         r"|/hassos/.+"
         r"|/homeassistant/.+"
         r"|/host/.+"
+        r"|/mounts.*"
         r"|/multicast/.+"
         r"|/network/.+"
         r"|/observer/.+"
-        r"|/os/.+"
+        r"|/os/(?!datadisk/wipe).+"
+        r"|/refresh_updates"
         r"|/resolution/.+"
-        r"|/backups.*"
+        r"|/security/.+"
         r"|/snapshots.*"
         r"|/store.*"
         r"|/supervisor/.+"
-        r"|/security/.+"
         r")$"
     ),
     ROLE_ADMIN: re.compile(
@@ -23,8 +23,7 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -69,11 +68,6 @@ class APIMulticast(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.multicast.update(version))

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Multicast Docker logs."""
-        return self.sys_plugins.multicast.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Multicast plugin."""
@@ -19,6 +19,7 @@ from ..const import (
     ATTR_POWER_LED,
     ATTR_SERIAL,
     ATTR_SIZE,
+    ATTR_STATE,
     ATTR_UPDATE_AVAILABLE,
     ATTR_VERSION,
     ATTR_VERSION_LATEST,
@@ -28,13 +29,17 @@ from ..exceptions import BoardInvalidError
 from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..validate import version_tag
 from .const import (
+    ATTR_BOOT_SLOT,
+    ATTR_BOOT_SLOTS,
     ATTR_DATA_DISK,
     ATTR_DEV_PATH,
     ATTR_DEVICE,
     ATTR_DISKS,
     ATTR_MODEL,
+    ATTR_STATUS,
     ATTR_SYSTEM_HEALTH_LED,
     ATTR_VENDOR,
+    BootSlot,
 )
 from .utils import api_process, api_validate

@@ -42,6 +47,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)

 # pylint: disable=no-value-for-parameter
 SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
+SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
 SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})

 SCHEMA_YELLOW_OPTIONS = vol.Schema(
@@ -74,6 +80,15 @@ class APIOS(CoreSysAttributes):
             ATTR_BOARD: self.sys_os.board,
             ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
             ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
+            ATTR_BOOT_SLOTS: {
+                slot.bootname: {
+                    ATTR_STATE: slot.state,
+                    ATTR_STATUS: slot.boot_status,
+                    ATTR_VERSION: slot.bundle_version,
+                }
+                for slot in self.sys_os.slots
+                if slot.bootname
+            },
         }

     @api_process
@@ -96,6 +111,17 @@ class APIOS(CoreSysAttributes):

         await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))

+    @api_process
+    def wipe_data(self, request: web.Request) -> Awaitable[None]:
+        """Trigger data disk wipe on Host."""
+        return asyncio.shield(self.sys_os.datadisk.wipe_disk())
+
+    @api_process
+    async def set_boot_slot(self, request: web.Request) -> None:
+        """Change the active boot slot and reboot into it."""
+        body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
+        await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
+
     @api_process
     async def list_data(self, request: web.Request) -> dict[str, Any]:
         """Return possible data targets."""
@@ -130,13 +156,17 @@ class APIOS(CoreSysAttributes):
         body = await api_validate(SCHEMA_GREEN_OPTIONS, request)

         if ATTR_ACTIVITY_LED in body:
-            self.sys_dbus.agent.board.green.activity_led = body[ATTR_ACTIVITY_LED]
+            await self.sys_dbus.agent.board.green.set_activity_led(
+                body[ATTR_ACTIVITY_LED]
+            )

         if ATTR_POWER_LED in body:
-            self.sys_dbus.agent.board.green.power_led = body[ATTR_POWER_LED]
+            await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])

         if ATTR_SYSTEM_HEALTH_LED in body:
-            self.sys_dbus.agent.board.green.user_led = body[ATTR_SYSTEM_HEALTH_LED]
+            await self.sys_dbus.agent.board.green.set_user_led(
+                body[ATTR_SYSTEM_HEALTH_LED]
+            )

         self.sys_dbus.agent.board.green.save_data()

@@ -155,13 +185,15 @@ class APIOS(CoreSysAttributes):
         body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)

         if ATTR_DISK_LED in body:
-            self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED]
+            await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])

         if ATTR_HEARTBEAT_LED in body:
-            self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED]
+            await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
+                body[ATTR_HEARTBEAT_LED]
+            )

         if ATTR_POWER_LED in body:
-            self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]
+            await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])

         self.sys_dbus.agent.board.yellow.save_data()
         self.sys_resolution.create_issue(
@@ -14,6 +14,7 @@ from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized

 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, HomeAssistantAPIError, HomeAssistantAuthError
+from ..utils.json import json_dumps

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -145,7 +146,8 @@ class APIProxy(CoreSysAttributes):
                 {
                     "type": "auth",
                     "access_token": self.sys_homeassistant.api.access_token,
-                }
+                },
+                dumps=json_dumps,
             )

             data = await client.receive_json()
@@ -184,6 +186,9 @@ class APIProxy(CoreSysAttributes):
             return await target.send_str(msg.data)
         if msg.type == WSMsgType.BINARY:
             return await target.send_bytes(msg.data)
+        if msg.type == WSMsgType.CLOSE:
+            _LOGGER.debug("Received close message from WebSocket.")
+            return await target.close()

         raise TypeError(
             f"Cannot proxy websocket message of unsupported type: {msg.type}"
@@ -198,11 +203,13 @@ class APIProxy(CoreSysAttributes):
         # init server
         server = web.WebSocketResponse(heartbeat=30)
         await server.prepare(request)
+        addon_name = None

         # handle authentication
         try:
             await server.send_json(
-                {"type": "auth_required", "ha_version": self.sys_homeassistant.version}
+                {"type": "auth_required", "ha_version": self.sys_homeassistant.version},
+                dumps=json_dumps,
             )

             # Check API access
@@ -215,14 +222,17 @@ class APIProxy(CoreSysAttributes):
             if not addon or not addon.access_homeassistant_api:
                 _LOGGER.warning("Unauthorized WebSocket access!")
                 await server.send_json(
-                    {"type": "auth_invalid", "message": "Invalid access"}
+                    {"type": "auth_invalid", "message": "Invalid access"},
+                    dumps=json_dumps,
                 )
                 return server

-            _LOGGER.info("WebSocket access from %s", addon.slug)
+            addon_name = addon.slug
+            _LOGGER.info("WebSocket access from %s", addon_name)

             await server.send_json(
-                {"type": "auth_ok", "ha_version": self.sys_homeassistant.version}
+                {"type": "auth_ok", "ha_version": self.sys_homeassistant.version},
+                dumps=json_dumps,
             )
         except (RuntimeError, ValueError) as err:
             _LOGGER.error("Can't initialize handshake: %s", err)
@@ -277,5 +287,5 @@ class APIProxy(CoreSysAttributes):
         if not server.closed:
             await server.close()

-        _LOGGER.info("Home Assistant WebSocket API connection is closed")
+        _LOGGER.info("Home Assistant WebSocket API for %s closed", addon_name)
         return server
@@ -49,7 +49,7 @@ from ..store.validate import repositories
 from ..utils.sentry import close_sentry, init_sentry
 from ..utils.validate import validate_timezone
 from ..validate import version_tag, wait_boot
-from .const import CONTENT_TYPE_BINARY
+from .const import CONTENT_TYPE_TEXT
 from .utils import api_process, api_process_raw, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -140,7 +140,7 @@ class APISupervisor(CoreSysAttributes):

         if ATTR_DIAGNOSTICS in body:
             self.sys_config.diagnostics = body[ATTR_DIAGNOSTICS]
-            self.sys_dbus.agent.diagnostics = body[ATTR_DIAGNOSTICS]
+            await self.sys_dbus.agent.set_diagnostics(body[ATTR_DIAGNOSTICS])

             if body[ATTR_DIAGNOSTICS]:
                 init_sentry(self.coresys)
@@ -229,7 +229,7 @@ class APISupervisor(CoreSysAttributes):
         """Soft restart Supervisor."""
         return asyncio.shield(self.sys_supervisor.restart())

-    @api_process_raw(CONTENT_TYPE_BINARY)
+    @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
     def logs(self, request: web.Request) -> Awaitable[bytes]:
         """Return supervisor Docker logs."""
         return self.sys_supervisor.logs()
@@ -13,6 +13,7 @@ from ..const import (
     HEADER_TOKEN,
     HEADER_TOKEN_OLD,
     JSON_DATA,
+    JSON_JOB_ID,
     JSON_MESSAGE,
     JSON_RESULT,
     REQUEST_FROM,
@@ -24,7 +25,7 @@ from ..exceptions import APIError, APIForbidden, DockerAPIError, HassioError
 from ..utils import check_exception_chain, get_message_from_exception_chain
 from ..utils.json import json_dumps, json_loads as json_loads_util
 from ..utils.log_format import format_message
-from .const import CONTENT_TYPE_BINARY
+from . import const


 def excract_supervisor_token(request: web.Request) -> str | None:
@@ -90,7 +91,7 @@ def require_home_assistant(method):
     return wrap_api


-def api_process_raw(content):
+def api_process_raw(content, *, error_type=None):
     """Wrap content_type into function."""

     def wrap_method(method):
@@ -100,15 +101,15 @@ def api_process_raw(content):
             """Return api information."""
             try:
                 msg_data = await method(api, *args, **kwargs)
-                msg_type = content
-            except (APIError, APIForbidden) as err:
-                msg_data = str(err).encode()
-                msg_type = CONTENT_TYPE_BINARY
-            except HassioError:
-                msg_data = b""
-                msg_type = CONTENT_TYPE_BINARY
+            except HassioError as err:
+                return api_return_error(
+                    err, error_type=error_type or const.CONTENT_TYPE_BINARY
+                )

-            return web.Response(body=msg_data, content_type=msg_type)
+            if isinstance(msg_data, (web.Response, web.StreamResponse)):
+                return msg_data
+
+            return web.Response(body=msg_data, content_type=content)

         return wrap_api

@@ -116,20 +117,40 @@


 def api_return_error(
-    error: Exception | None = None, message: str | None = None
+    error: Exception | None = None,
+    message: str | None = None,
+    error_type: str | None = None,
 ) -> web.Response:
     """Return an API error message."""
     if error and not message:
         message = get_message_from_exception_chain(error)
         if check_exception_chain(error, DockerAPIError):
             message = format_message(message)
+    if not message:
+        message = "Unknown error, see supervisor"
+
+    status = 400
+    if is_api_error := isinstance(error, APIError):
+        status = error.status
+
+    match error_type:
+        case const.CONTENT_TYPE_TEXT:
+            return web.Response(body=message, content_type=error_type, status=status)
+        case const.CONTENT_TYPE_BINARY:
+            return web.Response(
+                body=message.encode(), content_type=error_type, status=status
+            )
+        case _:
+            result = {
+                JSON_RESULT: RESULT_ERROR,
+                JSON_MESSAGE: message,
+            }
+            if is_api_error and error.job_id:
+                result[JSON_JOB_ID] = error.job_id
+
             return web.json_response(
-        {
-            JSON_RESULT: RESULT_ERROR,
-            JSON_MESSAGE: message or "Unknown error, see supervisor",
-        },
-        status=400,
+                result,
+                status=status,
                 dumps=json_dumps,
             )

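The rewritten api_return_error above now picks the response shape from error_type and propagates an APIError's status and job_id. A stripped-down, standalone sketch of that dispatch follows; FakeAPIError and the plain-dict "responses" are illustrative stand-ins for the real APIError and aiohttp web.Response objects, not the supervisor's actual implementation.

class FakeAPIError(Exception):
    """Stand-in for supervisor's APIError carrying an HTTP status and optional job id."""

    def __init__(self, message: str, status: int = 400, job_id: str | None = None) -> None:
        super().__init__(message)
        self.status = status
        self.job_id = job_id


def return_error(error: Exception, error_type: str | None = None) -> dict:
    """Mirror the error_type dispatch with plain dicts instead of web.Response."""
    message = str(error) or "Unknown error, see supervisor"
    status = error.status if isinstance(error, FakeAPIError) else 400

    match error_type:
        case "text/plain":
            return {"body": message, "content_type": error_type, "status": status}
        case "application/octet-stream":
            return {"body": message.encode(), "content_type": error_type, "status": status}
        case _:
            result = {"result": "error", "message": message}
            if isinstance(error, FakeAPIError) and error.job_id:
                result["job_id"] = error.job_id
            return {"json": result, "status": status}


print(return_error(FakeAPIError("backup failed", 400, "1234")))
print(return_error(FakeAPIError("no logs", 404), "text/plain"))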
@@ -2,11 +2,18 @@
 import asyncio
 import hashlib
 import logging
+from typing import Any

 from .addons.addon import Addon
-from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_USERNAME, FILE_HASSIO_AUTH
+from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_TYPE, ATTR_USERNAME, FILE_HASSIO_AUTH
 from .coresys import CoreSys, CoreSysAttributes
-from .exceptions import AuthError, AuthPasswordResetError, HomeAssistantAPIError
+from .exceptions import (
+    AuthError,
+    AuthListUsersError,
+    AuthPasswordResetError,
+    HomeAssistantAPIError,
+    HomeAssistantWSError,
+)
 from .utils.common import FileConfiguration
 from .validate import SCHEMA_AUTH_CONFIG

@@ -132,6 +139,17 @@ class Auth(FileConfiguration, CoreSysAttributes):

             raise AuthPasswordResetError()

+    async def list_users(self) -> list[dict[str, Any]]:
+        """List users on the Home Assistant instance."""
+        try:
+            return await self.sys_homeassistant.websocket.async_send_command(
+                {ATTR_TYPE: "config/auth/list"}
+            )
+        except HomeAssistantWSError:
+            _LOGGER.error("Can't request listing users on Home Assistant!")
+
+        raise AuthListUsersError()
+
     @staticmethod
     def _rehash(value: str, salt2: str = "") -> str:
         """Rehash a value."""
@@ -1,14 +1,18 @@
|
|||||||
"""Representation of a backup file."""
|
"""Representation of a backup file."""
|
||||||
import asyncio
|
import asyncio
|
||||||
from base64 import b64decode, b64encode
|
from base64 import b64decode, b64encode
|
||||||
|
from collections import defaultdict
|
||||||
from collections.abc import Awaitable
|
from collections.abc import Awaitable
|
||||||
|
from copy import deepcopy
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from functools import cached_property
|
from functools import cached_property
|
||||||
|
import io
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import tarfile
|
import tarfile
|
||||||
from tempfile import TemporaryDirectory
|
from tempfile import TemporaryDirectory
|
||||||
|
import time
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
|
from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
|
||||||
@@ -42,11 +46,14 @@ from ..const import (
|
|||||||
ATTR_VERSION,
|
ATTR_VERSION,
|
||||||
CRYPTO_AES128,
|
CRYPTO_AES128,
|
||||||
)
|
)
|
||||||
from ..coresys import CoreSys, CoreSysAttributes
|
from ..coresys import CoreSys
|
||||||
from ..exceptions import AddonsError, BackupError
|
from ..exceptions import AddonsError, BackupError, BackupInvalidError
|
||||||
|
from ..jobs.const import JOB_GROUP_BACKUP
|
||||||
|
from ..jobs.decorator import Job
|
||||||
|
from ..jobs.job_group import JobGroup
|
||||||
from ..utils import remove_folder
|
from ..utils import remove_folder
|
||||||
from ..utils.dt import parse_datetime, utcnow
|
from ..utils.dt import parse_datetime, utcnow
|
||||||
from ..utils.json import write_json_file
|
from ..utils.json import json_bytes
|
||||||
from .const import BUF_SIZE, BackupType
|
from .const import BUF_SIZE, BackupType
|
||||||
from .utils import key_to_iv, password_to_key
|
from .utils import key_to_iv, password_to_key
|
||||||
from .validate import SCHEMA_BACKUP
|
from .validate import SCHEMA_BACKUP
|
||||||
@@ -54,15 +61,25 @@ from .validate import SCHEMA_BACKUP
|
|||||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Backup(CoreSysAttributes):
|
class Backup(JobGroup):
|
||||||
"""A single Supervisor backup."""
|
"""A single Supervisor backup."""
|
||||||
|
|
||||||
def __init__(self, coresys: CoreSys, tar_file: Path):
|
def __init__(
|
||||||
|
self,
|
||||||
|
coresys: CoreSys,
|
||||||
|
tar_file: Path,
|
||||||
|
slug: str,
|
||||||
|
data: dict[str, Any] | None = None,
|
||||||
|
):
|
||||||
"""Initialize a backup."""
|
"""Initialize a backup."""
|
||||||
self.coresys: CoreSys = coresys
|
super().__init__(
|
||||||
|
coresys, JOB_GROUP_BACKUP.format_map(defaultdict(str, slug=slug)), slug
|
||||||
|
)
|
||||||
self._tarfile: Path = tar_file
|
self._tarfile: Path = tar_file
|
||||||
self._data: dict[str, Any] = {}
|
self._data: dict[str, Any] = data or {ATTR_SLUG: slug}
|
||||||
self._tmp = None
|
self._tmp = None
|
||||||
|
self._outer_secure_tarfile: SecureTarFile | None = None
|
||||||
|
self._outer_secure_tarfile_tarfile: tarfile.TarFile | None = None
|
||||||
self._key: bytes | None = None
|
self._key: bytes | None = None
|
||||||
self._aes: Cipher | None = None
|
self._aes: Cipher | None = None
|
||||||
|
|
||||||
@@ -87,7 +104,7 @@ class Backup(CoreSysAttributes):
|
|||||||
return self._data[ATTR_NAME]
|
return self._data[ATTR_NAME]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def date(self):
|
def date(self) -> str:
|
||||||
"""Return backup date."""
|
"""Return backup date."""
|
||||||
return self._data[ATTR_DATE]
|
return self._data[ATTR_DATE]
|
||||||
|
|
||||||
@@ -102,32 +119,32 @@ class Backup(CoreSysAttributes):
|
|||||||
return self._data[ATTR_COMPRESSED]
|
return self._data[ATTR_COMPRESSED]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def addons(self):
|
def addons(self) -> list[dict[str, Any]]:
|
||||||
"""Return backup date."""
|
"""Return backup date."""
|
||||||
return self._data[ATTR_ADDONS]
|
return self._data[ATTR_ADDONS]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def addon_list(self):
|
def addon_list(self) -> list[str]:
|
||||||
"""Return a list of add-ons slugs."""
|
"""Return a list of add-ons slugs."""
|
||||||
return [addon_data[ATTR_SLUG] for addon_data in self.addons]
|
return [addon_data[ATTR_SLUG] for addon_data in self.addons]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def folders(self):
|
def folders(self) -> list[str]:
|
||||||
"""Return list of saved folders."""
|
"""Return list of saved folders."""
|
||||||
return self._data[ATTR_FOLDERS]
|
return self._data[ATTR_FOLDERS]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def repositories(self):
|
def repositories(self) -> list[str]:
|
||||||
"""Return backup date."""
|
"""Return backup date."""
|
||||||
return self._data[ATTR_REPOSITORIES]
|
return self._data[ATTR_REPOSITORIES]
|
||||||
|
|
||||||
@repositories.setter
|
@repositories.setter
|
||||||
def repositories(self, value):
|
def repositories(self, value: list[str]) -> None:
|
||||||
"""Set backup date."""
|
"""Set backup date."""
|
||||||
self._data[ATTR_REPOSITORIES] = value
|
self._data[ATTR_REPOSITORIES] = value
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def homeassistant_version(self):
|
def homeassistant_version(self) -> AwesomeVersion:
|
||||||
"""Return backup Home Assistant version."""
|
"""Return backup Home Assistant version."""
|
||||||
if self.homeassistant is None:
|
if self.homeassistant is None:
|
||||||
return None
|
return None
|
||||||
@@ -141,7 +158,7 @@ class Backup(CoreSysAttributes):
|
|||||||
return self.homeassistant[ATTR_EXCLUDE_DATABASE]
|
return self.homeassistant[ATTR_EXCLUDE_DATABASE]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def homeassistant(self):
|
def homeassistant(self) -> dict[str, Any]:
|
||||||
"""Return backup Home Assistant data."""
|
"""Return backup Home Assistant data."""
|
||||||
return self._data[ATTR_HOMEASSISTANT]
|
return self._data[ATTR_HOMEASSISTANT]
|
||||||
|
|
||||||
@@ -151,12 +168,12 @@ class Backup(CoreSysAttributes):
|
|||||||
return self._data[ATTR_SUPERVISOR_VERSION]
|
return self._data[ATTR_SUPERVISOR_VERSION]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def docker(self):
|
def docker(self) -> dict[str, Any]:
|
||||||
"""Return backup Docker config data."""
|
"""Return backup Docker config data."""
|
||||||
return self._data.get(ATTR_DOCKER, {})
|
return self._data.get(ATTR_DOCKER, {})
|
||||||
|
|
||||||
@docker.setter
|
@docker.setter
|
||||||
def docker(self, value):
|
def docker(self, value: dict[str, Any]) -> None:
|
||||||
"""Set the Docker config data."""
|
"""Set the Docker config data."""
|
||||||
self._data[ATTR_DOCKER] = value
|
self._data[ATTR_DOCKER] = value
|
||||||
|
|
||||||
@@ -169,32 +186,36 @@ class Backup(CoreSysAttributes):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def size(self):
|
def size(self) -> float:
|
||||||
"""Return backup size."""
|
"""Return backup size."""
|
||||||
if not self.tarfile.is_file():
|
if not self.tarfile.is_file():
|
||||||
return 0
|
return 0
|
||||||
return round(self.tarfile.stat().st_size / 1048576, 2) # calc mbyte
|
return round(self.tarfile.stat().st_size / 1048576, 2) # calc mbyte
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_new(self):
|
def is_new(self) -> bool:
|
||||||
"""Return True if there is new."""
|
"""Return True if there is new."""
|
||||||
return not self.tarfile.exists()
|
return not self.tarfile.exists()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def tarfile(self):
|
def tarfile(self) -> Path:
|
||||||
"""Return path to backup tarfile."""
|
"""Return path to backup tarfile."""
|
||||||
return self._tarfile
|
return self._tarfile
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_current(self):
|
def is_current(self) -> bool:
|
||||||
"""Return true if backup is current, false if stale."""
|
"""Return true if backup is current, false if stale."""
|
||||||
return parse_datetime(self.date) >= utcnow() - timedelta(
|
return parse_datetime(self.date) >= utcnow() - timedelta(
|
||||||
days=self.sys_backups.days_until_stale
|
days=self.sys_backups.days_until_stale
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def data(self) -> dict[str, Any]:
|
||||||
|
"""Returns a copy of the data."""
|
||||||
|
return deepcopy(self._data)
|
||||||
|
|
||||||
def new(
|
def new(
|
||||||
self,
|
self,
|
||||||
slug: str,
|
|
||||||
name: str,
|
name: str,
|
||||||
date: str,
|
date: str,
|
||||||
sys_type: BackupType,
|
sys_type: BackupType,
|
||||||
@@ -204,7 +225,6 @@ class Backup(CoreSysAttributes):
|
|||||||
"""Initialize a new backup."""
|
"""Initialize a new backup."""
|
||||||
# Init metadata
|
# Init metadata
|
||||||
self._data[ATTR_VERSION] = 2
|
self._data[ATTR_VERSION] = 2
|
||||||
self._data[ATTR_SLUG] = slug
|
|
||||||
self._data[ATTR_NAME] = name
|
self._data[ATTR_NAME] = name
|
||||||
self._data[ATTR_DATE] = date
|
self._data[ATTR_DATE] = date
|
||||||
self._data[ATTR_TYPE] = sys_type
|
self._data[ATTR_TYPE] = sys_type
|
||||||
@@ -305,13 +325,21 @@ class Backup(CoreSysAttributes):
|
|||||||
|
|
||||||
async def __aenter__(self):
|
async def __aenter__(self):
|
||||||
"""Async context to open a backup."""
|
"""Async context to open a backup."""
|
||||||
self._tmp = TemporaryDirectory(dir=str(self.tarfile.parent))
|
|
||||||
|
|
||||||
# create a backup
|
# create a backup
|
||||||
if not self.tarfile.is_file():
|
if not self.tarfile.is_file():
|
||||||
return self
|
self._outer_secure_tarfile = SecureTarFile(
|
||||||
|
self.tarfile,
|
||||||
|
"w",
|
||||||
|
gzip=False,
|
||||||
|
bufsize=BUF_SIZE,
|
||||||
|
)
|
||||||
|
self._outer_secure_tarfile_tarfile = self._outer_secure_tarfile.__enter__()
|
||||||
|
return
|
||||||
|
|
||||||
# extract an existing backup
|
# extract an existing backup
|
||||||
|
self._tmp = TemporaryDirectory(dir=str(self.tarfile.parent))
|
||||||
|
|
||||||
def _extract_backup():
|
def _extract_backup():
|
||||||
"""Extract a backup."""
|
"""Extract a backup."""
|
||||||
with tarfile.open(self.tarfile, "r:") as tar:
|
with tarfile.open(self.tarfile, "r:") as tar:
|
||||||
@@ -326,8 +354,26 @@ class Backup(CoreSysAttributes):
|
|||||||
async def __aexit__(self, exception_type, exception_value, traceback):
|
async def __aexit__(self, exception_type, exception_value, traceback):
|
||||||
"""Async context to close a backup."""
|
"""Async context to close a backup."""
|
||||||
# exists backup or exception on build
|
# exists backup or exception on build
|
||||||
if self.tarfile.is_file() or exception_type is not None:
|
try:
|
||||||
self._tmp.cleanup()
|
await self._aexit(exception_type, exception_value, traceback)
|
||||||
|
finally:
|
||||||
|
if self._tmp:
|
||||||
|
self._tmp.cleanup()
|
||||||
|
if self._outer_secure_tarfile:
|
||||||
|
self._outer_secure_tarfile.__exit__(
|
||||||
|
exception_type, exception_value, traceback
|
||||||
|
)
|
||||||
|
self._outer_secure_tarfile = None
|
||||||
|
self._outer_secure_tarfile_tarfile = None
|
||||||
|
|
||||||
|
async def _aexit(self, exception_type, exception_value, traceback):
|
||||||
|
"""Cleanup after backup creation.
|
||||||
|
|
||||||
|
This is a separate method to allow it to be called from __aexit__ to ensure
|
||||||
|
that cleanup is always performed, even if an exception is raised.
|
||||||
|
"""
|
||||||
|
# If we're not creating a new backup, or if an exception was raised, we're done
|
||||||
|
if not self._outer_secure_tarfile or exception_type is not None:
|
||||||
return
|
return
|
||||||
|
|
||||||
# validate data
|
# validate data
|
||||||
@@ -340,161 +386,249 @@ class Backup(CoreSysAttributes):
|
|||||||
raise ValueError("Invalid config") from None
|
raise ValueError("Invalid config") from None
|
||||||
|
|
||||||
# new backup, build it
|
# new backup, build it
|
||||||
def _create_backup():
|
def _add_backup_json():
|
||||||
"""Create a new backup."""
|
"""Create a new backup."""
|
||||||
with tarfile.open(self.tarfile, "w:") as tar:
|
raw_bytes = json_bytes(self._data)
|
||||||
tar.add(self._tmp.name, arcname=".")
|
fileobj = io.BytesIO(raw_bytes)
|
||||||
|
tar_info = tarfile.TarInfo(name="./backup.json")
|
||||||
|
tar_info.size = len(raw_bytes)
|
||||||
|
tar_info.mtime = int(time.time())
|
||||||
|
self._outer_secure_tarfile_tarfile.addfile(tar_info, fileobj=fileobj)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
write_json_file(Path(self._tmp.name, "backup.json"), self._data)
|
await self.sys_run_in_executor(_add_backup_json)
|
||||||
await self.sys_run_in_executor(_create_backup)
|
|
||||||
except (OSError, json.JSONDecodeError) as err:
|
except (OSError, json.JSONDecodeError) as err:
|
||||||
|
self.sys_jobs.current.capture_error(BackupError("Can't write backup"))
|
||||||
_LOGGER.error("Can't write backup: %s", err)
|
_LOGGER.error("Can't write backup: %s", err)
|
||||||
finally:
|
|
||||||
self._tmp.cleanup()
|
|
||||||
|
|
||||||
|
@Job(name="backup_addon_save", cleanup=False)
|
||||||
|
async def _addon_save(self, addon: Addon) -> asyncio.Task | None:
|
||||||
|
"""Store an add-on into backup."""
|
||||||
|
self.sys_jobs.current.reference = addon.slug
|
||||||
|
|
||||||
|
tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}"
|
||||||
|
|
||||||
|
addon_file = self._outer_secure_tarfile.create_inner_tar(
|
||||||
|
f"./{tar_name}",
|
||||||
|
gzip=self.compressed,
|
||||||
|
key=self._key,
|
||||||
|
)
|
||||||
|
# Take backup
|
||||||
|
try:
|
||||||
|
start_task = await addon.backup(addon_file)
|
||||||
|
except AddonsError as err:
|
||||||
|
raise BackupError(
|
||||||
|
f"Can't create backup for {addon.slug}", _LOGGER.error
|
||||||
|
) from err
|
||||||
|
|
||||||
|
# Store to config
|
||||||
|
self._data[ATTR_ADDONS].append(
|
||||||
|
{
|
||||||
|
ATTR_SLUG: addon.slug,
|
||||||
|
ATTR_NAME: addon.name,
|
||||||
|
ATTR_VERSION: addon.version,
|
||||||
|
ATTR_SIZE: addon_file.size,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return start_task
|
||||||
|
|
||||||
|
@Job(name="backup_store_addons", cleanup=False)
|
||||||
async def store_addons(self, addon_list: list[str]) -> list[asyncio.Task]:
|
async def store_addons(self, addon_list: list[str]) -> list[asyncio.Task]:
|
||||||
"""Add a list of add-ons into backup.
|
"""Add a list of add-ons into backup.
|
||||||
|
|
||||||
For each addon that needs to be started after backup, returns a Task which
|
For each addon that needs to be started after backup, returns a Task which
|
||||||
completes when that addon has state 'started' (see addon.start).
|
completes when that addon has state 'started' (see addon.start).
|
||||||
"""
|
"""
|
||||||
|
# Save Add-ons sequential avoid issue on slow IO
|
||||||
async def _addon_save(addon: Addon) -> asyncio.Task | None:
|
|
||||||
"""Task to store an add-on into backup."""
|
|
||||||
tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}"
|
|
||||||
addon_file = SecureTarFile(
|
|
||||||
Path(self._tmp.name, tar_name),
|
|
||||||
"w",
|
|
||||||
key=self._key,
|
|
||||||
gzip=self.compressed,
|
|
||||||
bufsize=BUF_SIZE,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Take backup
|
|
||||||
try:
|
|
||||||
start_task = await addon.backup(addon_file)
|
|
||||||
except AddonsError:
|
|
||||||
_LOGGER.error("Can't create backup for %s", addon.slug)
|
|
||||||
return
|
|
||||||
|
|
||||||
# Store to config
|
|
||||||
self._data[ATTR_ADDONS].append(
|
|
||||||
{
|
|
||||||
ATTR_SLUG: addon.slug,
|
|
||||||
ATTR_NAME: addon.name,
|
|
||||||
ATTR_VERSION: addon.version,
|
|
||||||
ATTR_SIZE: addon_file.size,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return start_task
|
|
||||||
|
|
||||||
# Save Add-ons sequential
|
|
||||||
# avoid issue on slow IO
|
|
||||||
start_tasks: list[asyncio.Task] = []
|
start_tasks: list[asyncio.Task] = []
|
||||||
for addon in addon_list:
|
for addon in addon_list:
|
||||||
try:
|
try:
|
||||||
if start_task := await _addon_save(addon):
|
if start_task := await self._addon_save(addon):
|
||||||
start_tasks.append(start_task)
|
start_tasks.append(start_task)
|
||||||
except Exception as err: # pylint: disable=broad-except
|
except Exception as err: # pylint: disable=broad-except
|
||||||
_LOGGER.warning("Can't save Add-on %s: %s", addon.slug, err)
|
_LOGGER.warning("Can't save Add-on %s: %s", addon.slug, err)
|
||||||
|
|
||||||
return start_tasks
|
return start_tasks
|

+    @Job(name="backup_addon_restore", cleanup=False)
+    async def _addon_restore(self, addon_slug: str) -> asyncio.Task | None:
+        """Restore an add-on from backup."""
+        self.sys_jobs.current.reference = addon_slug
+
+        tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}"
+        addon_file = SecureTarFile(
+            Path(self._tmp.name, tar_name),
+            "r",
+            key=self._key,
+            gzip=self.compressed,
+            bufsize=BUF_SIZE,
+        )
+
+        # If exists inside backup
+        if not addon_file.path.exists():
+            raise BackupError(f"Can't find backup {addon_slug}", _LOGGER.error)
+
+        # Perform a restore
+        try:
+            return await self.sys_addons.restore(addon_slug, addon_file)
+        except AddonsError as err:
+            raise BackupError(
+                f"Can't restore backup {addon_slug}", _LOGGER.error
+            ) from err
+
+    @Job(name="backup_restore_addons", cleanup=False)
     async def restore_addons(
         self, addon_list: list[str]
     ) -> tuple[bool, list[asyncio.Task]]:
         """Restore a list add-on from backup."""
+        # Save Add-ons sequential avoid issue on slow IO
-        async def _addon_restore(addon_slug: str) -> tuple[bool, asyncio.Task | None]:
-            """Task to restore an add-on into backup."""
-            tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}"
-            addon_file = SecureTarFile(
-                Path(self._tmp.name, tar_name),
-                "r",
-                key=self._key,
-                gzip=self.compressed,
-                bufsize=BUF_SIZE,
-            )
-
-            # If exists inside backup
-            if not addon_file.path.exists():
-                _LOGGER.error("Can't find backup %s", addon_slug)
-                return (False, None)
-
-            # Perform a restore
-            try:
-                return (True, await self.sys_addons.restore(addon_slug, addon_file))
-            except AddonsError:
-                _LOGGER.error("Can't restore backup %s", addon_slug)
-                return (False, None)
-
-        # Save Add-ons sequential
-        # avoid issue on slow IO
         start_tasks: list[asyncio.Task] = []
         success = True
         for slug in addon_list:
             try:
-                addon_success, start_task = await _addon_restore(slug)
+                start_task = await self._addon_restore(slug)
             except Exception as err:  # pylint: disable=broad-except
                 _LOGGER.warning("Can't restore Add-on %s: %s", slug, err)
                 success = False
             else:
-                success = success and addon_success
                 if start_task:
                     start_tasks.append(start_task)

         return (success, start_tasks)

+    @Job(name="backup_remove_delta_addons", cleanup=False)
+    async def remove_delta_addons(self) -> bool:
+        """Remove addons which are not in this backup."""
+        success = True
+        for addon in self.sys_addons.installed:
+            if addon.slug in self.addon_list:
+                continue
+
+            # Remove Add-on because it's not a part of the new env
+            # Do it sequential avoid issue on slow IO
+            try:
+                await self.sys_addons.uninstall(addon.slug)
+            except AddonsError as err:
+                self.sys_jobs.current.capture_error(err)
+                _LOGGER.warning("Can't uninstall Add-on %s: %s", addon.slug, err)
+                success = False
+
+        return success

+    @Job(name="backup_folder_save", cleanup=False)
+    async def _folder_save(self, name: str):
+        """Take backup of a folder."""
+        self.sys_jobs.current.reference = name
+
+        slug_name = name.replace("/", "_")
+        tar_name = f"{slug_name}.tar{'.gz' if self.compressed else ''}"
+        origin_dir = Path(self.sys_config.path_supervisor, name)
+
+        # Check if exists
+        if not origin_dir.is_dir():
+            _LOGGER.warning("Can't find backup folder %s", name)
+            return
+
+        def _save() -> None:
+            # Take backup
+            _LOGGER.info("Backing up folder %s", name)
+
+            with self._outer_secure_tarfile.create_inner_tar(
+                f"./{tar_name}",
+                gzip=self.compressed,
+                key=self._key,
+            ) as tar_file:
+                atomic_contents_add(
+                    tar_file,
+                    origin_dir,
+                    excludes=[
+                        bound.bind_mount.local_where.as_posix()
+                        for bound in self.sys_mounts.bound_mounts
+                        if bound.bind_mount.local_where
+                    ],
+                    arcname=".",
+                )
+
+            _LOGGER.info("Backup folder %s done", name)
+
+        try:
+            await self.sys_run_in_executor(_save)
+        except (tarfile.TarError, OSError) as err:
+            raise BackupError(
+                f"Can't backup folder {name}: {str(err)}", _LOGGER.error
+            ) from err
+
+        self._data[ATTR_FOLDERS].append(name)
+
+    @Job(name="backup_store_folders", cleanup=False)
     async def store_folders(self, folder_list: list[str]):
         """Backup Supervisor data into backup."""
+        # Save folder sequential avoid issue on slow IO
-        async def _folder_save(name: str):
-            """Take backup of a folder."""
-            slug_name = name.replace("/", "_")
-            tar_name = Path(
-                self._tmp.name, f"{slug_name}.tar{'.gz' if self.compressed else ''}"
-            )
-            origin_dir = Path(self.sys_config.path_supervisor, name)
-
-            # Check if exists
-            if not origin_dir.is_dir():
-                _LOGGER.warning("Can't find backup folder %s", name)
-                return
-
-            def _save() -> None:
-                # Take backup
-                _LOGGER.info("Backing up folder %s", name)
-                with SecureTarFile(
-                    tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE
-                ) as tar_file:
-                    atomic_contents_add(
-                        tar_file,
-                        origin_dir,
-                        excludes=[
-                            bound.bind_mount.local_where.as_posix()
-                            for bound in self.sys_mounts.bound_mounts
-                            if bound.bind_mount.local_where
-                        ],
-                        arcname=".",
-                    )
-
-                _LOGGER.info("Backup folder %s done", name)
-
-            await self.sys_run_in_executor(_save)
-            self._data[ATTR_FOLDERS].append(name)
-
-        # Save folder sequential
-        # avoid issue on slow IO
         for folder in folder_list:
+            await self._folder_save(folder)
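
For reference, a rough sketch of the exclude logic `_folder_save` relies on, written against the stdlib tarfile module as a stand-in for SecureTarFile/atomic_contents_add (paths and helper names here are illustrative, not Supervisor's API):

from pathlib import Path
import tarfile

def backup_folder(origin: Path, target: Path, excludes: list[Path]) -> None:
    """Tar a folder while skipping anything below an excluded directory (e.g. a bind mount)."""
    def _filter(info: tarfile.TarInfo) -> tarfile.TarInfo | None:
        path = Path(origin, info.name)
        if any(path.is_relative_to(ex) for ex in excludes):
            return None  # drop excluded members entirely
        return info

    with tarfile.open(target, "w:gz") as tar:
        tar.add(origin, arcname=".", filter=_filter)
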

+    @Job(name="backup_folder_restore", cleanup=False)
+    async def _folder_restore(self, name: str) -> None:
+        """Restore a folder."""
+        self.sys_jobs.current.reference = name
+
+        slug_name = name.replace("/", "_")
+        tar_name = Path(
+            self._tmp.name, f"{slug_name}.tar{'.gz' if self.compressed else ''}"
+        )
+        origin_dir = Path(self.sys_config.path_supervisor, name)
+
+        # Check if exists inside backup
+        if not tar_name.exists():
+            raise BackupInvalidError(
+                f"Can't find restore folder {name}", _LOGGER.warning
+            )
+
+        # Unmount any mounts within folder
+        bind_mounts = [
+            bound.bind_mount
+            for bound in self.sys_mounts.bound_mounts
+            if bound.bind_mount.local_where
+            and bound.bind_mount.local_where.is_relative_to(origin_dir)
+        ]
+        if bind_mounts:
+            await asyncio.gather(*[bind_mount.unmount() for bind_mount in bind_mounts])
+
+        # Clean old stuff
+        if origin_dir.is_dir():
+            await remove_folder(origin_dir, content_only=True)
+
+        # Perform a restore
+        def _restore() -> bool:
             try:
-                await _folder_save(folder)
+                _LOGGER.info("Restore folder %s", name)
+                with SecureTarFile(
+                    tar_name,
+                    "r",
+                    key=self._key,
+                    gzip=self.compressed,
+                    bufsize=BUF_SIZE,
+                ) as tar_file:
+                    tar_file.extractall(
+                        path=origin_dir, members=tar_file, filter="fully_trusted"
+                    )
+                _LOGGER.info("Restore folder %s done", name)
             except (tarfile.TarError, OSError) as err:
                 raise BackupError(
-                    f"Can't backup folder {folder}: {str(err)}", _LOGGER.error
+                    f"Can't restore folder {name}: {err}", _LOGGER.warning
                 ) from err
+            return True
+
+        try:
+            return await self.sys_run_in_executor(_restore)
+        finally:
+            if bind_mounts:
+                await asyncio.gather(
+                    *[bind_mount.mount() for bind_mount in bind_mounts]
+                )
+
+    @Job(name="backup_restore_folders", cleanup=False)
     async def restore_folders(self, folder_list: list[str]) -> bool:
         """Backup Supervisor data into backup."""
         success = True
@@ -556,16 +690,16 @@ class Backup(CoreSysAttributes):
                     *[bind_mount.mount() for bind_mount in bind_mounts]
                 )

-        # Restore folder sequential
-        # avoid issue on slow IO
+        # Restore folder sequential avoid issue on slow IO
         for folder in folder_list:
             try:
-                success = success and await _folder_restore(folder)
+                await self._folder_restore(folder)
             except Exception as err:  # pylint: disable=broad-except
                 _LOGGER.warning("Can't restore folder %s: %s", folder, err)
                 success = False
         return success
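
The folder restore above unmounts bind mounts inside the target, extracts, and remounts in a finally block so mounts come back even if extraction fails. A self-contained sketch of that shape (stand-in mount objects, not Supervisor's classes):

import asyncio

class FakeMount:
    def __init__(self, name: str) -> None:
        self.name = name
    async def unmount(self) -> None:
        print(f"umount {self.name}")
    async def mount(self) -> None:
        print(f"mount {self.name}")

async def restore_folder(mounts: list[FakeMount]) -> None:
    if mounts:
        await asyncio.gather(*[m.unmount() for m in mounts])
    try:
        print("extracting backup tar ...")  # the real code extracts in an executor
    finally:
        # Always remount, even if extraction raised.
        if mounts:
            await asyncio.gather(*[m.mount() for m in mounts])

asyncio.run(restore_folder([FakeMount("media"), FakeMount("share")]))
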

+    @Job(name="backup_store_homeassistant", cleanup=False)
     async def store_homeassistant(self, exclude_database: bool = False):
         """Backup Home Assistant Core configuration folder."""
         self._data[ATTR_HOMEASSISTANT] = {
@@ -573,12 +707,12 @@ class Backup(CoreSysAttributes):
             ATTR_EXCLUDE_DATABASE: exclude_database,
         }

+        tar_name = f"homeassistant.tar{'.gz' if self.compressed else ''}"
         # Backup Home Assistant Core config directory
-        tar_name = Path(
-            self._tmp.name, f"homeassistant.tar{'.gz' if self.compressed else ''}"
-        )
-        homeassistant_file = SecureTarFile(
-            tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE
+        homeassistant_file = self._outer_secure_tarfile.create_inner_tar(
+            f"./{tar_name}",
+            gzip=self.compressed,
+            key=self._key,
         )

         await self.sys_homeassistant.backup(homeassistant_file, exclude_database)
@@ -586,6 +720,7 @@ class Backup(CoreSysAttributes):
         # Store size
         self.homeassistant[ATTR_SIZE] = homeassistant_file.size

+    @Job(name="backup_restore_homeassistant", cleanup=False)
     async def restore_homeassistant(self) -> Awaitable[None]:
         """Restore Home Assistant Core configuration folder."""
         await self.sys_homeassistant.core.stop()
@@ -619,7 +754,7 @@ class Backup(CoreSysAttributes):

         return self.sys_create_task(_core_update())

-    def store_repositories(self):
+    def store_repositories(self) -> None:
         """Store repository list into backup."""
         self.repositories = self.sys_store.repository_urls

@@ -15,7 +15,12 @@ from ..const import (
     CoreState,
 )
 from ..dbus.const import UnitActiveState
-from ..exceptions import AddonsError, BackupError, BackupInvalidError, BackupJobError
+from ..exceptions import (
+    BackupError,
+    BackupInvalidError,
+    BackupJobError,
+    BackupMountDownError,
+)
 from ..jobs.const import JOB_GROUP_BACKUP_MANAGER, JobCondition, JobExecutionLimit
 from ..jobs.decorator import Job
 from ..jobs.job_group import JobGroup
@@ -74,11 +79,15 @@ class BackupManager(FileConfiguration, JobGroup):

     def _get_base_path(self, location: Mount | type[DEFAULT] | None = DEFAULT) -> Path:
         """Get base path for backup using location or default location."""
-        if location:
-            return location.local_where
-
         if location == DEFAULT and self.sys_mounts.default_backup_mount:
-            return self.sys_mounts.default_backup_mount.local_where
+            location = self.sys_mounts.default_backup_mount
+
+        if location:
+            if not location.local_where.is_mount():
+                raise BackupMountDownError(
+                    f"{location.name} is down, cannot back-up to it", _LOGGER.error
+                )
+            return location.local_where

         return self.sys_config.path_backup

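
The change above refuses to back up onto a configured backup mount whose mount point is not actually mounted, instead of silently writing to the empty directory. A hedged sketch of the same check outside Supervisor (the exception class and mount handling are illustrative):

from pathlib import Path

class MountDownError(Exception):
    pass

def get_base_path(mount_point: Path | None, default: Path) -> Path:
    if mount_point is not None:
        if not mount_point.is_mount():
            raise MountDownError(f"{mount_point} is down, cannot back-up to it")
        return mount_point
    return default

print(get_base_path(None, Path("/data/backup")))
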
@@ -139,8 +148,8 @@ class BackupManager(FileConfiguration, JobGroup):
         tar_file = Path(self._get_base_path(location), f"{slug}.tar")

         # init object
-        backup = Backup(self.coresys, tar_file)
-        backup.new(slug, name, date_str, sys_type, password, compressed)
+        backup = Backup(self.coresys, tar_file, slug)
+        backup.new(name, date_str, sys_type, password, compressed)

         # Add backup ID to job
         self.sys_jobs.current.reference = backup.slug
@@ -165,9 +174,11 @@ class BackupManager(FileConfiguration, JobGroup):

         async def _load_backup(tar_file):
             """Load the backup."""
-            backup = Backup(self.coresys, tar_file)
+            backup = Backup(self.coresys, tar_file, "temp")
             if await backup.load():
-                self._backups[backup.slug] = backup
+                self._backups[backup.slug] = Backup(
+                    self.coresys, tar_file, backup.slug, backup.data
+                )

         tasks = [
             self.sys_create_task(_load_backup(tar_file))
@@ -199,7 +210,7 @@ class BackupManager(FileConfiguration, JobGroup):

     async def import_backup(self, tar_file: Path) -> Backup | None:
         """Check backup tarfile and import it."""
-        backup = Backup(self.coresys, tar_file)
+        backup = Backup(self.coresys, tar_file, "temp")

         # Read meta data
         if not await backup.load():
@@ -222,7 +233,7 @@ class BackupManager(FileConfiguration, JobGroup):
             return None

         # Load new backup
-        backup = Backup(self.coresys, tar_origin)
+        backup = Backup(self.coresys, tar_origin, backup.slug, backup.data)
         if not await backup.load():
             return None
         _LOGGER.info("Successfully imported %s", backup.slug)
@@ -269,9 +280,15 @@ class BackupManager(FileConfiguration, JobGroup):

             self._change_stage(BackupJobStage.FINISHING_FILE, backup)

+        except BackupError as err:
+            self.sys_jobs.current.capture_error(err)
+            return None
         except Exception as err:  # pylint: disable=broad-except
             _LOGGER.exception("Backup %s error", backup.slug)
             capture_exception(err)
+            self.sys_jobs.current.capture_error(
+                BackupError(f"Backup {backup.slug} error, see supervisor logs")
+            )
             return None
         else:
             self._backups[backup.slug] = backup
@@ -290,6 +307,7 @@ class BackupManager(FileConfiguration, JobGroup):
         conditions=[JobCondition.RUNNING],
         limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=BackupJobError,
+        cleanup=False,
     )
     async def do_backup_full(
         self,
@@ -326,6 +344,7 @@ class BackupManager(FileConfiguration, JobGroup):
         conditions=[JobCondition.RUNNING],
         limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=BackupJobError,
+        cleanup=False,
     )
     async def do_backup_partial(
         self,
@@ -410,17 +429,7 @@ class BackupManager(FileConfiguration, JobGroup):
             # Delete delta add-ons
             if replace:
                 self._change_stage(RestoreJobStage.REMOVE_DELTA_ADDONS, backup)
-                for addon in self.sys_addons.installed:
-                    if addon.slug in backup.addon_list:
-                        continue
-
-                    # Remove Add-on because it's not a part of the new env
-                    # Do it sequential avoid issue on slow IO
-                    try:
-                        await self.sys_addons.uninstall(addon.slug)
-                    except AddonsError:
-                        _LOGGER.warning("Can't uninstall Add-on %s", addon.slug)
-                        success = False
+                success = success and await backup.remove_delta_addons()

             if addon_list:
                 self._change_stage(RestoreJobStage.ADDON_REPOSITORIES, backup)
@@ -444,7 +453,7 @@ class BackupManager(FileConfiguration, JobGroup):
             _LOGGER.exception("Restore %s error", backup.slug)
             capture_exception(err)
             raise BackupError(
-                f"Restore {backup.slug} error, check logs for details"
+                f"Restore {backup.slug} error, see supervisor logs"
             ) from err
         else:
             if addon_start_tasks:
@@ -463,12 +472,16 @@ class BackupManager(FileConfiguration, JobGroup):

         # Do we need start Home Assistant Core?
         if not await self.sys_homeassistant.core.is_running():
-            await self.sys_homeassistant.core.start()
+            await self.sys_homeassistant.core.start(
+                _job_override__cleanup=False
+            )

         # Check If we can access to API / otherwise restart
         if not await self.sys_homeassistant.api.check_api_state():
             _LOGGER.warning("Need restart HomeAssistant for API")
-            await self.sys_homeassistant.core.restart()
+            await self.sys_homeassistant.core.restart(
+                _job_override__cleanup=False
+            )

     @Job(
         name="backup_manager_full_restore",
@@ -481,6 +494,7 @@ class BackupManager(FileConfiguration, JobGroup):
         ],
         limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=BackupJobError,
+        cleanup=False,
     )
     async def do_restore_full(
         self, backup: Backup, password: str | None = None
@@ -534,6 +548,7 @@ class BackupManager(FileConfiguration, JobGroup):
         ],
         limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=BackupJobError,
+        cleanup=False,
     )
     async def do_restore_partial(
         self,
@@ -53,7 +53,7 @@ def unique_addons(addons_list):


 def v1_homeassistant(
-    homeassistant_data: dict[str, Any] | None
+    homeassistant_data: dict[str, Any] | None,
 ) -> dict[str, Any] | None:
     """Cleanup homeassistant artefacts from v1."""
     if not homeassistant_data:
@@ -115,7 +115,7 @@ async def initialize_coresys() -> CoreSys:
         _LOGGER.warning(
             "Missing SUPERVISOR_MACHINE environment variable. Fallback to deprecated extraction!"
         )
-    _LOGGER.info("Seting up coresys for machine: %s", coresys.machine)
+    _LOGGER.info("Setting up coresys for machine: %s", coresys.machine)

     return coresys

@@ -256,9 +256,11 @@ def migrate_system_env(coresys: CoreSys) -> None:
 def initialize_logging() -> None:
     """Initialize the logging."""
     logging.basicConfig(level=logging.INFO)
-    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
+    fmt = (
+        "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
+    )
     colorfmt = f"%(log_color)s{fmt}%(reset)s"
-    datefmt = "%y-%m-%d %H:%M:%S"
+    datefmt = "%Y-%m-%d %H:%M:%S"

     # suppress overly verbose logs from libraries that aren't helpful
     logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
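
The new format string adds milliseconds and a four-digit year to log timestamps; a quick way to see the effect outside Supervisor:

import logging

fmt = "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
logging.basicConfig(level=logging.INFO, format=fmt, datefmt="%Y-%m-%d %H:%M:%S")
logging.getLogger("demo").info("formatted like the Supervisor log")
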
@@ -68,6 +68,7 @@ META_SUPERVISOR = "supervisor"
 JSON_DATA = "data"
 JSON_MESSAGE = "message"
 JSON_RESULT = "result"
+JSON_JOB_ID = "job_id"

 RESULT_ERROR = "error"
 RESULT_OK = "ok"
@@ -331,6 +332,7 @@ ATTR_UUID = "uuid"
 ATTR_VALID = "valid"
 ATTR_VALUE = "value"
 ATTR_VERSION = "version"
+ATTR_VERSION_TIMESTAMP = "version_timestamp"
 ATTR_VERSION_LATEST = "version_latest"
 ATTR_VIDEO = "video"
 ATTR_VLAN = "vlan"
@@ -458,9 +460,11 @@ class HostFeature(StrEnum):
 class BusEvent(StrEnum):
     """Bus event type."""

+    DOCKER_CONTAINER_STATE_CHANGE = "docker_container_state_change"
     HARDWARE_NEW_DEVICE = "hardware_new_device"
     HARDWARE_REMOVE_DEVICE = "hardware_remove_device"
-    DOCKER_CONTAINER_STATE_CHANGE = "docker_container_state_change"
+    SUPERVISOR_JOB_END = "supervisor_job_end"
+    SUPERVISOR_JOB_START = "supervisor_job_start"
     SUPERVISOR_STATE_CHANGE = "supervisor_state_change"

@@ -5,8 +5,6 @@ from contextlib import suppress
 from datetime import timedelta
 import logging

-import async_timeout
-
 from .const import (
     ATTR_STARTUP,
     RUN_SUPERVISOR_STATE,
@@ -179,7 +177,15 @@ class Core(CoreSysAttributes):
             and not self.sys_dev
             and self.supported
         ):
-            self.sys_dbus.agent.diagnostics = self.sys_config.diagnostics
+            try:
+                await self.sys_dbus.agent.set_diagnostics(self.sys_config.diagnostics)
+            except Exception as err:  # pylint: disable=broad-except
+                _LOGGER.warning(
+                    "Could not set diagnostics to %s due to %s",
+                    self.sys_config.diagnostics,
+                    err,
+                )
+                capture_exception(err)

         # Evaluate the system
         await self.sys_resolution.evaluate.evaluate_system()
@@ -298,7 +304,7 @@ class Core(CoreSysAttributes):

         # Stage 1
         try:
-            async with async_timeout.timeout(10):
+            async with asyncio.timeout(10):
                 await asyncio.wait(
                     [
                         self.sys_create_task(coro)
@@ -314,7 +320,7 @@ class Core(CoreSysAttributes):

         # Stage 2
         try:
-            async with async_timeout.timeout(10):
+            async with asyncio.timeout(10):
                 await asyncio.wait(
                     [
                         self.sys_create_task(coro)
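
The third-party async_timeout dependency is dropped in favour of asyncio.timeout, available since Python 3.11; for this use the behaviour is equivalent. A minimal illustration (task list and timings are made up):

import asyncio

async def wait_for_stage(tasks: list) -> None:
    try:
        async with asyncio.timeout(10):
            await asyncio.wait([asyncio.ensure_future(t) for t in tasks])
    except TimeoutError:
        print("stage did not finish in time, continuing anyway")

asyncio.run(wait_for_stage([asyncio.sleep(0.01)]))
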
@@ -544,13 +544,44 @@ class CoreSys:

         return self.loop.run_in_executor(None, funct, *args)

-    def create_task(self, coroutine: Coroutine) -> asyncio.Task:
-        """Create an async task."""
+    def _create_context(self) -> Context:
+        """Create a new context for a task."""
         context = copy_context()
         for callback in self._set_task_context:
             context = callback(context)
+        return context

-        return self.loop.create_task(coroutine, context=context)
+    def create_task(self, coroutine: Coroutine) -> asyncio.Task:
+        """Create an async task."""
+        return self.loop.create_task(coroutine, context=self._create_context())
+
+    def call_later(
+        self,
+        delay: float,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task after a delay."""
+        if kwargs:
+            funct = partial(funct, **kwargs)
+
+        return self.loop.call_later(delay, funct, *args, context=self._create_context())
+
+    def call_at(
+        self,
+        when: datetime,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task at the specified datetime."""
+        if kwargs:
+            funct = partial(funct, **kwargs)
+
+        return self.loop.call_at(
+            when.timestamp(), funct, *args, context=self._create_context()
+        )


 class CoreSysAttributes:
@@ -731,3 +762,23 @@ class CoreSysAttributes:
     def sys_create_task(self, coroutine: Coroutine) -> asyncio.Task:
         """Create an async task."""
         return self.coresys.create_task(coroutine)
+
+    def sys_call_later(
+        self,
+        delay: float,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task after a delay."""
+        return self.coresys.call_later(delay, funct, *args, **kwargs)
+
+    def sys_call_at(
+        self,
+        when: datetime,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task at the specified datetime."""
+        return self.coresys.call_at(when, funct, *args, **kwargs)
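
A short sketch of why the new call_later/call_at helpers bind kwargs with functools.partial and pass an explicit context: the event-loop schedulers only take positional arguments, and the copied Context keeps contextvars (such as a current-job marker) visible inside the callback. Names below are illustrative:

import asyncio
from contextvars import ContextVar, copy_context
from functools import partial

current_job: ContextVar[str] = ContextVar("current_job", default="none")

def log_state(prefix: str, *, suffix: str = "") -> None:
    print(f"{prefix} job={current_job.get()} {suffix}")

async def main() -> None:
    loop = asyncio.get_running_loop()
    current_job.set("backup_manager_full_backup")
    ctx = copy_context()
    # call_later does not accept kwargs, so bind them first:
    loop.call_later(0.01, partial(log_state, suffix="(delayed)"), "tick", context=ctx)
    await asyncio.sleep(0.05)

asyncio.run(main())
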
@@ -1,5 +1,6 @@
 """OS-Agent implementation for DBUS."""
 import asyncio
+from collections.abc import Awaitable
 import logging
 from typing import Any

@@ -80,11 +81,9 @@ class OSAgent(DBusInterfaceProxy):
         """Return if diagnostics is enabled on OS-Agent."""
         return self.properties[DBUS_ATTR_DIAGNOSTICS]

-    @diagnostics.setter
-    @dbus_property
-    def diagnostics(self, value: bool) -> None:
+    def set_diagnostics(self, value: bool) -> Awaitable[None]:
         """Enable or disable OS-Agent diagnostics."""
-        asyncio.create_task(self.dbus.set_diagnostics(value))
+        return self.dbus.set_diagnostics(value)

     @property
     def all(self) -> list[DBusInterface]:
@@ -1,6 +1,7 @@
 """Green board management."""

 import asyncio
+from collections.abc import Awaitable

 from dbus_fast.aio.message_bus import MessageBus

@@ -25,11 +26,10 @@ class Green(BoardProxy):
         """Get activity LED enabled."""
         return self.properties[DBUS_ATTR_ACTIVITY_LED]

-    @activity_led.setter
-    def activity_led(self, enabled: bool) -> None:
+    def set_activity_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable activity LED."""
         self._data[ATTR_ACTIVITY_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Green.set_activity_led(enabled))
+        return self.dbus.Boards.Green.set_activity_led(enabled)

     @property
     @dbus_property
@@ -37,11 +37,10 @@ class Green(BoardProxy):
         """Get power LED enabled."""
         return self.properties[DBUS_ATTR_POWER_LED]

-    @power_led.setter
-    def power_led(self, enabled: bool) -> None:
+    def set_power_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable power LED."""
         self._data[ATTR_POWER_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Green.set_power_led(enabled))
+        return self.dbus.Boards.Green.set_power_led(enabled)

     @property
     @dbus_property
@@ -49,17 +48,18 @@ class Green(BoardProxy):
         """Get user LED enabled."""
         return self.properties[DBUS_ATTR_USER_LED]

-    @user_led.setter
-    def user_led(self, enabled: bool) -> None:
+    def set_user_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable disk LED."""
         self._data[ATTR_USER_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Green.set_user_led(enabled))
+        return self.dbus.Boards.Green.set_user_led(enabled)

     async def connect(self, bus: MessageBus) -> None:
         """Connect to D-Bus."""
         await super().connect(bus)

         # Set LEDs based on settings on connect
-        self.activity_led = self._data[ATTR_ACTIVITY_LED]
-        self.power_led = self._data[ATTR_POWER_LED]
-        self.user_led = self._data[ATTR_USER_LED]
+        await asyncio.gather(
+            self.set_activity_led(self._data[ATTR_ACTIVITY_LED]),
+            self.set_power_led(self._data[ATTR_POWER_LED]),
+            self.set_user_led(self._data[ATTR_USER_LED]),
+        )
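
The pattern change here (and in the Yellow board and OS-Agent hunks nearby): property setters that fired tasks with asyncio.create_task become plain methods returning an awaitable, so callers can await them and gather several at once instead of fire-and-forget. A self-contained sketch with stand-in classes (not the real D-Bus proxies):

import asyncio
from collections.abc import Awaitable

class Board:
    def __init__(self) -> None:
        self._data = {"activity_led": True, "power_led": True}

    async def _dbus_set(self, prop: str, enabled: bool) -> None:
        await asyncio.sleep(0)  # stand-in for the D-Bus property write
        print(f"{prop} -> {enabled}")

    def set_activity_led(self, enabled: bool) -> Awaitable[None]:
        self._data["activity_led"] = enabled
        return self._dbus_set("activity_led", enabled)

    def set_power_led(self, enabled: bool) -> Awaitable[None]:
        self._data["power_led"] = enabled
        return self._dbus_set("power_led", enabled)

async def connect(board: Board) -> None:
    await asyncio.gather(board.set_activity_led(True), board.set_power_led(False))

asyncio.run(connect(Board()))
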
@@ -1,6 +1,7 @@
 """Yellow board management."""

 import asyncio
+from collections.abc import Awaitable

 from dbus_fast.aio.message_bus import MessageBus

@@ -25,11 +26,10 @@ class Yellow(BoardProxy):
         """Get heartbeat LED enabled."""
         return self.properties[DBUS_ATTR_HEARTBEAT_LED]

-    @heartbeat_led.setter
-    def heartbeat_led(self, enabled: bool) -> None:
+    def set_heartbeat_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable heartbeat LED."""
         self._data[ATTR_HEARTBEAT_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Yellow.set_heartbeat_led(enabled))
+        return self.dbus.Boards.Yellow.set_heartbeat_led(enabled)

     @property
     @dbus_property
@@ -37,11 +37,10 @@ class Yellow(BoardProxy):
         """Get power LED enabled."""
         return self.properties[DBUS_ATTR_POWER_LED]

-    @power_led.setter
-    def power_led(self, enabled: bool) -> None:
+    def set_power_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable power LED."""
         self._data[ATTR_POWER_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Yellow.set_power_led(enabled))
+        return self.dbus.Boards.Yellow.set_power_led(enabled)

     @property
     @dbus_property
@@ -49,17 +48,18 @@ class Yellow(BoardProxy):
         """Get disk LED enabled."""
         return self.properties[DBUS_ATTR_DISK_LED]

-    @disk_led.setter
-    def disk_led(self, enabled: bool) -> None:
+    def set_disk_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable disk LED."""
         self._data[ATTR_DISK_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Yellow.set_disk_led(enabled))
+        return self.dbus.Boards.Yellow.set_disk_led(enabled)

     async def connect(self, bus: MessageBus) -> None:
         """Connect to D-Bus."""
         await super().connect(bus)

         # Set LEDs based on settings on connect
-        self.disk_led = self._data[ATTR_DISK_LED]
-        self.heartbeat_led = self._data[ATTR_HEARTBEAT_LED]
-        self.power_led = self._data[ATTR_POWER_LED]
+        await asyncio.gather(
+            self.set_disk_led(self._data[ATTR_DISK_LED]),
+            self.set_heartbeat_led(self._data[ATTR_HEARTBEAT_LED]),
+            self.set_power_led(self._data[ATTR_POWER_LED]),
+        )
@@ -12,6 +12,6 @@ class System(DBusInterface):
     object_path: str = DBUS_OBJECT_HAOS_SYSTEM

     @dbus_connected
-    async def schedule_wipe_device(self) -> None:
+    async def schedule_wipe_device(self) -> bool:
         """Schedule a factory reset on next system boot."""
-        await self.dbus.System.call_schedule_wipe_device()
+        return await self.dbus.System.call_schedule_wipe_device()
@@ -36,12 +36,14 @@ DBUS_IFACE_RAUC_INSTALLER = "de.pengutronix.rauc.Installer"
 DBUS_IFACE_RESOLVED_MANAGER = "org.freedesktop.resolve1.Manager"
 DBUS_IFACE_SETTINGS_CONNECTION = "org.freedesktop.NetworkManager.Settings.Connection"
 DBUS_IFACE_SYSTEMD_MANAGER = "org.freedesktop.systemd1.Manager"
+DBUS_IFACE_SYSTEMD_UNIT = "org.freedesktop.systemd1.Unit"
 DBUS_IFACE_TIMEDATE = "org.freedesktop.timedate1"
 DBUS_IFACE_UDISKS2_MANAGER = "org.freedesktop.UDisks2.Manager"

 DBUS_SIGNAL_NM_CONNECTION_ACTIVE_CHANGED = (
     "org.freedesktop.NetworkManager.Connection.Active.StateChanged"
 )
+DBUS_SIGNAL_PROPERTIES_CHANGED = "org.freedesktop.DBus.Properties.PropertiesChanged"
 DBUS_SIGNAL_RAUC_INSTALLER_COMPLETED = "de.pengutronix.rauc.Installer.Completed"

 DBUS_OBJECT_BASE = "/"
@@ -59,11 +61,13 @@ DBUS_OBJECT_RESOLVED = "/org/freedesktop/resolve1"
 DBUS_OBJECT_SETTINGS = "/org/freedesktop/NetworkManager/Settings"
 DBUS_OBJECT_SYSTEMD = "/org/freedesktop/systemd1"
 DBUS_OBJECT_TIMEDATE = "/org/freedesktop/timedate1"
-DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2/Manager"
+DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2"
+DBUS_OBJECT_UDISKS2_MANAGER = "/org/freedesktop/UDisks2/Manager"

 DBUS_ATTR_ACTIVE_ACCESSPOINT = "ActiveAccessPoint"
 DBUS_ATTR_ACTIVE_CONNECTION = "ActiveConnection"
 DBUS_ATTR_ACTIVE_CONNECTIONS = "ActiveConnections"
+DBUS_ATTR_ACTIVE_STATE = "ActiveState"
 DBUS_ATTR_ACTIVITY_LED = "ActivityLED"
 DBUS_ATTR_ADDRESS_DATA = "AddressData"
 DBUS_ATTR_BITRATE = "Bitrate"
@@ -177,6 +181,7 @@ DBUS_ATTR_UUID = "Uuid"
 DBUS_ATTR_VARIANT = "Variant"
 DBUS_ATTR_VENDOR = "Vendor"
 DBUS_ATTR_VERSION = "Version"
+DBUS_ATTR_VIRTUALIZATION = "Virtualization"
 DBUS_ATTR_WHAT = "What"
 DBUS_ATTR_WWN = "WWN"

@@ -17,7 +17,7 @@ from .rauc import Rauc
 from .resolved import Resolved
 from .systemd import Systemd
 from .timedate import TimeDate
-from .udisks2 import UDisks2
+from .udisks2 import UDisks2Manager

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -37,7 +37,7 @@ class DBusManager(CoreSysAttributes):
         self._agent: OSAgent = OSAgent()
         self._timedate: TimeDate = TimeDate()
         self._resolved: Resolved = Resolved()
-        self._udisks2: UDisks2 = UDisks2()
+        self._udisks2: UDisks2Manager = UDisks2Manager()
         self._bus: MessageBus | None = None

     @property
@@ -81,7 +81,7 @@ class DBusManager(CoreSysAttributes):
         return self._resolved

     @property
-    def udisks2(self) -> UDisks2:
+    def udisks2(self) -> UDisks2Manager:
         """Return the udisks2 interface."""
         return self._udisks2

@@ -7,6 +7,8 @@ from uuid import uuid4

 from dbus_fast import Variant

+from ....host.const import InterfaceMethod, InterfaceType
+from .. import NetworkManager
 from . import (
     ATTR_ASSIGNED_MAC,
     CONF_ATTR_802_ETHERNET,
@@ -19,8 +21,6 @@ from . import (
     CONF_ATTR_PATH,
     CONF_ATTR_VLAN,
 )
-from .. import NetworkManager
-from ....host.const import InterfaceMethod, InterfaceType

 if TYPE_CHECKING:
     from ....host.configuration import Interface
@@ -37,8 +37,8 @@ def get_connection_from_interface(
     # Generate/Update ID/name
     if not name or not name.startswith("Supervisor"):
         name = f"Supervisor {interface.name}"
     if interface.type == InterfaceType.VLAN:
         name = f"{name}.{interface.vlan.id}"

     if interface.type == InterfaceType.ETHERNET:
         iftype = "802-3-ethernet"
@@ -1,6 +1,8 @@
 """D-Bus interface for rauc."""

+from ctypes import c_uint32, c_uint64
 import logging
-from typing import Any
+from typing import Any, NotRequired, TypedDict

 from dbus_fast.aio.message_bus import MessageBus

@@ -23,6 +25,28 @@ from .utils import dbus_connected

 _LOGGER: logging.Logger = logging.getLogger(__name__)

+SlotStatusDataType = TypedDict(
+    "SlotStatusDataType",
+    {
+        "class": str,
+        "type": str,
+        "state": str,
+        "device": str,
+        "bundle.compatible": NotRequired[str],
+        "sha256": NotRequired[str],
+        "size": NotRequired[c_uint64],
+        "installed.count": NotRequired[c_uint32],
+        "bundle.version": NotRequired[str],
+        "installed.timestamp": NotRequired[str],
+        "status": NotRequired[str],
+        "activated.count": NotRequired[c_uint32],
+        "activated.timestamp": NotRequired[str],
+        "boot-status": NotRequired[str],
+        "bootname": NotRequired[str],
+        "parent": NotRequired[str],
+    },
+)
+

 class Rauc(DBusInterfaceProxy):
     """Handle D-Bus interface for rauc."""
@@ -83,7 +107,7 @@ class Rauc(DBusInterfaceProxy):
         await self.dbus.Installer.call_install(str(raucb_file))

     @dbus_connected
-    async def get_slot_status(self) -> list[tuple[str, dict[str, Any]]]:
+    async def get_slot_status(self) -> list[tuple[str, SlotStatusDataType]]:
         """Get slot status."""
         return await self.dbus.Installer.call_get_slot_status()

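
The slot-status TypedDict above uses the functional form because keys such as "bundle.compatible" and "installed.count" are not valid Python identifiers, and optional keys are marked with NotRequired. A reduced, runnable illustration of the same idea (keys trimmed for brevity):

from typing import NotRequired, TypedDict

SlotStatus = TypedDict(
    "SlotStatus",
    {
        "class": str,
        "device": str,
        "state": str,
        "bundle.version": NotRequired[str],
    },
)

slot: SlotStatus = {"class": "rootfs", "device": "/dev/mmcblk0p2", "state": "booted"}
print(slot["state"])
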
@@ -13,16 +13,19 @@ from ..exceptions import (
     DBusServiceUnkownError,
     DBusSystemdNoSuchUnit,
 )
+from ..utils.dbus import DBusSignalWrapper
 from .const import (
     DBUS_ATTR_FINISH_TIMESTAMP,
     DBUS_ATTR_FIRMWARE_TIMESTAMP_MONOTONIC,
     DBUS_ATTR_KERNEL_TIMESTAMP_MONOTONIC,
     DBUS_ATTR_LOADER_TIMESTAMP_MONOTONIC,
     DBUS_ATTR_USERSPACE_TIMESTAMP_MONOTONIC,
+    DBUS_ATTR_VIRTUALIZATION,
     DBUS_ERR_SYSTEMD_NO_SUCH_UNIT,
     DBUS_IFACE_SYSTEMD_MANAGER,
     DBUS_NAME_SYSTEMD,
     DBUS_OBJECT_SYSTEMD,
+    DBUS_SIGNAL_PROPERTIES_CHANGED,
     StartUnitMode,
     StopUnitMode,
     UnitActiveState,
@@ -42,9 +45,7 @@ def systemd_errors(func):
             return await func(*args, **kwds)
         except DBusFatalError as err:
             if err.type == DBUS_ERR_SYSTEMD_NO_SUCH_UNIT:
-                # pylint: disable=raise-missing-from
-                raise DBusSystemdNoSuchUnit(str(err))
-                # pylint: enable=raise-missing-from
+                raise DBusSystemdNoSuchUnit(str(err)) from None
             raise err

     return wrapper
@@ -66,6 +67,11 @@ class SystemdUnit(DBusInterface):
         """Get active state of the unit."""
         return await self.dbus.Unit.get_active_state()

+    @dbus_connected
+    def properties_changed(self) -> DBusSignalWrapper:
+        """Return signal wrapper for properties changed."""
+        return self.dbus.signal(DBUS_SIGNAL_PROPERTIES_CHANGED)
+

 class Systemd(DBusInterfaceProxy):
     """Systemd function handler.
@@ -109,6 +115,12 @@ class Systemd(DBusInterfaceProxy):
         """Return the boot timestamp."""
         return self.properties[DBUS_ATTR_FINISH_TIMESTAMP]

+    @property
+    @dbus_property
+    def virtualization(self) -> str:
+        """Return virtualization hypervisor being used."""
+        return self.properties[DBUS_ATTR_VIRTUALIZATION]
+
     @dbus_connected
     async def reboot(self) -> None:
         """Reboot host computer."""
@@ -15,12 +15,15 @@ from ...exceptions import (
 from ..const import (
     DBUS_ATTR_SUPPORTED_FILESYSTEMS,
     DBUS_ATTR_VERSION,
+    DBUS_IFACE_BLOCK,
+    DBUS_IFACE_DRIVE,
     DBUS_IFACE_UDISKS2_MANAGER,
     DBUS_NAME_UDISKS2,
     DBUS_OBJECT_BASE,
     DBUS_OBJECT_UDISKS2,
+    DBUS_OBJECT_UDISKS2_MANAGER,
 )
-from ..interface import DBusInterfaceProxy, dbus_property
+from ..interface import DBusInterface, DBusInterfaceProxy, dbus_property
 from ..utils import dbus_connected
 from .block import UDisks2Block
 from .const import UDISKS2_DEFAULT_OPTIONS
@@ -30,7 +33,15 @@ from .drive import UDisks2Drive
 _LOGGER: logging.Logger = logging.getLogger(__name__)


-class UDisks2(DBusInterfaceProxy):
+class UDisks2(DBusInterface):
+    """Handle D-Bus interface for UDisks2 root object."""
+
+    name: str = DBUS_NAME_UDISKS2
+    bus_name: str = DBUS_NAME_UDISKS2
+    object_path: str = DBUS_OBJECT_UDISKS2
+
+
+class UDisks2Manager(DBusInterfaceProxy):
     """Handle D-Bus interface for UDisks2.

     http://storaged.org/doc/udisks2-api/latest/
@@ -38,16 +49,22 @@ class UDisks2(DBusInterfaceProxy):

     name: str = DBUS_NAME_UDISKS2
     bus_name: str = DBUS_NAME_UDISKS2
-    object_path: str = DBUS_OBJECT_UDISKS2
+    object_path: str = DBUS_OBJECT_UDISKS2_MANAGER
     properties_interface: str = DBUS_IFACE_UDISKS2_MANAGER

     _block_devices: dict[str, UDisks2Block] = {}
     _drives: dict[str, UDisks2Drive] = {}

+    def __init__(self):
+        """Initialize object."""
+        super().__init__()
+        self.udisks2_object_manager = UDisks2()
+
     async def connect(self, bus: MessageBus):
         """Connect to D-Bus."""
         try:
             await super().connect(bus)
+            await self.udisks2_object_manager.connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to udisks2")
         except (DBusServiceUnkownError, DBusInterfaceError):
@@ -55,6 +72,14 @@ class UDisks2(DBusInterfaceProxy):
                 "No udisks2 support on the host. Host control has been disabled."
             )

+        # Register for signals on devices added/removed
+        self.udisks2_object_manager.dbus.object_manager.on_interfaces_added(
+            self._interfaces_added
+        )
+        self.udisks2_object_manager.dbus.object_manager.on_interfaces_removed(
+            self._interfaces_removed
+        )
+
     @dbus_connected
     async def update(self, changed: dict[str, Any] | None = None) -> None:
         """Update properties via D-Bus.
@@ -161,11 +186,47 @@ class UDisks2(DBusInterfaceProxy):
             ]
         )

+    async def _interfaces_added(
+        self, object_path: str, properties: dict[str, dict[str, Any]]
+    ) -> None:
+        """Interfaces added to a UDisks2 object."""
+        if object_path in self._block_devices:
+            await self._block_devices[object_path].update()
+            return
+        if object_path in self._drives:
+            await self._drives[object_path].update()
+            return
+
+        if DBUS_IFACE_BLOCK in properties:
+            self._block_devices[object_path] = await UDisks2Block.new(
+                object_path, self.dbus.bus
+            )
+            return
+
+        if DBUS_IFACE_DRIVE in properties:
+            self._drives[object_path] = await UDisks2Drive.new(
+                object_path, self.dbus.bus
+            )
+
+    async def _interfaces_removed(
+        self, object_path: str, interfaces: list[str]
+    ) -> None:
+        """Interfaces removed from a UDisks2 object."""
+        if object_path in self._block_devices and DBUS_IFACE_BLOCK in interfaces:
+            self._block_devices[object_path].shutdown()
+            del self._block_devices[object_path]
+            return
+
+        if object_path in self._drives and DBUS_IFACE_DRIVE in interfaces:
+            self._drives[object_path].shutdown()
+            del self._drives[object_path]
+
     def shutdown(self) -> None:
         """Shutdown the object and disconnect from D-Bus.

         This method is irreversible.
         """
+        self.udisks2_object_manager.shutdown()
         for block_device in self.block_devices:
             block_device.shutdown()
         for drive in self.drives:
@@ -1,6 +1,6 @@
 """Interface to UDisks2 Drive over D-Bus."""

-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from dbus_fast.aio import MessageBus

@@ -95,7 +95,7 @@ class UDisks2Drive(DBusInterfaceProxy):
         """Return time drive first detected."""
         return datetime.fromtimestamp(
             self.properties[DBUS_ATTR_TIME_DETECTED] * 10**-6
-        ).astimezone(timezone.utc)
+        ).astimezone(UTC)

     @property
     @dbus_property
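
datetime.UTC (added in Python 3.11) is an alias for timezone.utc, so the drive-detection timestamp keeps the same value with the shorter spelling. A quick check (the timestamp value below is made up):

from datetime import UTC, datetime, timezone

ts = 1_700_000_000_000_000  # microseconds, as UDisks2 reports TimeDetected
detected = datetime.fromtimestamp(ts * 10**-6).astimezone(UTC)
assert detected == datetime.fromtimestamp(ts * 10**-6).astimezone(timezone.utc)
print(detected.isoformat())
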
@@ -7,14 +7,12 @@ from typing import TYPE_CHECKING, Any
 from uuid import UUID, uuid4

 import attr
-import voluptuous as vol
-from voluptuous.humanize import humanize_error

 from ..const import ATTR_CONFIG, ATTR_DISCOVERY, FILE_HASSIO_DISCOVERY
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import DiscoveryError, HomeAssistantAPIError
+from ..exceptions import HomeAssistantAPIError
 from ..utils.common import FileConfiguration
-from .validate import SCHEMA_DISCOVERY_CONFIG, valid_discovery_config
+from .validate import SCHEMA_DISCOVERY_CONFIG

 if TYPE_CHECKING:
     from ..addons.addon import Addon
@@ -75,12 +73,6 @@ class Discovery(CoreSysAttributes, FileConfiguration):

     def send(self, addon: Addon, service: str, config: dict[str, Any]) -> Message:
         """Send a discovery message to Home Assistant."""
-        try:
-            config = valid_discovery_config(service, config)
-        except vol.Invalid as err:
-            _LOGGER.error("Invalid discovery %s config", humanize_error(config, err))
-            raise DiscoveryError() from err
-
         # Create message
         message = Message(addon.slug, service, config)

@@ -1 +0,0 @@
-"""Discovery service modules."""
@@ -1,9 +0,0 @@
-"""Discovery service for AdGuard."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
-)
@@ -1,9 +0,0 @@
-"""Discovery service for Almond."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
-)
@@ -1,14 +0,0 @@
-"""Discovery service for MQTT."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_API_KEY, ATTR_HOST, ATTR_PORT, ATTR_SERIAL
-
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-        vol.Required(ATTR_SERIAL): str,
-        vol.Required(ATTR_API_KEY): str,
-    }
-)
@@ -1,9 +0,0 @@
-"""Discovery service for the ESPHome Dashboard."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
-)
@@ -1,16 +0,0 @@
-"""Discovery service for HomeMatic."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {
-        str: vol.Schema(
-            {
-                vol.Required(ATTR_HOST): str,
-                vol.Required(ATTR_PORT): network_port,
-            }
-        )
-    }
-)
@@ -1,13 +0,0 @@
-"""Discovery service for Matter Server."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-    }
-)
@@ -1,6 +0,0 @@
-"""Discovery service for motionEye."""
-import voluptuous as vol
-
-from ..const import ATTR_URL
-
-SCHEMA = vol.Schema({vol.Required(ATTR_URL): str})
@@ -1,26 +0,0 @@
-"""Discovery service for MQTT."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import (
-    ATTR_HOST,
-    ATTR_PASSWORD,
-    ATTR_PORT,
-    ATTR_PROTOCOL,
-    ATTR_SSL,
-    ATTR_USERNAME,
-)
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-        vol.Optional(ATTR_USERNAME): str,
-        vol.Optional(ATTR_PASSWORD): str,
-        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
-        vol.Optional(ATTR_PROTOCOL, default="3.1.1"): vol.All(
-            str, vol.In(["3.1", "3.1.1"])
-        ),
-    }
-)
@@ -1,13 +0,0 @@
-"""Discovery service for OpenThread Border Router."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-    }
-)
@@ -1,15 +0,0 @@
-"""Discovery service for OpenZwave MQTT."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PASSWORD, ATTR_PORT, ATTR_USERNAME
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-        vol.Required(ATTR_USERNAME): str,
-        vol.Required(ATTR_PASSWORD): str,
-    }
-)
@@ -1,9 +0,0 @@
-"""Discovery service for RTSPtoWebRTC."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
-)
@@ -1,9 +0,0 @@
-"""Discovery service for UniFi."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
-)
@@ -1,14 +0,0 @@
-"""Discovery service for VLC Telnet."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PASSWORD, ATTR_PORT
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-        vol.Required(ATTR_PASSWORD): str,
-    }
-)
@@ -1,25 +0,0 @@
-"""Discovery service for the Wyoming Protocol integration."""
-from typing import Any, cast
-from urllib.parse import urlparse
-
-import voluptuous as vol
-
-from ..const import ATTR_URI
-
-
-def validate_uri(value: Any) -> str:
-    """Validate an Wyoming URI.
-
-    Currently accepts TCP URIs, can extended
-    to accept UNIX sockets in the future.
-    """
-    uri_value = str(value)
-
-    if urlparse(uri_value).scheme == "tcp":
-        # pylint: disable-next=no-value-for-parameter
-        return cast(str, vol.Schema(vol.Url())(uri_value))
-
-    raise vol.Invalid("invalid Wyoming Protocol URI")
-
-
-SCHEMA = vol.Schema({vol.Required(ATTR_URI): validate_uri})
@@ -1,13 +0,0 @@
-"""Discovery service for Zwave JS."""
-import voluptuous as vol
-
-from ...validate import network_port
-from ..const import ATTR_HOST, ATTR_PORT
-
-# pylint: disable=no-value-for-parameter
-SCHEMA = vol.Schema(
-    {
-        vol.Required(ATTR_HOST): str,
-        vol.Required(ATTR_PORT): network_port,
-    }
-)
@@ -1,6 +1,4 @@
 """Validate services schema."""
-from importlib import import_module
-from pathlib import Path

 import voluptuous as vol

@@ -8,25 +6,6 @@ from ..const import ATTR_ADDON, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_
 from ..utils.validate import schema_or
 from ..validate import uuid_match

-
-def valid_discovery_service(service):
-    """Validate service name."""
-    service_file = Path(__file__).parent.joinpath(f"services/{service}.py")
-    if not service_file.exists():
-        raise vol.Invalid(f"Service {service} not found") from None
-    return service
-
-
-def valid_discovery_config(service, config):
-    """Validate service name."""
-    try:
-        service_mod = import_module(f".services.{service}", "supervisor.discovery")
-    except ImportError:
-        raise vol.Invalid(f"Service {service} not found") from None
-
-    return service_mod.SCHEMA(config)
-
-
 SCHEMA_DISCOVERY = vol.Schema(
     [
         vol.Schema(
@@ -233,10 +233,10 @@ class DockerAddon(DockerInterface):
         tmpfs = {}

         if self.addon.with_tmpfs:
-            tmpfs["/tmp"] = ""
+            tmpfs["/tmp"] = ""  # noqa: S108

         if not self.addon.host_ipc:
-            tmpfs["/dev/shm"] = ""
+            tmpfs["/dev/shm"] = ""  # noqa: S108

         # Return None if no tmpfs is present
         if tmpfs:
@@ -641,11 +641,11 @@ class DockerAddon(DockerInterface):
     ) -> None:
         """Pull Docker image or build it."""
         if need_build is None and self.addon.need_build or need_build:
-            await self._build(version)
+            await self._build(version, image)
         else:
            await super().install(version, image, latest, arch)

-    async def _build(self, version: AwesomeVersion) -> None:
+    async def _build(self, version: AwesomeVersion, image: str | None = None) -> None:
        """Build a Docker container."""
        build_env = AddonBuild(self.coresys, self.addon)
        if not build_env.is_valid:
@@ -657,7 +657,7 @@ class DockerAddon(DockerInterface):
         image, log = await self.sys_run_in_executor(
             self.sys_docker.images.build,
             use_config_proxy=False,
-            **build_env.get_docker_args(version),
+            **build_env.get_docker_args(version, image),
         )

         _LOGGER.debug("Build %s:%s done: %s", self.image, version, log)
@@ -74,6 +74,7 @@ MOUNT_DBUS = Mount(
     type=MountType.BIND, source="/run/dbus", target="/run/dbus", read_only=True
 )
 MOUNT_DEV = Mount(type=MountType.BIND, source="/dev", target="/dev", read_only=True)
+MOUNT_DEV.setdefault("BindOptions", {})["ReadOnlyNonRecursive"] = True
 MOUNT_DOCKER = Mount(
     type=MountType.BIND,
     source="/run/docker.sock",
@@ -2,6 +2,7 @@
 from collections.abc import Awaitable
 from ipaddress import IPv4Address
 import logging
+import re

 from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
 from docker.types import Mount
@@ -28,6 +29,7 @@ from .interface import CommandReturn, DockerInterface
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 _VERIFY_TRUST: AwesomeVersion = AwesomeVersion("2021.5.0")
 _HASS_DOCKER_NAME: str = "homeassistant"
+ENV_S6_GRACETIME = re.compile(r"^S6_SERVICES_GRACETIME=([0-9]+)$")


 class DockerHomeAssistant(DockerInterface):
@@ -53,10 +55,15 @@ class DockerHomeAssistant(DockerInterface):
     @property
     def timeout(self) -> int:
         """Return timeout for Docker actions."""
-        # Synchronized with the homeassistant core container's S6_SERVICES_GRACETIME
-        # to avoid killing Home Assistant Core, see
+        # Use S6_SERVICES_GRACETIME to avoid killing Home Assistant Core, see
         # https://github.com/home-assistant/core/tree/dev/Dockerfile
-        return 240 + 20
+        if self.meta_config and "Env" in self.meta_config:
+            for env in self.meta_config["Env"]:
+                if match := ENV_S6_GRACETIME.match(env):
+                    return 20 + int(int(match.group(1)) / 1000)
+
+        # Fallback - as of 2024.3, S6 SERVICES_GRACETIME was set to 24000
+        return 260

     @property
     def ip_address(self) -> IPv4Address:
@@ -175,7 +182,7 @@ class DockerHomeAssistant(DockerInterface):
                 ENV_TOKEN: self.sys_homeassistant.supervisor_token,
                 ENV_TOKEN_OLD: self.sys_homeassistant.supervisor_token,
             },
-            tmpfs={"/tmp": ""},
+            tmpfs={"/tmp": ""},  # noqa: S108
             oom_score_adj=-300,
         )
         _LOGGER.info(
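The new timeout derivation is easy to sanity-check in isolation. The standalone sketch below mirrors the regex and arithmetic from the hunk above (the env entries are invented for the example): S6_SERVICES_GRACETIME is given in milliseconds, so an env of S6_SERVICES_GRACETIME=240000 yields 20 + 240000 / 1000 = 260 seconds, the same value as the fallback.

import re

# Same pattern as in the diff above; env entries below are illustrative only.
ENV_S6_GRACETIME = re.compile(r"^S6_SERVICES_GRACETIME=([0-9]+)$")

def docker_timeout(env: list[str]) -> int:
    """Derive the Docker action timeout (seconds) from a container's Env list."""
    for entry in env:
        if match := ENV_S6_GRACETIME.match(entry):
            # Gracetime is in milliseconds; add 20 seconds of headroom on top.
            return 20 + int(int(match.group(1)) / 1000)
    return 260  # fallback when the variable is absent

print(docker_timeout(["TZ=UTC", "S6_SERVICES_GRACETIME=240000"]))  # 260
print(docker_timeout(["TZ=UTC"]))  # 260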
@@ -14,6 +14,7 @@ from awesomeversion import AwesomeVersion
|
|||||||
from awesomeversion.strategy import AwesomeVersionStrategy
|
from awesomeversion.strategy import AwesomeVersionStrategy
|
||||||
import docker
|
import docker
|
||||||
from docker.models.containers import Container
|
from docker.models.containers import Container
|
||||||
|
from docker.models.images import Image
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
from ..const import (
|
from ..const import (
|
||||||
@@ -438,6 +439,44 @@ class DockerInterface(JobGroup):
|
|||||||
)
|
)
|
||||||
self._meta = None
|
self._meta = None
|
||||||
|
|
||||||
|
@Job(
|
||||||
|
name="docker_interface_check_image",
|
||||||
|
limit=JobExecutionLimit.GROUP_ONCE,
|
||||||
|
on_condition=DockerJobError,
|
||||||
|
)
|
||||||
|
async def check_image(
|
||||||
|
self,
|
||||||
|
version: AwesomeVersion,
|
||||||
|
expected_image: str,
|
||||||
|
expected_arch: CpuArch | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""Check we have expected image with correct arch."""
|
||||||
|
expected_arch = expected_arch or self.sys_arch.supervisor
|
||||||
|
image_name = f"{expected_image}:{version!s}"
|
||||||
|
if self.image == expected_image:
|
||||||
|
try:
|
||||||
|
image: Image = await self.sys_run_in_executor(
|
||||||
|
self.sys_docker.images.get, image_name
|
||||||
|
)
|
||||||
|
except (docker.errors.DockerException, requests.RequestException) as err:
|
||||||
|
raise DockerError(
|
||||||
|
f"Could not get {image_name} for check due to: {err!s}",
|
||||||
|
_LOGGER.error,
|
||||||
|
) from err
|
||||||
|
|
||||||
|
image_arch = f"{image.attrs['Os']}/{image.attrs['Architecture']}"
|
||||||
|
if "Variant" in image.attrs:
|
||||||
|
image_arch = f"{image_arch}/{image.attrs['Variant']}"
|
||||||
|
|
||||||
|
# If we have an image and its the right arch, all set
|
||||||
|
if MAP_ARCH[expected_arch] == image_arch:
|
||||||
|
return
|
||||||
|
|
||||||
|
# We're missing the image we need. Stop and clean up what we have then pull the right one
|
||||||
|
with suppress(DockerError):
|
||||||
|
await self.remove()
|
||||||
|
await self.install(version, expected_image, arch=expected_arch)
|
||||||
|
|
||||||
@Job(
|
@Job(
|
||||||
name="docker_interface_update",
|
name="docker_interface_update",
|
||||||
limit=JobExecutionLimit.GROUP_ONCE,
|
limit=JobExecutionLimit.GROUP_ONCE,
|
||||||
|
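For context on the architecture comparison in check_image: the string built from the image attributes follows Docker's os/arch[/variant] platform convention and is matched against MAP_ARCH[expected_arch]. A standalone sketch with invented attribute values:

def image_platform(attrs: dict) -> str:
    """Build the platform string that check_image compares against MAP_ARCH."""
    platform = f"{attrs['Os']}/{attrs['Architecture']}"
    if "Variant" in attrs:
        platform = f"{platform}/{attrs['Variant']}"
    return platform

# Example values only; a 64-bit ARM image typically reports something like:
print(image_platform({"Os": "linux", "Architecture": "arm64", "Variant": "v8"}))
# linux/arm64/v8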
@@ -177,6 +177,11 @@ class DockerAPI:
         if dns:
             kwargs["dns"] = [str(self.network.dns)]
             kwargs["dns_search"] = [DNS_SUFFIX]
+            # CoreDNS forward plug-in fails in ~6s, then fallback triggers.
+            # However, the default timeout of glibc and musl is 5s. Increase
+            # default timeout to make sure CoreDNS fallback is working
+            # on first query.
+            kwargs["dns_opt"] = ["timeout:10"]
         if hostname:
             kwargs["domainname"] = DNS_SUFFIX
@@ -1,4 +1,6 @@
 """Supervisor docker monitor based on events."""
+
+from contextlib import suppress
 from dataclasses import dataclass
 import logging
 from threading import Thread
@@ -47,10 +49,8 @@ class DockerMonitor(CoreSysAttributes, Thread):
     async def unload(self):
         """Stop docker events monitor."""
         self._events.close()
-        try:
+        with suppress(RuntimeError):
             self.join(timeout=5)
-        except RuntimeError:
-            pass

         _LOGGER.info("Stopped docker events monitor")
@@ -133,6 +133,14 @@ class HassOSDataDiskError(HassOSError):
     """Issues with the DataDisk feature from HAOS."""


+class HassOSSlotNotFound(HassOSError):
+    """Could not find boot slot."""
+
+
+class HassOSSlotUpdateError(HassOSError):
+    """Error while updating a slot via rauc."""
+
+
 # All Plugins


@@ -267,6 +275,10 @@ class AuthPasswordResetError(HassioError):
     """Auth error if password reset failed."""


+class AuthListUsersError(HassioError):
+    """Auth error if listing users failed."""
+
+
 # Host


@@ -304,10 +316,24 @@ class HostLogError(HostError):
 class APIError(HassioError, RuntimeError):
     """API errors."""

+    status = 400
+
+    def __init__(
+        self,
+        message: str | None = None,
+        logger: Callable[..., None] | None = None,
+        job_id: str | None = None,
+    ) -> None:
+        """Raise & log, optionally with job."""
+        super().__init__(message, logger)
+        self.job_id = job_id
+

 class APIForbidden(APIError):
     """API forbidden error."""

+    status = 403
+

 class APIAddonNotInstalled(APIError):
     """Not installed addon requested at addons API."""
@@ -483,6 +509,17 @@ class WhoamiConnectivityError(WhoamiError):
     """Connectivity errors while using whoami."""


+# utils/systemd_journal
+
+
+class SystemdJournalError(HassioError):
+    """Error while processing systemd journal logs."""
+
+
+class MalformedBinaryEntryError(SystemdJournalError):
+    """Raised when binary entry in the journal isn't followed by a newline."""
+
+
 # docker/api


@@ -597,6 +634,10 @@ class BackupInvalidError(BackupError):
     """Raise if backup or password provided is invalid."""


+class BackupMountDownError(BackupError):
+    """Raise if mount specified for backup is down."""
+
+
 class BackupJobError(BackupError, JobException):
     """Raise on Backup job error."""
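The reworked APIError carries an HTTP status and an optional job id so API handlers can point clients at the job that failed. A minimal, self-contained sketch of the shape (the real class derives from HassioError and also accepts a logger; the job id below is invented):

class APIErrorSketch(RuntimeError):
    """Simplified stand-in for the extended APIError."""

    status = 400

    def __init__(self, message: str | None = None, job_id: str | None = None) -> None:
        super().__init__(message)
        self.job_id = job_id

try:
    raise APIErrorSketch("Backup failed", job_id="0123abcd")
except APIErrorSketch as err:
    print(err.status, err.job_id)  # 400 0123abcd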
@@ -1,9 +1,9 @@
 """Home Assistant control object."""
 import asyncio
-from contextlib import asynccontextmanager, suppress
-from datetime import datetime, timedelta
+from contextlib import AbstractAsyncContextManager, asynccontextmanager, suppress
+from datetime import UTC, datetime, timedelta
 import logging
-from typing import Any, AsyncContextManager
+from typing import Any

 import aiohttp
 from aiohttp import hdrs
@@ -39,9 +39,8 @@ class HomeAssistantAPI(CoreSysAttributes):
     )
     async def ensure_access_token(self) -> None:
         """Ensure there is an access token."""
-        if (
-            self.access_token is not None
-            and self._access_token_expires > datetime.utcnow()
+        if self.access_token is not None and self._access_token_expires > datetime.now(
+            tz=UTC
         ):
             return

@@ -63,7 +62,7 @@ class HomeAssistantAPI(CoreSysAttributes):
             _LOGGER.info("Updated Home Assistant API token")
             tokens = await resp.json()
             self.access_token = tokens["access_token"]
-            self._access_token_expires = datetime.utcnow() + timedelta(
+            self._access_token_expires = datetime.now(tz=UTC) + timedelta(
                 seconds=tokens["expires_in"]
             )

@@ -78,7 +77,7 @@ class HomeAssistantAPI(CoreSysAttributes):
         timeout: int = 30,
         params: dict[str, str] | None = None,
         headers: dict[str, str] | None = None,
-    ) -> AsyncContextManager[aiohttp.ClientResponse]:
+    ) -> AbstractAsyncContextManager[aiohttp.ClientResponse]:
         """Async context manager to make a request with right auth."""
         url = f"{self.sys_homeassistant.api_url}/{path}"
         headers = headers or {}
@@ -107,7 +106,10 @@ class HomeAssistantAPI(CoreSysAttributes):
                     continue
                 yield resp
                 return
-            except (TimeoutError, aiohttp.ClientError) as err:
+            except TimeoutError:
+                _LOGGER.error("Timeout on call %s.", url)
+                break
+            except aiohttp.ClientError as err:
                 _LOGGER.error("Error on call %s: %s", url, err)
                 break
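The token-expiry changes replace naive datetime.utcnow() values with timezone-aware ones, so the expiry can be compared against datetime.now(tz=UTC) without mixing naive and aware datetimes. A short illustration (the 1800-second lifetime is just an example):

from datetime import UTC, datetime, timedelta

expires = datetime.now(tz=UTC) + timedelta(seconds=1800)
print(expires.tzinfo)  # UTC, so comparisons with datetime.now(tz=UTC) are well-defined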
@@ -1,16 +1,19 @@
 """Constants for homeassistant."""
 from datetime import timedelta
 from enum import StrEnum
+from pathlib import PurePath

 from awesomeversion import AwesomeVersion

 from ..const import CoreState

+ATTR_OVERRIDE_IMAGE = "override_image"
 LANDINGPAGE: AwesomeVersion = AwesomeVersion("landingpage")
 WATCHDOG_RETRY_SECONDS = 10
 WATCHDOG_MAX_ATTEMPTS = 5
 WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
 WATCHDOG_THROTTLE_MAX_CALLS = 10
+SAFE_MODE_FILENAME = PurePath("safe-mode")

 CLOSING_STATES = [
     CoreState.SHUTDOWN,
@@ -35,6 +35,7 @@ from ..utils import convert_to_ascii
 from ..utils.sentry import capture_exception
 from .const import (
     LANDINGPAGE,
+    SAFE_MODE_FILENAME,
     WATCHDOG_MAX_ATTEMPTS,
     WATCHDOG_RETRY_SECONDS,
     WATCHDOG_THROTTLE_MAX_CALLS,
@@ -86,7 +87,16 @@ class HomeAssistantCore(JobGroup):
                     await self.instance.get_latest_version()
                 )

-            await self.instance.attach(version=self.sys_homeassistant.version)
+            await self.instance.attach(
+                version=self.sys_homeassistant.version, skip_state_event_if_down=True
+            )
+
+            # Ensure we are using correct image for this system (unless user has overridden it)
+            if not self.sys_homeassistant.override_image:
+                await self.instance.check_image(
+                    self.sys_homeassistant.version, self.sys_homeassistant.default_image
+                )
+                self.sys_homeassistant.image = self.sys_homeassistant.default_image
         except DockerError:
             _LOGGER.info(
                 "No Home Assistant Docker image %s found.", self.sys_homeassistant.image
@@ -115,7 +125,9 @@ class HomeAssistantCore(JobGroup):
         """Install a landing page."""
         # Try to use a preinstalled landingpage
         try:
-            await self.instance.attach(version=LANDINGPAGE)
+            await self.instance.attach(
+                version=LANDINGPAGE, skip_state_event_if_down=True
+            )
         except DockerError:
             pass
         else:
@@ -351,8 +363,14 @@ class HomeAssistantCore(JobGroup):
         limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=HomeAssistantJobError,
     )
-    async def restart(self) -> None:
+    async def restart(self, *, safe_mode: bool = False) -> None:
         """Restart Home Assistant Docker."""
+        # Create safe mode marker file if necessary
+        if safe_mode:
+            await self.sys_run_in_executor(
+                (self.sys_config.path_homeassistant / SAFE_MODE_FILENAME).touch
+            )
+
         try:
             await self.instance.restart()
         except DockerError as err:
@@ -48,7 +48,7 @@ from ..utils import remove_folder
 from ..utils.common import FileConfiguration
 from ..utils.json import read_json_file, write_json_file
 from .api import HomeAssistantAPI
-from .const import WSType
+from .const import ATTR_OVERRIDE_IMAGE, WSType
 from .core import HomeAssistantCore
 from .secrets import HomeAssistantSecrets
 from .validate import SCHEMA_HASS_CONFIG
@@ -170,18 +170,33 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes):
         """Return last available version of Home Assistant."""
         return self.sys_updater.version_homeassistant

+    @property
+    def default_image(self) -> str:
+        """Return the default image for this system."""
+        return f"ghcr.io/home-assistant/{self.sys_machine}-homeassistant"
+
     @property
     def image(self) -> str:
         """Return image name of the Home Assistant container."""
         if self._data.get(ATTR_IMAGE):
             return self._data[ATTR_IMAGE]
-        return f"ghcr.io/home-assistant/{self.sys_machine}-homeassistant"
+        return self.default_image

     @image.setter
     def image(self, value: str | None) -> None:
         """Set image name of Home Assistant container."""
         self._data[ATTR_IMAGE] = value

+    @property
+    def override_image(self) -> bool:
+        """Return if user has overridden the image to use for Home Assistant."""
+        return self._data[ATTR_OVERRIDE_IMAGE]
+
+    @override_image.setter
+    def override_image(self, value: bool) -> None:
+        """Enable/disable image override."""
+        self._data[ATTR_OVERRIDE_IMAGE] = value
+
     @property
     def version(self) -> AwesomeVersion | None:
         """Return version of local version."""
@@ -18,6 +18,7 @@ from ..const import (
     ATTR_WATCHDOG,
 )
 from ..validate import docker_image, network_port, token, uuid_match, version_tag
+from .const import ATTR_OVERRIDE_IMAGE

 # pylint: disable=no-value-for-parameter
 SCHEMA_HASS_CONFIG = vol.Schema(
@@ -34,6 +35,7 @@ SCHEMA_HASS_CONFIG = vol.Schema(
         vol.Optional(ATTR_AUDIO_OUTPUT, default=None): vol.Maybe(str),
         vol.Optional(ATTR_AUDIO_INPUT, default=None): vol.Maybe(str),
         vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE, default=False): vol.Boolean(),
+        vol.Optional(ATTR_OVERRIDE_IMAGE, default=False): vol.Boolean(),
     },
     extra=vol.REMOVE_EXTRA,
 )
@@ -26,6 +26,7 @@ from ..exceptions import (
     HomeAssistantWSError,
     HomeAssistantWSNotSupported,
 )
+from ..utils.json import json_dumps
 from .const import CLOSING_STATES, WSEvent, WSType

 MIN_VERSION = {
@@ -74,7 +75,7 @@ class WSClient:
         self._message_id += 1
         _LOGGER.debug("Sending: %s", message)
         try:
-            await self._client.send_json(message)
+            await self._client.send_json(message, dumps=json_dumps)
         except ConnectionError as err:
             raise HomeAssistantWSConnectionError(err) from err

@@ -85,7 +86,7 @@ class WSClient:
         self._futures[message["id"]] = self._loop.create_future()
         _LOGGER.debug("Sending: %s", message)
         try:
-            await self._client.send_json(message)
+            await self._client.send_json(message, dumps=json_dumps)
         except ConnectionError as err:
             raise HomeAssistantWSConnectionError(err) from err

@@ -163,7 +164,9 @@ class WSClient:

         hello_message = await client.receive_json()

-        await client.send_json({ATTR_TYPE: WSType.AUTH, ATTR_ACCESS_TOKEN: token})
+        await client.send_json(
+            {ATTR_TYPE: WSType.AUTH, ATTR_ACCESS_TOKEN: token}, dumps=json_dumps
+        )

         auth_ok_message = await client.receive_json()
@@ -1,6 +1,7 @@
 """AppArmor control for host."""
 from __future__ import annotations

+from contextlib import suppress
 import errno
 import logging
 from pathlib import Path
@@ -62,10 +63,8 @@ class AppArmorControl(CoreSysAttributes):
         # Load profiles
         if self.available:
             for profile_name in self._profiles:
-                try:
+                with suppress(HostAppArmorError):
                     await self._load_profile(profile_name)
-                except HostAppArmorError:
-                    pass
         else:
             _LOGGER.warning("AppArmor is not enabled on host")
@@ -62,3 +62,10 @@ class LogFormat(StrEnum):
     JOURNAL = "application/vnd.fdo.journal"
     JSON = "application/json"
     TEXT = "text/plain"
+
+
+class LogFormatter(StrEnum):
+    """Log formatter."""
+
+    PLAIN = "plain"
+    VERBOSE = "verbose"
@@ -129,6 +129,11 @@ class InfoCenter(CoreSysAttributes):
             self.coresys.config.path_supervisor
         )

+    @property
+    def virtualization(self) -> str | None:
+        """Return virtualization hypervisor being used."""
+        return self.sys_dbus.systemd.virtualization
+
     async def get_dmesg(self) -> bytes:
         """Return host dmesg output."""
         proc = await asyncio.create_subprocess_shell(
@@ -7,12 +7,18 @@ import logging
 from pathlib import Path

 from aiohttp import ClientError, ClientSession, ClientTimeout
+from aiohttp.client_exceptions import UnixClientConnectorError
 from aiohttp.client_reqrep import ClientResponse
 from aiohttp.connector import UnixConnector
 from aiohttp.hdrs import ACCEPT, RANGE

 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import ConfigurationFileError, HostLogError, HostNotSupportedError
+from ..exceptions import (
+    ConfigurationFileError,
+    HostLogError,
+    HostNotSupportedError,
+    HostServiceError,
+)
 from ..utils.json import read_json_file
 from .const import PARAM_BOOT_ID, PARAM_SYSLOG_IDENTIFIER, LogFormat

@@ -69,8 +75,7 @@ class LogsControl(CoreSysAttributes):
         )

     async def get_boot_id(self, offset: int = 0) -> str:
-        """
-        Get ID of a boot by offset.
+        """Get ID of a boot by offset.

         Current boot is offset = 0, negative numbers go that many in the past.
         Positive numbers count up from the oldest boot.
@@ -139,16 +144,21 @@ class LogsControl(CoreSysAttributes):
                 "No systemd-journal-gatewayd Unix socket available", _LOGGER.error
             )

-        async with ClientSession(
-            connector=UnixConnector(path="/run/systemd-journal-gatewayd.sock")
-        ) as session:
-            headers = {ACCEPT: accept}
-            if range_header:
-                headers[RANGE] = range_header
-            async with session.get(
-                f"http://localhost{path}",
-                headers=headers,
-                params=params or {},
-                timeout=timeout,
-            ) as client_response:
-                yield client_response
+        try:
+            async with ClientSession(
+                connector=UnixConnector(path=str(SYSTEMD_JOURNAL_GATEWAYD_SOCKET))
+            ) as session:
+                headers = {ACCEPT: accept}
+                if range_header:
+                    headers[RANGE] = range_header
+                async with session.get(
+                    f"http://localhost{path}",
+                    headers=headers,
+                    params=params or {},
+                    timeout=timeout,
+                ) as client_response:
+                    yield client_response
+        except UnixClientConnectorError as ex:
+            raise HostServiceError(
+                "Unable to connect to systemd-journal-gatewayd", _LOGGER.error
+            ) from ex
@@ -127,6 +127,7 @@ class HostManager(CoreSysAttributes):
     async def reload(self):
         """Reload host functions."""
         await self.info.update()
+        await self.sys_os.reload()

         if self.sys_dbus.systemd.is_connected:
             await self.services.update()
@@ -155,11 +155,10 @@ class SoundControl(CoreSysAttributes):
                             stream = pulse.source_output_info(index)
                         else:
                             stream = pulse.source_info(index)
+                    elif application:
+                        stream = pulse.sink_input_info(index)
                     else:
-                        if application:
-                            stream = pulse.sink_input_info(index)
-                        else:
-                            stream = pulse.sink_info(index)
+                        stream = pulse.sink_info(index)

                     # Set volume
                     pulse.volume_set_all_chans(stream, volume)
@@ -190,11 +189,10 @@ class SoundControl(CoreSysAttributes):
                             stream = pulse.source_output_info(index)
                         else:
                             stream = pulse.source_info(index)
+                    elif application:
+                        stream = pulse.sink_input_info(index)
                     else:
-                        if application:
-                            stream = pulse.sink_input_info(index)
-                        else:
-                            stream = pulse.sink_info(index)
+                        stream = pulse.sink_info(index)

                     # Mute stream
                     pulse.mute(stream, mute)
@@ -1,7 +1,11 @@
 """Supervisor job manager."""
-from collections.abc import Callable
+
+import asyncio
+from collections.abc import Awaitable, Callable
 from contextlib import contextmanager
 from contextvars import Context, ContextVar, Token
+from dataclasses import dataclass
+from datetime import datetime
 import logging
 from typing import Any
 from uuid import UUID, uuid4
@@ -10,8 +14,9 @@ from attrs import Attribute, define, field
 from attrs.setters import convert as attr_convert, frozen, validate as attr_validate
 from attrs.validators import ge, le

+from ..const import BusEvent
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import JobNotFound, JobStartException
+from ..exceptions import HassioError, JobNotFound, JobStartException
 from ..homeassistant.const import WSEvent
 from ..utils.common import FileConfiguration
 from ..utils.sentry import capture_exception
@@ -27,6 +32,14 @@ _CURRENT_JOB: ContextVar[UUID] = ContextVar("current_job")
 _LOGGER: logging.Logger = logging.getLogger(__name__)


+@dataclass
+class JobSchedulerOptions:
+    """Options for scheduling a job."""
+
+    start_at: datetime | None = None
+    delayed_start: float = 0  # Ignored if start_at is set
+
+
 def _remove_current_job(context: Context) -> Context:
     """Remove the current job from the context."""
     context.run(_CURRENT_JOB.set, None)
@@ -48,11 +61,29 @@ def _on_change(instance: "SupervisorJob", attribute: Attribute, value: Any) -> A
     return value


+def _invalid_if_started(instance: "SupervisorJob", *_) -> None:
+    """Validate that job has not been started."""
+    if instance.done is not None:
+        raise ValueError("Field cannot be updated once job has started")
+
+
+@define
+class SupervisorJobError:
+    """Representation of an error occurring during a supervisor job."""
+
+    type_: type[HassioError] = HassioError
+    message: str = "Unknown error, see supervisor logs"
+
+    def as_dict(self) -> dict[str, str]:
+        """Return dictionary representation."""
+        return {"type": self.type_.__name__, "message": self.message}
+
+
 @define
 class SupervisorJob:
     """Representation of a job running in supervisor."""

-    name: str = field(on_setattr=frozen)
+    name: str | None = field(default=None, validator=[_invalid_if_started])
     reference: str | None = field(default=None, on_setattr=_on_change)
     progress: float = field(
         default=0,
@@ -65,13 +96,17 @@ class SupervisorJob:
     )
     uuid: UUID = field(init=False, factory=lambda: uuid4().hex, on_setattr=frozen)
     parent_id: UUID | None = field(
-        init=False, factory=lambda: _CURRENT_JOB.get(None), on_setattr=frozen
+        factory=lambda: _CURRENT_JOB.get(None), on_setattr=frozen
     )
     done: bool | None = field(init=False, default=None, on_setattr=_on_change)
     on_change: Callable[["SupervisorJob", Attribute, Any], None] | None = field(
         default=None, on_setattr=frozen
     )
-    internal: bool = field(default=False, on_setattr=frozen)
+    internal: bool = field(default=False)
+    errors: list[SupervisorJobError] = field(
+        init=False, factory=list, on_setattr=_on_change
+    )
+    release_event: asyncio.Event | None = None

     def as_dict(self) -> dict[str, Any]:
         """Return dictionary representation."""
@@ -83,8 +118,17 @@ class SupervisorJob:
             "stage": self.stage,
             "done": self.done,
             "parent_id": self.parent_id,
+            "errors": [err.as_dict() for err in self.errors],
         }

+    def capture_error(self, err: HassioError | None = None) -> None:
+        """Capture an error or record that an unknown error has occurred."""
+        if err:
+            new_error = SupervisorJobError(type(err), str(err))
+        else:
+            new_error = SupervisorJobError()
+        self.errors += [new_error]
+
     @contextmanager
     def start(self):
         """Start the job in the current task.
@@ -156,17 +200,27 @@ class JobManager(FileConfiguration, CoreSysAttributes):
     def _notify_on_job_change(
         self, job: SupervisorJob, attribute: Attribute, value: Any
     ) -> None:
-        """Notify Home Assistant of a change to a job."""
+        """Notify Home Assistant of a change to a job and bus on job start/end."""
+        if attribute.name == "errors":
+            value = [err.as_dict() for err in value]
+
         self.sys_homeassistant.websocket.supervisor_event(
-            WSEvent.JOB, job.as_dict() | {attribute.alias: value}
+            WSEvent.JOB, job.as_dict() | {attribute.name: value}
         )

+        if attribute.name == "done":
+            if value is False:
+                self.sys_bus.fire_event(BusEvent.SUPERVISOR_JOB_START, job.uuid)
+            if value is True:
+                self.sys_bus.fire_event(BusEvent.SUPERVISOR_JOB_END, job.uuid)
+
     def new_job(
         self,
-        name: str,
+        name: str | None = None,
         reference: str | None = None,
         initial_stage: str | None = None,
         internal: bool = False,
+        no_parent: bool = False,
     ) -> SupervisorJob:
         """Create a new job."""
         job = SupervisorJob(
@@ -175,6 +229,7 @@ class JobManager(FileConfiguration, CoreSysAttributes):
             stage=initial_stage,
             on_change=None if internal else self._notify_on_job_change,
             internal=internal,
+            **({"parent_id": None} if no_parent else {}),
         )
         self._jobs[job.uuid] = job
         return job
@@ -194,3 +249,30 @@ class JobManager(FileConfiguration, CoreSysAttributes):
             _LOGGER.warning("Removing incomplete job %s from job manager", job.name)

         del self._jobs[job.uuid]
+
+        # Clean up any completed sub jobs of this one
+        for sub_job in self.jobs:
+            if sub_job.parent_id == job.uuid and job.done:
+                self.remove_job(sub_job)
+
+    def schedule_job(
+        self,
+        job_method: Callable[..., Awaitable[Any]],
+        options: JobSchedulerOptions,
+        *args,
+        **kwargs,
+    ) -> tuple[SupervisorJob, asyncio.Task | asyncio.TimerHandle]:
+        """Schedule a job to run later and return job and task or timer handle."""
+        job = self.new_job(no_parent=True)
+
+        def _wrap_task() -> asyncio.Task:
+            return self.sys_create_task(
+                job_method(*args, _job__use_existing=job, **kwargs)
+            )
+
+        if options.start_at:
+            return (job, self.sys_call_at(options.start_at, _wrap_task))
+        if options.delayed_start:
+            return (job, self.sys_call_later(options.delayed_start, _wrap_task))
+
+        return (job, _wrap_task())
|
|||||||
ATTR_IGNORE_CONDITIONS = "ignore_conditions"
|
ATTR_IGNORE_CONDITIONS = "ignore_conditions"
|
||||||
|
|
||||||
JOB_GROUP_ADDON = "addon_{slug}"
|
JOB_GROUP_ADDON = "addon_{slug}"
|
||||||
|
JOB_GROUP_BACKUP = "backup_{slug}"
|
||||||
JOB_GROUP_BACKUP_MANAGER = "backup_manager"
|
JOB_GROUP_BACKUP_MANAGER = "backup_manager"
|
||||||
JOB_GROUP_DOCKER_INTERFACE = "container_{name}"
|
JOB_GROUP_DOCKER_INTERFACE = "container_{name}"
|
||||||
JOB_GROUP_HOME_ASSISTANT_CORE = "home_assistant_core"
|
JOB_GROUP_HOME_ASSISTANT_CORE = "home_assistant_core"
|
||||||
|
@@ -1,6 +1,7 @@
 """Job decorator."""
 import asyncio
 from collections.abc import Callable
+from contextlib import suppress
 from datetime import datetime, timedelta
 from functools import wraps
 import logging
@@ -17,6 +18,7 @@ from ..exceptions import (
 from ..host.const import HostFeature
 from ..resolution.const import MINIMUM_FREE_SPACE_THRESHOLD, ContextType, IssueType
 from ..utils.sentry import capture_exception
+from . import SupervisorJob
 from .const import JobCondition, JobExecutionLimit
 from .job_group import JobGroup

@@ -145,10 +147,8 @@ class Job(CoreSysAttributes):
     def _post_init(self, obj: JobGroup | CoreSysAttributes) -> JobGroup | None:
         """Runtime init."""
         # Coresys
-        try:
+        with suppress(AttributeError):
             self.coresys = obj.coresys
-        except AttributeError:
-            pass
         if not self.coresys:
             raise RuntimeError(f"Job on {self.name} need to be an coresys object!")

@@ -157,22 +157,23 @@ class Job(CoreSysAttributes):
             self._lock = asyncio.Semaphore()

         # Job groups
-        if self.limit in (
+        try:
+            is_job_group = obj.acquire and obj.release
+        except AttributeError:
+            is_job_group = False
+
+        if not is_job_group and self.limit in (
             JobExecutionLimit.GROUP_ONCE,
             JobExecutionLimit.GROUP_WAIT,
             JobExecutionLimit.GROUP_THROTTLE,
             JobExecutionLimit.GROUP_THROTTLE_WAIT,
             JobExecutionLimit.GROUP_THROTTLE_RATE_LIMIT,
         ):
-            try:
-                _ = obj.acquire and obj.release
-            except AttributeError:
-                raise RuntimeError(
-                    f"Job on {self.name} need to be a JobGroup to use group based limits!"
-                ) from None
+            raise RuntimeError(
+                f"Job on {self.name} need to be a JobGroup to use group based limits!"
+            ) from None

-            return obj
-        return None
+        return obj if is_job_group else None

     def _handle_job_condition_exception(self, err: JobConditionException) -> None:
         """Handle a job condition failure."""
@@ -187,7 +188,13 @@ class Job(CoreSysAttributes):
         self._method = method

         @wraps(method)
-        async def wrapper(obj: JobGroup | CoreSysAttributes, *args, **kwargs) -> Any:
+        async def wrapper(
+            obj: JobGroup | CoreSysAttributes,
+            *args,
+            _job__use_existing: SupervisorJob | None = None,
+            _job_override__cleanup: bool | None = None,
+            **kwargs,
+        ) -> Any:
             """Wrap the method.

             This method must be on an instance of CoreSysAttributes. If a JOB_GROUP limit
@@ -195,11 +202,18 @@ class Job(CoreSysAttributes):
             """
             job_group = self._post_init(obj)
             group_name: str | None = job_group.group_name if job_group else None
-            job = self.sys_jobs.new_job(
-                self.name,
-                job_group.job_reference if job_group else None,
-                internal=self._internal,
-            )
+            if _job__use_existing:
+                job = _job__use_existing
+                job.name = self.name
+                job.internal = self._internal
+                if job_group:
+                    job.reference = job_group.job_reference
+            else:
+                job = self.sys_jobs.new_job(
+                    self.name,
+                    job_group.job_reference if job_group else None,
+                    internal=self._internal,
+                )

             try:
                 # Handle condition
@@ -293,9 +307,11 @@ class Job(CoreSysAttributes):
             except JobConditionException as err:
                 return self._handle_job_condition_exception(err)
             except HassioError as err:
+                job.capture_error(err)
                 raise err
             except Exception as err:
                 _LOGGER.exception("Unhandled exception: %s", err)
+                job.capture_error()
                 capture_exception(err)
                 raise JobException() from err
             finally:
@@ -308,7 +324,12 @@ class Job(CoreSysAttributes):

             # Jobs that weren't started are always cleaned up. Also clean up done jobs if required
             finally:
-                if job.done is None or self.cleanup:
+                if (
+                    job.done is None
+                    or _job_override__cleanup
+                    or _job_override__cleanup is None
+                    and self.cleanup
+                ):
                     self.sys_jobs.remove_job(job)

         return wrapper
@@ -2,9 +2,9 @@

 from asyncio import Lock

-from . import SupervisorJob
 from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import JobException, JobGroupExecutionLimitExceeded
+from . import SupervisorJob


 class JobGroup(CoreSysAttributes):
@@ -5,7 +5,6 @@ from datetime import date, datetime, time, timedelta
 import logging
 from uuid import UUID, uuid4

-import async_timeout
 import attr

 from ..const import CoreState
@@ -74,7 +73,7 @@ class Scheduler(CoreSysAttributes):
     def _schedule_task(self, task: _Task) -> None:
         """Schedule a task on loop."""
         if isinstance(task.interval, (int, float)):
-            task.next = self.sys_loop.call_later(task.interval, self._run_task, task)
+            task.next = self.sys_call_later(task.interval, self._run_task, task)
         elif isinstance(task.interval, time):
             today = datetime.combine(date.today(), task.interval)
             tomorrow = datetime.combine(date.today() + timedelta(days=1), task.interval)
@@ -85,7 +84,7 @@ class Scheduler(CoreSysAttributes):
             else:
                 calc = tomorrow

-            task.next = self.sys_loop.call_at(calc.timestamp(), self._run_task, task)
+            task.next = self.sys_call_at(calc, self._run_task, task)
         else:
             _LOGGER.critical(
                 "Unknown interval %s (type: %s) for scheduler %s",
@@ -113,7 +112,7 @@ class Scheduler(CoreSysAttributes):

         # Wait until all are shutdown
         try:
-            async with async_timeout.timeout(timeout):
+            async with asyncio.timeout(timeout):
                 await asyncio.wait(running)
         except TimeoutError:
             _LOGGER.error("Timeout while waiting for jobs shutdown")
Some files were not shown because too many files have changed in this diff.