Compare commits


2 Commits

Author SHA1 Message Date
J. Nick Koston
af3256e41e Significantly speed up creating backups with isal via zlib-fast
isal is a drop-in replacement for zlib, with the
caveat that the compression level mappings are different.
zlib-fast is a tiny piece of middleware that converts
the standard zlib compression levels to isal compression
levels to allow for drop-in replacement.

https://github.com/bdraco/zlib-fast/releases/tag/v0.1.0
https://github.com/pycompression/python-isal

Compression for backups is ~5x faster than the baseline

https://github.com/powturbo/TurboBench/issues/43
2024-01-27 13:06:41 -10:00
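For illustration, a minimal sketch of how the swap works, mirroring the zlib_fast.enable() call this comparison adds to supervisor/__main__.py. The gzip usage and the assumption that enable() patches the stdlib zlib in place are illustrative, not taken from the repo:

import zlib_fast

# Patch the stdlib zlib with the isal-backed implementation before importing
# anything that compresses (assumption: enable() swaps zlib in place, so
# later imports of gzip/tarfile/securetar pick it up unchanged).
zlib_fast.enable()

import gzip  # imported after enable(), so it sees the fast zlib

payload = b"backup data" * 100_000
# compresslevel is a standard zlib level (0-9); zlib-fast maps it to isal.
compressed = gzip.compress(payload, compresslevel=6)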
J. Nick Koston
a163121ad4 Fix dirhash failing to import pkg_resources
dirhash needs pkg_resources, which is provided by setuptools.

https://github.com/home-assistant/supervisor/actions/runs/7513346221/job/20454994962
2024-01-14 00:02:12 -10:00
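For context, a hypothetical sketch of the import failure being fixed (the actual change pins setuptools in requirements.txt; the guard below is illustrative only):

# dirhash pulls in pkg_resources, which ships with setuptools rather than
# the standard library, so a slim environment raises ModuleNotFoundError.
try:
    import pkg_resources  # noqa: F401
except ModuleNotFoundError as err:
    raise SystemExit("setuptools is missing; add it to requirements.txt") from err

import dirhash  # imports cleanly once pkg_resources resolves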
4329 changed files with 506165 additions and 20053 deletions

View File

@@ -1,51 +1,38 @@
{ {
"name": "Supervisor dev", "name": "Supervisor dev",
"image": "ghcr.io/home-assistant/devcontainer:2-supervisor", "image": "ghcr.io/home-assistant/devcontainer:supervisor",
"containerEnv": { "containerEnv": {
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}" "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
}, },
"remoteEnv": {
"PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
},
"appPort": ["9123:8123", "7357:4357"], "appPort": ["9123:8123", "7357:4357"],
"postCreateCommand": "bash devcontainer_setup", "postCreateCommand": "bash devcontainer_bootstrap",
"postStartCommand": "bash devcontainer_bootstrap",
"runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"], "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
"customizations": { "customizations": {
"vscode": { "vscode": {
"extensions": [ "extensions": [
"charliermarsh.ruff", "ms-python.python",
"ms-python.pylint", "ms-python.pylint",
"ms-python.vscode-pylance", "ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode", "visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml", "esbenp.prettier-vscode"
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github"
], ],
"settings": { "settings": {
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
"python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"terminal.integrated.profiles.linux": { "terminal.integrated.profiles.linux": {
"zsh": { "zsh": {
"path": "/usr/bin/zsh" "path": "/usr/bin/zsh"
} }
}, },
"terminal.integrated.defaultProfile.linux": "zsh", "terminal.integrated.defaultProfile.linux": "zsh",
"[python]": { "editor.formatOnPaste": false,
"editor.defaultFormatter": "charliermarsh.ruff" "editor.formatOnSave": true,
} "editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"python.pythonPath": "/usr/local/bin/python3",
"python.formatting.provider": "black",
"python.formatting.blackArgs": ["--target-version", "py312"],
"python.formatting.blackPath": "/usr/local/bin/black"
} }
} }
}, },
"mounts": [ "mounts": ["type=volume,target=/var/lib/docker"]
"type=volume,target=/var/lib/docker",
"type=volume,target=/mnt/supervisor"
]
} }

View File

@@ -38,7 +38,6 @@
- This PR is related to issue: - This PR is related to issue:
- Link to documentation pull request: - Link to documentation pull request:
- Link to cli pull request: - Link to cli pull request:
- Link to client library pull request:
## Checklist ## Checklist
@@ -53,14 +52,12 @@
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass** - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR. - [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist] - [ ] I have followed the [development checklist][dev-checklist]
- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`) - [ ] The code has been formatted using Black (`black --fast supervisor tests`)
- [ ] Tests have been added to verify that the new code works. - [ ] Tests have been added to verify that the new code works.
If API endpoints or add-on configuration are added/changed: If API endpoints of add-on configuration are added/changed:
- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository] - [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
- [ ] [CLI][cli-repository] updated (if necessary)
- [ ] [Client library][client-library-repository] updated (if necessary)
<!-- <!--
Thank you for contributing <3 Thank you for contributing <3
@@ -70,5 +67,3 @@ If API endpoints or add-on configuration are added/changed:
[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html [dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[docs-repository]: https://github.com/home-assistant/developers.home-assistant [docs-repository]: https://github.com/home-assistant/developers.home-assistant
[cli-repository]: https://github.com/home-assistant/cli
[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/

View File

@@ -33,7 +33,7 @@ on:
- setup.py - setup.py
env: env:
DEFAULT_PYTHON: "3.13" DEFAULT_PYTHON: "3.12"
BUILD_NAME: supervisor BUILD_NAME: supervisor
BUILD_TYPE: supervisor BUILD_TYPE: supervisor
@@ -53,7 +53,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }} requirements: ${{ steps.requirements.outputs.changed }}
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
@@ -92,7 +92,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }} arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
@@ -106,7 +106,7 @@ jobs:
- name: Build wheels - name: Build wheels
if: needs.init.outputs.requirements == 'true' if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2024.11.0 uses: home-assistant/wheels@2024.01.0
with: with:
abi: cp312 abi: cp312
tag: musllinux_1_2 tag: musllinux_1_2
@@ -125,15 +125,15 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.4.0 uses: actions/setup-python@v5.0.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Install Cosign - name: Install Cosign
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.8.0 uses: sigstore/cosign-installer@v3.3.0
with: with:
cosign-release: "v2.4.0" cosign-release: "v2.0.2"
- name: Install dirhash and calc hash - name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
@@ -149,7 +149,7 @@ jobs:
- name: Login to GitHub Container Registry - name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v3.3.0 uses: docker/login-action@v3.0.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor - name: Build supervisor
uses: home-assistant/builder@2024.08.2 uses: home-assistant/builder@2024.01.0
with: with:
args: | args: |
$BUILD_ARGS \ $BUILD_ARGS \
@@ -178,7 +178,7 @@ jobs:
steps: steps:
- name: Checkout the repository - name: Checkout the repository
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Initialize git - name: Initialize git
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
@@ -203,11 +203,11 @@ jobs:
timeout-minutes: 60 timeout-minutes: 60
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Build the Supervisor - name: Build the Supervisor
if: needs.init.outputs.publish != 'true' if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2024.08.2 uses: home-assistant/builder@2024.01.0
with: with:
args: | args: |
--test \ --test \

View File

@@ -8,7 +8,7 @@ on:
pull_request: ~ pull_request: ~
env: env:
DEFAULT_PYTHON: "3.13" DEFAULT_PYTHON: "3.12"
PRE_COMMIT_CACHE: ~/.cache/pre-commit PRE_COMMIT_CACHE: ~/.cache/pre-commit
concurrency: concurrency:
@@ -25,15 +25,15 @@ jobs:
name: Prepare Python dependencies name: Prepare Python dependencies
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Set up Python - name: Set up Python
id: python id: python
uses: actions/setup-python@v5.4.0 uses: actions/setup-python@v5.0.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v4.2.0 uses: actions/cache@v3.3.3
with: with:
path: venv path: venv
key: | key: |
@@ -47,7 +47,7 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache@v4.2.0 uses: actions/cache@v3.3.3
with: with:
path: ${{ env.PRE_COMMIT_CACHE }} path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true lookup-only: true
@@ -61,21 +61,21 @@ jobs:
. venv/bin/activate . venv/bin/activate
pre-commit install-hooks pre-commit install-hooks
lint-ruff-format: lint-black:
name: Check ruff-format name: Check black
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }} - name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0 uses: actions/setup-python@v5.0.0
id: python id: python
with: with:
python-version: ${{ needs.prepare.outputs.python-version }} python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v4.2.0 uses: actions/cache@v3.3.3
with: with:
path: venv path: venv
key: | key: |
@@ -85,67 +85,10 @@ jobs:
run: | run: |
echo "Failed to restore Python virtual environment from cache" echo "Failed to restore Python virtual environment from cache"
exit 1 exit 1
- name: Restore pre-commit environment from cache - name: Run black
id: cache-precommit
uses: actions/cache@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run ruff-format
run: | run: |
. venv/bin/activate . venv/bin/activate
pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure black --target-version py312 --check supervisor tests setup.py
env:
RUFF_OUTPUT_FORMAT: github
lint-ruff:
name: Check ruff
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.0
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run ruff
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
lint-dockerfile: lint-dockerfile:
name: Check Dockerfile name: Check Dockerfile
@@ -153,7 +96,7 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Register hadolint problem matcher - name: Register hadolint problem matcher
run: | run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json" echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,15 +111,15 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }} - name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0 uses: actions/setup-python@v5.0.0
id: python id: python
with: with:
python-version: ${{ needs.prepare.outputs.python-version }} python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v4.2.0 uses: actions/cache@v3.3.3
with: with:
path: venv path: venv
key: | key: |
@@ -188,7 +131,7 @@ jobs:
exit 1 exit 1
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache@v4.2.0 uses: actions/cache@v3.3.3
with: with:
path: ${{ env.PRE_COMMIT_CACHE }} path: ${{ env.PRE_COMMIT_CACHE }}
key: | key: |
@@ -206,21 +149,53 @@ jobs:
. venv/bin/activate . venv/bin/activate
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
lint-json: lint-flake8:
name: Check JSON name: Check flake8
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }} - name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0 uses: actions/setup-python@v5.0.0
id: python id: python
with: with:
python-version: ${{ needs.prepare.outputs.python-version }} python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v4.2.0 uses: actions/cache@v3.3.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Register flake8 problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/flake8.json"
- name: Run flake8
run: |
. venv/bin/activate
flake8 supervisor tests
lint-isort:
name: Check isort
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
with: with:
path: venv path: venv
key: | key: |
@@ -232,7 +207,48 @@ jobs:
exit 1 exit 1
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache@v4.2.0 uses: actions/cache@v3.3.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run isort
run: |
. venv/bin/activate
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
lint-json:
name: Check JSON
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.3
with: with:
path: ${{ env.PRE_COMMIT_CACHE }} path: ${{ env.PRE_COMMIT_CACHE }}
key: | key: |
@@ -256,15 +272,15 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }} - name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0 uses: actions/setup-python@v5.0.0
id: python id: python
with: with:
python-version: ${{ needs.prepare.outputs.python-version }} python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v4.2.0 uses: actions/cache@v3.3.3
with: with:
path: venv path: venv
key: | key: |
@@ -274,10 +290,6 @@ jobs:
run: | run: |
echo "Failed to restore Python virtual environment from cache" echo "Failed to restore Python virtual environment from cache"
exit 1 exit 1
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0
- name: Register pylint problem matcher - name: Register pylint problem matcher
run: | run: |
echo "::add-matcher::.github/workflows/matchers/pylint.json" echo "::add-matcher::.github/workflows/matchers/pylint.json"
@@ -286,25 +298,66 @@ jobs:
. venv/bin/activate . venv/bin/activate
pylint supervisor tests pylint supervisor tests
lint-pyupgrade:
name: Check pyupgrade
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run pyupgrade
run: |
. venv/bin/activate
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
pytest: pytest:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: prepare needs: prepare
name: Run tests Python ${{ needs.prepare.outputs.python-version }} name: Run tests Python ${{ needs.prepare.outputs.python-version }}
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }} - name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0 uses: actions/setup-python@v5.0.0
id: python id: python
with: with:
python-version: ${{ needs.prepare.outputs.python-version }} python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign - name: Install Cosign
uses: sigstore/cosign-installer@v3.8.0 uses: sigstore/cosign-installer@v3.3.0
with: with:
cosign-release: "v2.4.0" cosign-release: "v2.0.2"
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v4.2.0 uses: actions/cache@v3.3.3
with: with:
path: venv path: venv
key: | key: |
@@ -317,7 +370,7 @@ jobs:
- name: Install additional system dependencies - name: Install additional system dependencies
run: | run: |
sudo apt-get update sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11
- name: Register Python problem matcher - name: Register Python problem matcher
run: | run: |
echo "::add-matcher::.github/workflows/matchers/python.json" echo "::add-matcher::.github/workflows/matchers/python.json"
@@ -339,11 +392,10 @@ jobs:
-o console_output_style=count \ -o console_output_style=count \
tests tests
- name: Upload coverage artifact - name: Upload coverage artifact
uses: actions/upload-artifact@v4.6.0 uses: actions/upload-artifact@v4.0.0
with: with:
name: coverage-${{ matrix.python-version }} name: coverage-${{ matrix.python-version }}
path: .coverage path: .coverage
include-hidden-files: true
coverage: coverage:
name: Process test coverage name: Process test coverage
@@ -351,15 +403,15 @@ jobs:
needs: ["pytest", "prepare"] needs: ["pytest", "prepare"]
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }} - name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.4.0 uses: actions/setup-python@v5.0.0
id: python id: python
with: with:
python-version: ${{ needs.prepare.outputs.python-version }} python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v4.2.0 uses: actions/cache@v3.3.3
with: with:
path: venv path: venv
key: | key: |
@@ -370,7 +422,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache" echo "Failed to restore Python virtual environment from cache"
exit 1 exit 1
- name: Download all coverage artifacts - name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.8 uses: actions/download-artifact@v4.1.1
- name: Combine coverage results - name: Combine coverage results
run: | run: |
. venv/bin/activate . venv/bin/activate
@@ -378,4 +430,4 @@ jobs:
coverage report coverage report
coverage xml coverage xml
- name: Upload coverage to Codecov - name: Upload coverage to Codecov
uses: codecov/codecov-action@v5.3.1 uses: codecov/codecov-action@v3.1.4

.github/workflows/matchers/flake8.json vendored Normal file (30 changed lines)
View File

@@ -0,0 +1,30 @@
{
"problemMatcher": [
{
"owner": "flake8-error",
"severity": "error",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
},
{
"owner": "flake8-warning",
"severity": "warning",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
}
]
}

View File

@@ -11,7 +11,7 @@ jobs:
name: Release Drafter name: Release Drafter
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
@@ -36,7 +36,7 @@ jobs:
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT" echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
- name: Run Release Drafter - name: Run Release Drafter
uses: release-drafter/release-drafter@v6.1.0 uses: release-drafter/release-drafter@v5.25.0
with: with:
tag: ${{ steps.version.outputs.version }} tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }} name: ${{ steps.version.outputs.version }}

View File

@@ -10,9 +10,9 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v4.2.2 uses: actions/checkout@v4.1.1
- name: Sentry Release - name: Sentry Release
uses: getsentry/action-release@v1.10.4 uses: getsentry/action-release@v1.6.0
env: env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }} SENTRY_ORG: ${{ secrets.SENTRY_ORG }}

View File

@@ -9,7 +9,7 @@ jobs:
stale: stale:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/stale@v9.1.0 - uses: actions/stale@v9.0.0
with: with:
repo-token: ${{ secrets.GITHUB_TOKEN }} repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30 days-before-stale: 30

View File

@@ -1,74 +0,0 @@
name: Update frontend
on:
schedule: # once a day
- cron: "0 0 * * *"
workflow_dispatch:
jobs:
check-version:
runs-on: ubuntu-latest
outputs:
skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
latest_tag: ${{ steps.latest_frontend_version.outputs.latest_tag }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Get latest frontend release
id: latest_frontend_version
uses: abatilo/release-info-action@v1.3.3
with:
owner: home-assistant
repo: frontend
- name: Check if version is up to date
id: check_version
run: |
SUPERVISOR_VERSION=$(cat .ha-frontend-version)
LATEST_VERSION=${{ steps.latest_frontend_version.outputs.latest_tag }}
echo "SUPERVISOR_VERSION=$SUPERVISOR_VERSION" >> $GITHUB_ENV
echo "LATEST_VERSION=$LATEST_VERSION" >> $GITHUB_ENV
if [[ ! "$SUPERVISOR_VERSION" < "$LATEST_VERSION" ]]; then
echo "Frontend version is up to date"
echo "skip=true" >> $GITHUB_OUTPUT
fi
- name: Check if there is no open PR with this version
if: steps.check_version.outputs.skip != 'true'
id: check_existing_pr
env:
GH_TOKEN: ${{ github.token }}
run: |
PR=$(gh pr list --state open --base main --json title --search "Autoupdate frontend to version $LATEST_VERSION")
if [[ "$PR" != "[]" ]]; then
echo "Skipping - There is already a PR open for version $LATEST_VERSION"
echo "skip=true" >> $GITHUB_OUTPUT
fi
create-pr:
runs-on: ubuntu-latest
needs: check-version
if: needs.check-version.outputs.skip != 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Clear www folder
run: |
rm -rf supervisor/api/panel/*
- name: Update version file
run: |
echo "${{ needs.check-version.outputs.latest_tag }}" > .ha-frontend-version
- name: Download release assets
uses: robinraju/release-downloader@v1
with:
repository: 'home-assistant/frontend'
tag: ${{ needs.check-version.outputs.latest_tag }}
fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_tag }}.tar.gz
extract: true
out-file-path: supervisor/api/panel/
- name: Create PR
uses: peter-evans/create-pull-request@v7
with:
commit-message: "Autoupdate frontend to version ${{ needs.check-version.outputs.latest_tag }}"
branch: autoupdate-frontend
base: main
draft: true
sign-commits: true
title: "Autoupdate frontend to version ${{ needs.check-version.outputs.latest_tag }}"

.gitmodules vendored Normal file (4 changed lines)
View File

@@ -0,0 +1,4 @@
[submodule "home-assistant-polymer"]
path = home-assistant-polymer
url = https://github.com/home-assistant/home-assistant-polymer
branch = dev

View File

@@ -1 +0,0 @@
20250205.0

View File

@@ -1,15 +1,34 @@
repos: repos:
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/psf/black
rev: v0.9.1 rev: 23.12.1
hooks: hooks:
- id: ruff - id: black
args: args:
- --fix - --safe
- id: ruff-format - --quiet
- --target-version
- py312
files: ^((supervisor|tests)/.+)?[^/]+\.py$ files: ^((supervisor|tests)/.+)?[^/]+\.py$
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
hooks:
- id: flake8
additional_dependencies:
- flake8-docstrings==1.7.0
- pydocstyle==6.3.0
files: ^(supervisor|script|tests)/.+\.py$
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0 rev: v4.5.0
hooks: hooks:
- id: check-executables-have-shebangs - id: check-executables-have-shebangs
stages: [manual] stages: [manual]
- id: check-json - id: check-json
- repo: https://github.com/PyCQA/isort
rev: 5.13.2
hooks:
- id: isort
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.0
hooks:
- id: pyupgrade
args: [--py312-plus]

.vscode/tasks.json vendored (18 changed lines)
View File

@@ -58,23 +58,9 @@
"problemMatcher": [] "problemMatcher": []
}, },
{ {
"label": "Ruff Check", "label": "Flake8",
"type": "shell", "type": "shell",
"command": "ruff check --fix supervisor tests", "command": "flake8 supervisor tests",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Ruff Format",
"type": "shell",
"command": "ruff format supervisor tests",
"group": { "group": {
"kind": "test", "kind": "test",
"isDefault": true "isDefault": true

View File

@@ -4,8 +4,7 @@ FROM ${BUILD_FROM}
ENV \ ENV \
S6_SERVICES_GRACETIME=10000 \ S6_SERVICES_GRACETIME=10000 \
SUPERVISOR_API=http://localhost \ SUPERVISOR_API=http://localhost \
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \ CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1
UV_SYSTEM_PYTHON=true
ARG \ ARG \
COSIGN_VERSION \ COSIGN_VERSION \
@@ -27,17 +26,14 @@ RUN \
yaml \ yaml \
\ \
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \ && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign \ && chmod a+x /usr/bin/cosign
&& pip3 install uv==0.2.21
# Install requirements # Install requirements
COPY requirements.txt . COPY requirements.txt .
RUN \ RUN \
if [ "${BUILD_ARCH}" = "i386" ]; then \ export MAKEFLAGS="-j$(nproc)" \
linux32 uv pip install --no-build -r requirements.txt; \ && pip3 install --only-binary=:all: \
else \ -r ./requirements.txt \
uv pip install --no-build -r requirements.txt; \
fi \
&& rm -f requirements.txt && rm -f requirements.txt
# Install Home Assistant Supervisor # Install Home Assistant Supervisor

View File

@@ -30,5 +30,3 @@ Releases are done in 3 stages (channels) with this structure:
[development]: https://developers.home-assistant.io/docs/supervisor/development [development]: https://developers.home-assistant.io/docs/supervisor/development
[stable]: https://github.com/home-assistant/version/blob/master/stable.json [stable]: https://github.com/home-assistant/version/blob/master/stable.json
[![Home Assistant - A project from the Open Home Foundation](https://www.openhomefoundation.org/badges/home-assistant.png)](https://www.openhomefoundation.org/)

View File

@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor image: ghcr.io/home-assistant/{arch}-hassio-supervisor
build_from: build_from:
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.21 aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.18
armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.21 armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.18
armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.21 armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.18
amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.21 amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.18
i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.21 i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.18
codenotary: codenotary:
signer: notary@home-assistant.io signer: notary@home-assistant.io
base_image: notary@home-assistant.io base_image: notary@home-assistant.io
@@ -12,7 +12,7 @@ cosign:
base_identity: https://github.com/home-assistant/docker-base/.* base_identity: https://github.com/home-assistant/docker-base/.*
identity: https://github.com/home-assistant/supervisor/.* identity: https://github.com/home-assistant/supervisor/.*
args: args:
COSIGN_VERSION: 2.4.0 COSIGN_VERSION: 2.0.2
labels: labels:
io.hass.type: supervisor io.hass.type: supervisor
org.opencontainers.image.title: Home Assistant Supervisor org.opencontainers.image.title: Home Assistant Supervisor

View File

@@ -1,5 +1,5 @@
[build-system] [build-system]
requires = ["setuptools~=75.8.0", "wheel~=0.45.0"] requires = ["setuptools~=68.0.0", "wheel~=0.40.0"]
build-backend = "setuptools.build_meta" build-backend = "setuptools.build_meta"
[project] [project]
@@ -12,7 +12,7 @@ authors = [
{ name = "The Home Assistant Authors", email = "hello@home-assistant.io" }, { name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
] ]
keywords = ["docker", "home-assistant", "api"] keywords = ["docker", "home-assistant", "api"]
requires-python = ">=3.13.0" requires-python = ">=3.12.0"
[project.urls] [project.urls]
"Homepage" = "https://www.home-assistant.io/" "Homepage" = "https://www.home-assistant.io/"
@@ -31,7 +31,7 @@ include-package-data = true
include = ["supervisor*"] include = ["supervisor*"]
[tool.pylint.MAIN] [tool.pylint.MAIN]
py-version = "3.13" py-version = "3.11"
# Use a conservative default here; 2 should speed up most setups and not hurt # Use a conservative default here; 2 should speed up most setups and not hurt
# any too bad. Override on command line as appropriate. # any too bad. Override on command line as appropriate.
jobs = 2 jobs = 2
@@ -44,7 +44,7 @@ good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]
[tool.pylint."MESSAGES CONTROL"] [tool.pylint."MESSAGES CONTROL"]
# Reasons disabled: # Reasons disabled:
# format - handled by ruff # format - handled by black
# abstract-method - with intro of async there are always methods missing # abstract-method - with intro of async there are always methods missing
# cyclic-import - doesn't test if both import on load # cyclic-import - doesn't test if both import on load
# duplicate-code - unavoidable # duplicate-code - unavoidable
@@ -71,136 +71,6 @@ disable = [
"too-many-statements", "too-many-statements",
"unused-argument", "unused-argument",
"consider-using-with", "consider-using-with",
# Handled by ruff
# Ref: <https://github.com/astral-sh/ruff/issues/970>
"await-outside-async", # PLE1142
"bad-str-strip-call", # PLE1310
"bad-string-format-type", # PLE1307
"bidirectional-unicode", # PLE2502
"continue-in-finally", # PLE0116
"duplicate-bases", # PLE0241
"format-needs-mapping", # F502
"function-redefined", # F811
# Needed because ruff does not understand type of __all__ generated by a function
# "invalid-all-format", # PLE0605
"invalid-all-object", # PLE0604
"invalid-character-backspace", # PLE2510
"invalid-character-esc", # PLE2513
"invalid-character-nul", # PLE2514
"invalid-character-sub", # PLE2512
"invalid-character-zero-width-space", # PLE2515
"logging-too-few-args", # PLE1206
"logging-too-many-args", # PLE1205
"missing-format-string-key", # F524
"mixed-format-string", # F506
"no-method-argument", # N805
"no-self-argument", # N805
"nonexistent-operator", # B002
"nonlocal-without-binding", # PLE0117
"not-in-loop", # F701, F702
"notimplemented-raised", # F901
"return-in-init", # PLE0101
"return-outside-function", # F706
"syntax-error", # E999
"too-few-format-args", # F524
"too-many-format-args", # F522
"too-many-star-expressions", # F622
"truncated-format-string", # F501
"undefined-all-variable", # F822
"undefined-variable", # F821
"used-prior-global-declaration", # PLE0118
"yield-inside-async-function", # PLE1700
"yield-outside-function", # F704
"anomalous-backslash-in-string", # W605
"assert-on-string-literal", # PLW0129
"assert-on-tuple", # F631
"bad-format-string", # W1302, F
"bad-format-string-key", # W1300, F
"bare-except", # E722
"binary-op-exception", # PLW0711
"cell-var-from-loop", # B023
# "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
"duplicate-except", # B014
"duplicate-key", # F601
"duplicate-string-formatting-argument", # F
"duplicate-value", # F
"eval-used", # PGH001
"exec-used", # S102
# "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
"f-string-without-interpolation", # F541
"forgotten-debug-statement", # T100
"format-string-without-interpolation", # F
# "global-statement", # PLW0603, ruff catches new occurrences, needs more work
"global-variable-not-assigned", # PLW0602
"implicit-str-concat", # ISC001
"import-self", # PLW0406
"inconsistent-quotes", # Q000
"invalid-envvar-default", # PLW1508
"keyword-arg-before-vararg", # B026
"logging-format-interpolation", # G
"logging-fstring-interpolation", # G
"logging-not-lazy", # G
"misplaced-future", # F404
"named-expr-without-context", # PLW0131
"nested-min-max", # PLW3301
# "pointless-statement", # B018, ruff catches new occurrences, needs more work
"raise-missing-from", # TRY200
# "redefined-builtin", # A001, ruff is way more stricter, needs work
"try-except-raise", # TRY203
"unused-argument", # ARG001, we don't use it
"unused-format-string-argument", #F507
"unused-format-string-key", # F504
"unused-import", # F401
"unused-variable", # F841
"useless-else-on-loop", # PLW0120
"wildcard-import", # F403
"bad-classmethod-argument", # N804
"consider-iterating-dictionary", # SIM118
"empty-docstring", # D419
"invalid-name", # N815
"line-too-long", # E501, disabled globally
"missing-class-docstring", # D101
"missing-final-newline", # W292
"missing-function-docstring", # D103
"missing-module-docstring", # D100
"multiple-imports", #E401
"singleton-comparison", # E711, E712
"subprocess-run-check", # PLW1510
"superfluous-parens", # UP034
"ungrouped-imports", # I001
"unidiomatic-typecheck", # E721
"unnecessary-direct-lambda-call", # PLC3002
"unnecessary-lambda-assignment", # PLC3001
"unneeded-not", # SIM208
"useless-import-alias", # PLC0414
"wrong-import-order", # I001
"wrong-import-position", # E402
"comparison-of-constants", # PLR0133
"comparison-with-itself", # PLR0124
# "consider-alternative-union-syntax", # UP007, typing extension
"consider-merging-isinstance", # PLR1701
# "consider-using-alias", # UP006, typing extension
"consider-using-dict-comprehension", # C402
"consider-using-generator", # C417
"consider-using-get", # SIM401
"consider-using-set-comprehension", # C401
"consider-using-sys-exit", # PLR1722
"consider-using-ternary", # SIM108
"literal-comparison", # F632
"property-with-parameters", # PLR0206
"super-with-arguments", # UP008
"too-many-branches", # PLR0912
"too-many-return-statements", # PLR0911
"too-many-statements", # PLR0915
"trailing-comma-tuple", # COM818
"unnecessary-comprehension", # C416
"use-a-generator", # C417
"use-dict-literal", # C406
"use-list-literal", # C405
"useless-object-inheritance", # UP004
"useless-return", # PLR1711
# "no-self-use", # PLR6301 # Optional plugin, not enabled
] ]
[tool.pylint.REPORTS] [tool.pylint.REPORTS]
@@ -215,15 +85,11 @@ expected-line-ending-format = "LF"
[tool.pylint.EXCEPTIONS] [tool.pylint.EXCEPTIONS]
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
[tool.pylint.DESIGN]
max-positional-arguments = 10
[tool.pytest.ini_options] [tool.pytest.ini_options]
testpaths = ["tests"] testpaths = ["tests"]
norecursedirs = [".git"] norecursedirs = [".git"]
log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s" log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
log_date_format = "%Y-%m-%d %H:%M:%S" log_date_format = "%Y-%m-%d %H:%M:%S"
asyncio_default_fixture_loop_scope = "function"
asyncio_mode = "auto" asyncio_mode = "auto"
filterwarnings = [ filterwarnings = [
"error", "error",
@@ -231,144 +97,16 @@ filterwarnings = [
"ignore::pytest.PytestUnraisableExceptionWarning", "ignore::pytest.PytestUnraisableExceptionWarning",
] ]
[tool.ruff] [tool.isort]
lint.select = [ multi_line_output = 3
"B002", # Python does not support the unary prefix increment include_trailing_comma = true
"B007", # Loop control variable {name} not used within loop body force_grid_wrap = 0
"B014", # Exception handler with duplicate exception line_length = 88
"B023", # Function definition does not bind loop variable {name} indent = " "
"B026", # Star-arg unpacking after a keyword argument is strongly discouraged force_sort_within_sections = true
"B904", # Use raise from to specify exception cause sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
"C", # complexity default_section = "THIRDPARTY"
"COM818", # Trailing comma on bare tuple prohibited forced_separate = "tests"
"D", # docstrings combine_as_imports = true
"DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow() use_parentheses = true
"DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts) known_first_party = ["supervisor", "tests"]
"E", # pycodestyle
"F", # pyflakes/autoflake
"G", # flake8-logging-format
"I", # isort
"ICN001", # import concentions; {name} should be imported as {asname}
"N804", # First argument of a class method should be named cls
"N805", # First argument of a method should be named self
"N815", # Variable {name} in class scope should not be mixedCase
"PGH004", # Use specific rule codes when using noqa
"PLC0414", # Useless import alias. Import alias does not rename original package.
"PLC", # pylint
"PLE", # pylint
"PLR", # pylint
"PLW", # pylint
"Q000", # Double quotes found but single quotes preferred
"RUF006", # Store a reference to the return value of asyncio.create_task
"S102", # Use of exec detected
"S103", # bad-file-permissions
"S108", # hardcoded-temp-file
"S306", # suspicious-mktemp-usage
"S307", # suspicious-eval-usage
"S313", # suspicious-xmlc-element-tree-usage
"S314", # suspicious-xml-element-tree-usage
"S315", # suspicious-xml-expat-reader-usage
"S316", # suspicious-xml-expat-builder-usage
"S317", # suspicious-xml-sax-usage
"S318", # suspicious-xml-mini-dom-usage
"S319", # suspicious-xml-pull-dom-usage
"S320", # suspicious-xmle-tree-usage
"S601", # paramiko-call
"S602", # subprocess-popen-with-shell-equals-true
"S604", # call-with-shell-equals-true
"S608", # hardcoded-sql-expression
"S609", # unix-command-wildcard-injection
"SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
"SIM117", # Merge with-statements that use the same scope
"SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
"SIM201", # Use {left} != {right} instead of not {left} == {right}
"SIM208", # Use {expr} instead of not (not {expr})
"SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
"SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
"SIM401", # Use get from dict with default instead of an if block
"T100", # Trace found: {name} used
"T20", # flake8-print
"TID251", # Banned imports
"TRY004", # Prefer TypeError exception for invalid type
"TRY203", # Remove exception handler; error is immediately re-raised
"UP", # pyupgrade
"W", # pycodestyle
]
lint.ignore = [
"D202", # No blank lines allowed after function docstring
"D203", # 1 blank line required before class docstring
"D213", # Multi-line docstring summary should start at the second line
"D406", # Section name should end with a newline
"D407", # Section name underlining
"E501", # line too long
"E731", # do not assign a lambda expression, use a def
# Ignore ignored, as the rule is now back in preview/nursery, which cannot
# be ignored anymore without warnings.
# https://github.com/astral-sh/ruff/issues/7491
# "PLC1901", # Lots of false positives
# False positives https://github.com/astral-sh/ruff/issues/5386
"PLC0208", # Use a sequence type instead of a `set` when iterating over values
"PLR0911", # Too many return statements ({returns} > {max_returns})
"PLR0912", # Too many branches ({branches} > {max_branches})
"PLR0913", # Too many arguments to function call ({c_args} > {max_args})
"PLR0915", # Too many statements ({statements} > {max_statements})
"PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
"PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
"UP006", # keep type annotation style as is
"UP007", # keep type annotation style as is
# Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
# May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"W191",
"E111",
"E114",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"ISC001",
"ISC002",
# Disabled because ruff does not understand type of __all__ generated by a function
"PLE0605",
]
[tool.ruff.lint.flake8-import-conventions.extend-aliases]
voluptuous = "vol"
[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false
[tool.ruff.lint.flake8-tidy-imports.banned-api]
"pytz".msg = "use zoneinfo instead"
[tool.ruff.lint.isort]
force-sort-within-sections = true
section-order = [
"future",
"standard-library",
"third-party",
"first-party",
"local-folder",
]
forced-separate = ["tests"]
known-first-party = ["supervisor", "tests"]
combine-as-imports = true
split-on-trailing-comma = false
[tool.ruff.lint.per-file-ignores]
# DBus Service Mocks must use typing and names understood by dbus-fast
"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
[tool.ruff.lint.mccabe]
max-complexity = 25

View File

@@ -1,29 +1,30 @@
aiodns==3.2.0 aiodns==3.1.1
aiohttp==3.11.12 aiohttp==3.9.1
aiohttp-fast-url-dispatcher==0.3.0
async_timeout==4.0.3
atomicwrites-homeassistant==1.4.1 atomicwrites-homeassistant==1.4.1
attrs==25.1.0 attrs==23.2.0
awesomeversion==24.6.0 awesomeversion==23.11.0
brotli==1.1.0 brotli==1.1.0
ciso8601==2.3.2 ciso8601==2.3.1
colorlog==6.9.0 colorlog==6.8.0
cpe==1.3.1 cpe==1.2.1
cryptography==44.0.0 cryptography==41.0.7
debugpy==1.8.12 debugpy==1.8.0
deepmerge==2.0 deepmerge==1.1.1
dirhash==0.5.0 dirhash==0.2.1
docker==7.1.0 docker==7.0.0
faust-cchardet==2.1.19 faust-cchardet==2.1.19
gitpython==3.1.44 gitpython==3.1.41
jinja2==3.1.5 jinja2==3.1.3
orjson==3.10.12 orjson==3.9.10
pulsectl==24.12.0 pulsectl==23.5.2
pyudev==0.24.3 pyudev==0.24.1
PyYAML==6.0.2 PyYAML==6.0.1
requests==2.32.3 securetar==2023.12.0
securetar==2025.1.4 sentry-sdk==1.39.2
sentry-sdk==2.20.0 setuptools==69.0.3
setuptools==75.8.0 voluptuous==0.14.1
voluptuous==0.15.2 dbus-fast==2.21.0
dbus-fast==2.33.0 typing_extensions==4.9.0
typing_extensions==4.12.2 zlib-fast==0.1.0
zlib-fast==0.2.0

View File

@@ -1,13 +1,16 @@
astroid==3.3.8 black==23.12.1
coverage==7.6.12 coverage==7.4.0
pre-commit==4.1.0 flake8-docstrings==1.7.0
pylint==3.3.4 flake8==7.0.0
pytest-aiohttp==1.1.0 pre-commit==3.6.0
pytest-asyncio==0.25.2 pydocstyle==6.3.0
pytest-cov==6.0.0 pylint==3.0.3
pytest-timeout==2.3.1 pytest-aiohttp==1.0.5
pytest==8.3.4 pytest-asyncio==0.23.3
ruff==0.9.6 pytest-cov==4.1.0
time-machine==2.16.0 pytest-timeout==2.2.0
typing_extensions==4.12.2 pytest==7.4.4
urllib3==2.3.0 pyupgrade==3.15.0
time-machine==2.13.0
typing_extensions==4.9.0
urllib3==2.1.0

scripts/update-frontend.sh Executable file (30 changed lines)
View File

@@ -0,0 +1,30 @@
#!/bin/bash
source "/etc/supervisor_scripts/common"
set -e
# Update frontend
git submodule update --init --recursive --remote
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
cd home-assistant-polymer
nvm install
script/bootstrap
# Download translations
start_docker
./script/translations_download
# build frontend
cd hassio
./script/build_hassio
# Copy frontend
rm -rf ../../supervisor/api/panel/*
cp -rf build/* ../../supervisor/api/panel/
# Reset frontend git
cd ..
git reset --hard HEAD
stop_docker

setup.cfg Normal file (17 changed lines)
View File

@@ -0,0 +1,17 @@
[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
doctests = True
max-line-length = 88
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# W504 line break after binary operator
ignore =
E501,
W503,
E203,
D202,
W504
per-file-ignores =
tests/dbus_service_mocks/*.py: F821,F722

View File

@@ -1,5 +1,4 @@
"""Home Assistant Supervisor setup.""" """Home Assistant Supervisor setup."""
from pathlib import Path from pathlib import Path
import re import re
@@ -19,7 +18,7 @@ def _get_supervisor_version():
for line in CONSTANTS.split("/n"): for line in CONSTANTS.split("/n"):
if match := RE_SUPERVISOR_VERSION.match(line): if match := RE_SUPERVISOR_VERSION.match(line):
return match.group(1) return match.group(1)
return "9999.09.9.dev9999" return "99.9.9dev"
setup( setup(

View File

@@ -1,5 +1,4 @@
"""Main file for Supervisor.""" """Main file for Supervisor."""
import asyncio import asyncio
from concurrent.futures import ThreadPoolExecutor from concurrent.futures import ThreadPoolExecutor
import logging import logging
@@ -11,10 +10,8 @@ import zlib_fast
# Enable fast zlib before importing supervisor # Enable fast zlib before importing supervisor
zlib_fast.enable() zlib_fast.enable()
from supervisor import bootstrap # pylint: disable=wrong-import-position # noqa: E402 from supervisor import bootstrap # noqa: E402
from supervisor.utils.logging import ( # pylint: disable=wrong-import-position # noqa: E402 from supervisor.utils.logging import activate_log_queue_handler # noqa: E402
activate_log_queue_handler,
)
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)

View File

@@ -1,12 +1,9 @@
"""Init file for Supervisor add-ons.""" """Init file for Supervisor add-ons."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
from contextlib import suppress from contextlib import suppress
from copy import deepcopy from copy import deepcopy
from datetime import datetime
import errno import errno
from functools import partial
from ipaddress import IPv4Address from ipaddress import IPv4Address
import logging import logging
from pathlib import Path, PurePath from pathlib import Path, PurePath
@@ -18,14 +15,11 @@ from tempfile import TemporaryDirectory
from typing import Any, Final from typing import Any, Final
import aiohttp import aiohttp
from awesomeversion import AwesomeVersionCompareException
from deepmerge import Merger from deepmerge import Merger
from securetar import atomic_contents_add, secure_path from securetar import atomic_contents_add, secure_path
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
from supervisor.utils.dt import utc_from_timestamp
from ..bus import EventListener from ..bus import EventListener
from ..const import ( from ..const import (
ATTR_ACCESS_TOKEN, ATTR_ACCESS_TOKEN,
@@ -48,17 +42,13 @@ from ..const import (
ATTR_SLUG, ATTR_SLUG,
ATTR_STATE, ATTR_STATE,
ATTR_SYSTEM, ATTR_SYSTEM,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TYPE, ATTR_TYPE,
ATTR_USER, ATTR_USER,
ATTR_UUID, ATTR_UUID,
ATTR_VERSION, ATTR_VERSION,
ATTR_VERSION_TIMESTAMP,
ATTR_WATCHDOG, ATTR_WATCHDOG,
DNS_SUFFIX, DNS_SUFFIX,
AddonBoot, AddonBoot,
AddonBootConfig,
AddonStartup, AddonStartup,
AddonState, AddonState,
BusEvent, BusEvent,
@@ -82,8 +72,7 @@ from ..hardware.data import Device
from ..homeassistant.const import WSEvent, WSType from ..homeassistant.const import WSEvent, WSType
from ..jobs.const import JobExecutionLimit from ..jobs.const import JobExecutionLimit
from ..jobs.decorator import Job from ..jobs.decorator import Job
from ..resolution.const import ContextType, IssueType, UnhealthyReason from ..resolution.const import UnhealthyReason
from ..resolution.data import Issue
from ..store.addon import AddonStore from ..store.addon import AddonStore
from ..utils import check_port from ..utils import check_port
from ..utils.apparmor import adjust_profile from ..utils.apparmor import adjust_profile
@@ -146,27 +135,11 @@ class Addon(AddonModel):
self._listeners: list[EventListener] = [] self._listeners: list[EventListener] = []
self._startup_event = asyncio.Event() self._startup_event = asyncio.Event()
self._startup_task: asyncio.Task | None = None self._startup_task: asyncio.Task | None = None
self._boot_failed_issue = Issue(
IssueType.BOOT_FAIL, ContextType.ADDON, reference=self.slug
)
self._device_access_missing_issue = Issue(
IssueType.DEVICE_ACCESS_MISSING, ContextType.ADDON, reference=self.slug
)
def __repr__(self) -> str: def __repr__(self) -> str:
"""Return internal representation.""" """Return internal representation."""
return f"<Addon: {self.slug}>" return f"<Addon: {self.slug}>"
@property
def boot_failed_issue(self) -> Issue:
"""Get issue used if start on boot failed."""
return self._boot_failed_issue
@property
def device_access_missing_issue(self) -> Issue:
"""Get issue used if device access is missing and can't be automatically added."""
return self._device_access_missing_issue
@property @property
def state(self) -> AddonState: def state(self) -> AddonState:
"""Return state of the add-on.""" """Return state of the add-on."""
@@ -184,20 +157,6 @@ class Addon(AddonModel):
if new_state == AddonState.STARTED or old_state == AddonState.STARTUP: if new_state == AddonState.STARTED or old_state == AddonState.STARTUP:
self._startup_event.set() self._startup_event.set()
# Dismiss boot failed issue if present and we started
if (
new_state == AddonState.STARTED
and self.boot_failed_issue in self.sys_resolution.issues
):
self.sys_resolution.dismiss_issue(self.boot_failed_issue)
# Dismiss device access missing issue if present and we stopped
if (
new_state == AddonState.STOPPED
and self.device_access_missing_issue in self.sys_resolution.issues
):
self.sys_resolution.dismiss_issue(self.device_access_missing_issue)
self.sys_homeassistant.websocket.send_message( self.sys_homeassistant.websocket.send_message(
{ {
ATTR_TYPE: WSType.SUPERVISOR_EVENT, ATTR_TYPE: WSType.SUPERVISOR_EVENT,
@@ -216,9 +175,6 @@ class Addon(AddonModel):
async def load(self) -> None: async def load(self) -> None:
"""Async initialize of object.""" """Async initialize of object."""
if self.is_detached:
await super().refresh_path_cache()
self._listeners.append( self._listeners.append(
self.sys_bus.register_event( self.sys_bus.register_event(
BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed
@@ -231,20 +187,9 @@ class Addon(AddonModel):
) )
await self._check_ingress_port() await self._check_ingress_port()
default_image = self._image(self.data) with suppress(DockerError):
try:
await self.instance.attach(version=self.version) await self.instance.attach(version=self.version)
# Ensure we are using correct image for this system
await self.instance.check_image(self.version, default_image, self.arch)
except DockerError:
_LOGGER.info("No %s addon Docker image %s found", self.slug, self.image)
with suppress(DockerError):
await self.instance.install(self.version, default_image, arch=self.arch)
self.persist[ATTR_IMAGE] = default_image
self.save_persist()
@property @property
def ip_address(self) -> IPv4Address: def ip_address(self) -> IPv4Address:
"""Return IP of add-on instance.""" """Return IP of add-on instance."""
@@ -280,34 +225,6 @@ class Addon(AddonModel):
"""Return True if add-on is detached.""" """Return True if add-on is detached."""
return self.slug not in self.sys_store.data.addons return self.slug not in self.sys_store.data.addons
@property
def with_icon(self) -> bool:
"""Return True if an icon exists."""
if self.is_detached:
return super().with_icon
return self.addon_store.with_icon
@property
def with_logo(self) -> bool:
"""Return True if a logo exists."""
if self.is_detached:
return super().with_logo
return self.addon_store.with_logo
@property
def with_changelog(self) -> bool:
"""Return True if a changelog exists."""
if self.is_detached:
return super().with_changelog
return self.addon_store.with_changelog
@property
def with_documentation(self) -> bool:
"""Return True if a documentation exists."""
if self.is_detached:
return super().with_documentation
return self.addon_store.with_documentation
@property @property
def available(self) -> bool: def available(self) -> bool:
"""Return True if this add-on is available on this platform.""" """Return True if this add-on is available on this platform."""
@@ -344,9 +261,7 @@ class Addon(AddonModel):
@property @property
def boot(self) -> AddonBoot: def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced.""" """Return boot config with prio local settings."""
if self.boot_config == AddonBootConfig.MANUAL_ONLY:
return super().boot
return self.persist.get(ATTR_BOOT, super().boot) return self.persist.get(ATTR_BOOT, super().boot)
@boot.setter @boot.setter
@@ -354,13 +269,6 @@ class Addon(AddonModel):
"""Store user boot options.""" """Store user boot options."""
self.persist[ATTR_BOOT] = value self.persist[ATTR_BOOT] = value
# Dismiss boot failed issue if present and boot at start disabled
if (
value == AddonBoot.MANUAL
and self._boot_failed_issue in self.sys_resolution.issues
):
self.sys_resolution.dismiss_issue(self._boot_failed_issue)
@property @property
def auto_update(self) -> bool: def auto_update(self) -> bool:
"""Return if auto update is enable.""" """Return if auto update is enable."""
@@ -371,28 +279,6 @@ class Addon(AddonModel):
"""Set auto update.""" """Set auto update."""
self.persist[ATTR_AUTO_UPDATE] = value self.persist[ATTR_AUTO_UPDATE] = value
@property
def auto_update_available(self) -> bool:
"""Return if it is safe to auto update addon."""
if not self.need_update or not self.auto_update:
return False
for version in self.breaking_versions:
try:
# Must update to latest so if true update crosses a breaking version
if self.version < version:
return False
except AwesomeVersionCompareException:
# If version scheme changed, we may get compare exception
# If latest version >= breaking version then assume update will
# cross it as the version scheme changes
# If both versions have compare exception, ignore as its in the past
with suppress(AwesomeVersionCompareException):
if self.latest_version >= version:
return False
return True
@property @property
def watchdog(self) -> bool: def watchdog(self) -> bool:
"""Return True if watchdog is enable.""" """Return True if watchdog is enable."""
@@ -408,37 +294,6 @@ class Addon(AddonModel):
else: else:
self.persist[ATTR_WATCHDOG] = value self.persist[ATTR_WATCHDOG] = value
@property
def system_managed(self) -> bool:
"""Return True if addon is managed by Home Assistant."""
return self.persist[ATTR_SYSTEM_MANAGED]
@system_managed.setter
def system_managed(self, value: bool) -> None:
"""Set system managed enable/disable."""
if not value and self.system_managed_config_entry:
self.system_managed_config_entry = None
self.persist[ATTR_SYSTEM_MANAGED] = value
@property
def system_managed_config_entry(self) -> str | None:
"""Return id of config entry managing this addon (if any)."""
if not self.system_managed:
return None
return self.persist.get(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY)
@system_managed_config_entry.setter
def system_managed_config_entry(self, value: str | None) -> None:
"""Set ID of config entry managing this addon."""
if not self.system_managed:
_LOGGER.warning(
"Ignoring system managed config entry for %s because it is not system managed",
self.slug,
)
else:
self.persist[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] = value
@property @property
def uuid(self) -> str: def uuid(self) -> str:
"""Return an API token for this add-on.""" """Return an API token for this add-on."""
@@ -466,11 +321,6 @@ class Addon(AddonModel):
"""Return version of add-on.""" """Return version of add-on."""
return self.data_store[ATTR_VERSION] return self.data_store[ATTR_VERSION]
@property
def latest_version_timestamp(self) -> datetime:
"""Return when latest version was first seen."""
return utc_from_timestamp(self.data_store[ATTR_VERSION_TIMESTAMP])
@property @property
def protected(self) -> bool: def protected(self) -> bool:
"""Return if add-on is in protected mode.""" """Return if add-on is in protected mode."""
@@ -805,12 +655,10 @@ class Addon(AddonModel):
limit=JobExecutionLimit.GROUP_ONCE, limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError, on_condition=AddonsJobError,
) )
async def uninstall( async def uninstall(self) -> None:
self, *, remove_config: bool, remove_image: bool = True
) -> None:
"""Uninstall and cleanup this addon.""" """Uninstall and cleanup this addon."""
try: try:
await self.instance.remove(remove_image=remove_image) await self.instance.remove()
except DockerError as err: except DockerError as err:
raise AddonsError() from err raise AddonsError() from err
@@ -818,10 +666,6 @@ class Addon(AddonModel):
await self.unload() await self.unload()
# Remove config if present and requested
if self.addon_config_used and remove_config:
await remove_data(self.path_config)
# Cleanup audio settings # Cleanup audio settings
if self.path_pulse.exists(): if self.path_pulse.exists():
with suppress(OSError): with suppress(OSError):
@@ -926,7 +770,6 @@ class Addon(AddonModel):
raise AddonsError() from err raise AddonsError() from err
self.sys_addons.data.update(self.addon_store) self.sys_addons.data.update(self.addon_store)
await self._check_ingress_port()
_LOGGER.info("Add-on '%s' successfully rebuilt", self.slug) _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)
finally: finally:
@@ -1208,25 +1051,6 @@ class Addon(AddonModel):
await self._backup_command(self.backup_post) await self._backup_command(self.backup_post)
return None return None
def _is_excluded_by_filter(
self, origin_path: Path, arcname: str, item_arcpath: PurePath
) -> bool:
"""Filter out files from backup based on filters provided by addon developer.
This tests the dev provided filters against the full path of the file as
Supervisor sees them using match. This is done for legacy reasons, testing
against the relative path makes more sense and may be changed in the future.
"""
full_path = origin_path / item_arcpath.relative_to(arcname)
for exclude in self.backup_exclude:
if not full_path.match(exclude):
continue
_LOGGER.debug("Ignoring %s because of %s", full_path, exclude)
return True
return False
@Job( @Job(
name="addon_backup", name="addon_backup",
limit=JobExecutionLimit.GROUP_ONCE, limit=JobExecutionLimit.GROUP_ONCE,
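_is_excluded_by_filter rebuilds each member's real path from its archive path, then applies the developer-supplied exclude globs with Path.match against the full path (kept for legacy reasons, as the docstring notes). Restated as a standalone predicate:

    from pathlib import Path, PurePath

    def is_excluded(origin_path: Path, arcname: str, item_arcpath: PurePath, excludes: list[str]) -> bool:
        """Return True if a backup member matches one of the exclude globs."""
        full_path = origin_path / item_arcpath.relative_to(arcname)
        return any(full_path.match(pattern) for pattern in excludes)

    print(is_excluded(Path("/data"), "data", PurePath("data/cache/x.tmp"), ["*.tmp"]))  # True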
@@ -1286,9 +1110,7 @@ class Addon(AddonModel):
atomic_contents_add( atomic_contents_add(
backup, backup,
self.path_data, self.path_data,
file_filter=partial( excludes=self.backup_exclude,
self._is_excluded_by_filter, self.path_data, "data"
),
arcname="data", arcname="data",
) )
@@ -1297,9 +1119,7 @@ class Addon(AddonModel):
atomic_contents_add( atomic_contents_add(
backup, backup,
self.path_config, self.path_config,
file_filter=partial( excludes=self.backup_exclude,
self._is_excluded_by_filter, self.path_config, "config"
),
arcname="config", arcname="config",
) )
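atomic_contents_add now takes a file_filter callable instead of a raw exclude list, and functools.partial pre-binds the origin path and archive name so the archiver only has to supply the member path. The binding in isolation (the glob is illustrative):

    from functools import partial
    from pathlib import Path, PurePath

    def excluded(origin: Path, arcname: str, item: PurePath) -> bool:
        return (origin / item.relative_to(arcname)).match("*.tmp")

    file_filter = partial(excluded, Path("/data"), "data")
    print(file_filter(PurePath("data/x.tmp")))  # True; only the member path is passed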
@@ -1401,7 +1221,7 @@ class Addon(AddonModel):
_LOGGER.info("Restore/Update of image for addon %s", self.slug) _LOGGER.info("Restore/Update of image for addon %s", self.slug)
with suppress(DockerError): with suppress(DockerError):
await self.instance.update(version, restore_image, self.arch) await self.instance.update(version, restore_image, self.arch)
await self._check_ingress_port() self._check_ingress_port()
# Restore data and config # Restore data and config
def _restore_data(): def _restore_data():
@@ -1444,11 +1264,11 @@ class Addon(AddonModel):
) )
raise AddonsError() from err raise AddonsError() from err
finally:
# Is add-on loaded # Is add-on loaded
if not self.loaded: if not self.loaded:
await self.load() await self.load()
finally:
# Run add-on # Run add-on
if data[ATTR_STATE] == AddonState.STARTED: if data[ATTR_STATE] == AddonState.STARTED:
wait_for_start = await self.start() wait_for_start = await self.start()
@@ -1542,9 +1362,3 @@ class Addon(AddonModel):
ContainerState.UNHEALTHY, ContainerState.UNHEALTHY,
]: ]:
await self._restart_after_problem(event.state) await self._restart_after_problem(event.state)
def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths."""
if self.is_detached:
return super().refresh_path_cache()
return self.addon_store.refresh_path_cache()
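refresh_path_cache pushes the blocking Path.exists checks into an executor so the event loop never touches the filesystem; installed add-ons delegate to their store entry while detached ones fall back to the base-class check. The offloading pattern, with plain asyncio in place of sys_run_in_executor:

    import asyncio
    from pathlib import Path

    async def refresh_path_cache(paths: list[Path]) -> dict[Path, bool]:
        """Check many paths for existence without blocking the event loop."""

        def check_paths() -> dict[Path, bool]:
            return {path: path.exists() for path in paths}

        return await asyncio.get_running_loop().run_in_executor(None, check_paths)

    print(asyncio.run(refresh_path_cache([Path("/tmp")])))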


@@ -1,5 +1,4 @@
"""Supervisor add-on build environment.""" """Supervisor add-on build environment."""
from __future__ import annotations from __future__ import annotations
from functools import cached_property from functools import cached_property
@@ -103,11 +102,11 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
except HassioArchNotFound: except HassioArchNotFound:
return False return False
def get_docker_args(self, version: AwesomeVersion, image: str | None = None): def get_docker_args(self, version: AwesomeVersion):
"""Create a dict with Docker build arguments.""" """Create a dict with Docker build arguments."""
args = { args = {
"path": str(self.addon.path_location), "path": str(self.addon.path_location),
"tag": f"{image or self.addon.image}:{version!s}", "tag": f"{self.addon.image}:{version!s}",
"dockerfile": str(self.dockerfile), "dockerfile": str(self.dockerfile),
"pull": True, "pull": True,
"forcerm": not self.sys_dev, "forcerm": not self.sys_dev,


@@ -1,5 +1,4 @@
"""Add-on static data.""" """Add-on static data."""
from datetime import timedelta from datetime import timedelta
from enum import StrEnum from enum import StrEnum
@@ -29,7 +28,6 @@ class MappingType(StrEnum):
ATTR_BACKUP = "backup" ATTR_BACKUP = "backup"
ATTR_BREAKING_VERSIONS = "breaking_versions"
ATTR_CODENOTARY = "codenotary" ATTR_CODENOTARY = "codenotary"
ATTR_READ_ONLY = "read_only" ATTR_READ_ONLY = "read_only"
ATTR_PATH = "path" ATTR_PATH = "path"


@@ -1,5 +1,4 @@
"""Init file for Supervisor add-on data.""" """Init file for Supervisor add-on data."""
from copy import deepcopy from copy import deepcopy
from typing import Any from typing import Any


@@ -1,5 +1,4 @@
"""Supervisor add-on manager.""" """Supervisor add-on manager."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
from contextlib import suppress from contextlib import suppress
@@ -7,22 +6,24 @@ import logging
import tarfile import tarfile
from typing import Union from typing import Union
from attr import evolve
from ..const import AddonBoot, AddonStartup, AddonState from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import ( from ..exceptions import (
AddonConfigurationError,
AddonsError, AddonsError,
AddonsJobError, AddonsJobError,
AddonsNotSupportedError, AddonsNotSupportedError,
CoreDNSError, CoreDNSError,
DockerAPIError,
DockerError, DockerError,
DockerNotFound,
HassioError, HassioError,
HomeAssistantAPIError, HomeAssistantAPIError,
) )
from ..jobs.decorator import Job, JobCondition from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore from ..store.addon import AddonStore
from ..utils import check_exception_chain
from ..utils.sentry import capture_exception from ..utils.sentry import capture_exception
from .addon import Addon from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS from .const import ADDON_UPDATE_CONDITIONS
@@ -76,20 +77,15 @@ class AddonManager(CoreSysAttributes):
async def load(self) -> None: async def load(self) -> None:
"""Start up add-on management.""" """Start up add-on management."""
# Refresh cache for all store addons tasks = []
tasks: list[Awaitable[None]] = [
store.refresh_path_cache() for store in self.store.values()
]
# Load all installed addons
for slug in self.data.system: for slug in self.data.system:
addon = self.local[slug] = Addon(self.coresys, slug) addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(addon.load()) tasks.append(self.sys_create_task(addon.load()))
# Run initial tasks # Run initial tasks
_LOGGER.info("Found %d installed add-ons", len(self.data.system)) _LOGGER.info("Found %d installed add-ons", len(tasks))
if tasks: if tasks:
await asyncio.gather(*tasks) await asyncio.wait(tasks)
# Sync DNS # Sync DNS
await self.sync_dns() await self.sync_dns()
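The manager's load now batches the store path-cache refresh together with every installed add-on's load and awaits them with asyncio.gather, rather than spawning tasks and using asyncio.wait. The batching shape (stores and addons are assumed to expose the awaitables used above):

    import asyncio

    async def load_all(stores, addons) -> None:
        tasks = [store.refresh_path_cache() for store in stores]
        tasks += [addon.load() for addon in addons]
        if tasks:
            await asyncio.gather(*tasks)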
@@ -116,14 +112,15 @@ class AddonManager(CoreSysAttributes):
try: try:
if start_task := await addon.start(): if start_task := await addon.start():
wait_boot.append(start_task) wait_boot.append(start_task)
except AddonsError as err:
# Check if there is an system/user issue
if check_exception_chain(
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
):
addon.boot = AddonBoot.MANUAL
addon.save_persist()
except HassioError: except HassioError:
self.sys_resolution.add_issue( pass # These are already handled
evolve(addon.boot_failed_issue),
suggestions=[
SuggestionType.EXECUTE_START,
SuggestionType.DISABLE_BOOT,
],
)
else: else:
continue continue
@@ -132,19 +129,6 @@ class AddonManager(CoreSysAttributes):
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*wait_boot, return_exceptions=True) await asyncio.gather(*wait_boot, return_exceptions=True)
# After waiting for startup, create an issue for boot addons that are error or unknown state
# Ignore stopped as single shot addons can be run at boot and this is successful exit
# Timeout waiting for startup is not a failure, addon is probably just slow
for addon in tasks:
if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}:
self.sys_resolution.add_issue(
evolve(addon.boot_failed_issue),
suggestions=[
SuggestionType.EXECUTE_START,
SuggestionType.DISABLE_BOOT,
],
)
async def shutdown(self, stage: AddonStartup) -> None: async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons.""" """Shutdown addons."""
tasks: list[Addon] = [] tasks: list[Addon] = []
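evolve(addon.boot_failed_issue) registers a copy of the template issue rather than the shared instance. Assuming Issue is a frozen attrs class whose uuid field is declared with init=False and a factory (as in Supervisor's resolution data), evolve re-runs the factory and yields a fresh uuid per occurrence:

    from uuid import uuid4

    import attr

    @attr.s(frozen=True, slots=True)
    class Issue:
        type: str = attr.ib()
        context: str = attr.ib()
        reference: str | None = attr.ib(default=None)
        uuid: str = attr.ib(factory=lambda: uuid4().hex, eq=False, init=False)

    template = Issue("boot_fail", "addon", "core_ssh")
    fresh = attr.evolve(template)  # init=False field is regenerated, not copied
    print(fresh == template, fresh.uuid != template.uuid)  # True True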
@@ -189,21 +173,13 @@ class AddonManager(CoreSysAttributes):
_LOGGER.info("Add-on '%s' successfully installed", slug) _LOGGER.info("Add-on '%s' successfully installed", slug)
async def uninstall(self, slug: str, *, remove_config: bool = False) -> None: async def uninstall(self, slug: str) -> None:
"""Remove an add-on.""" """Remove an add-on."""
if slug not in self.local: if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug) _LOGGER.warning("Add-on %s is not installed", slug)
return return
shared_image = any( await self.local[slug].uninstall()
self.local[slug].image == addon.image
and self.local[slug].version == addon.version
for addon in self.installed
if addon.slug != slug
)
await self.local[slug].uninstall(
remove_config=remove_config, remove_image=not shared_image
)
_LOGGER.info("Add-on '%s' successfully removed", slug) _LOGGER.info("Add-on '%s' successfully removed", slug)


@@ -1,18 +1,14 @@
"""Init file for Supervisor add-ons.""" """Init file for Supervisor add-ons."""
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from collections import defaultdict from collections import defaultdict
from collections.abc import Awaitable, Callable from collections.abc import Callable
from contextlib import suppress from contextlib import suppress
from datetime import datetime
import logging import logging
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from awesomeversion import AwesomeVersion, AwesomeVersionException from awesomeversion import AwesomeVersion, AwesomeVersionException
from supervisor.utils.dt import utc_from_timestamp
from ..const import ( from ..const import (
ATTR_ADVANCED, ATTR_ADVANCED,
ATTR_APPARMOR, ATTR_APPARMOR,
@@ -47,7 +43,7 @@ from ..const import (
ATTR_JOURNALD, ATTR_JOURNALD,
ATTR_KERNEL_MODULES, ATTR_KERNEL_MODULES,
ATTR_LEGACY, ATTR_LEGACY,
ATTR_LOCATION, ATTR_LOCATON,
ATTR_MACHINE, ATTR_MACHINE,
ATTR_MAP, ATTR_MAP,
ATTR_NAME, ATTR_NAME,
@@ -75,7 +71,6 @@ from ..const import (
ATTR_URL, ATTR_URL,
ATTR_USB, ATTR_USB,
ATTR_VERSION, ATTR_VERSION,
ATTR_VERSION_TIMESTAMP,
ATTR_VIDEO, ATTR_VIDEO,
ATTR_WATCHDOG, ATTR_WATCHDOG,
ATTR_WEBUI, ATTR_WEBUI,
@@ -83,7 +78,6 @@ from ..const import (
SECURITY_DISABLE, SECURITY_DISABLE,
SECURITY_PROFILE, SECURITY_PROFILE,
AddonBoot, AddonBoot,
AddonBootConfig,
AddonStage, AddonStage,
AddonStartup, AddonStartup,
) )
@@ -96,7 +90,6 @@ from ..utils import version_is_new_enough
from .configuration import FolderMapping from .configuration import FolderMapping
from .const import ( from .const import (
ATTR_BACKUP, ATTR_BACKUP,
ATTR_BREAKING_VERSIONS,
ATTR_CODENOTARY, ATTR_CODENOTARY,
ATTR_PATH, ATTR_PATH,
ATTR_READ_ONLY, ATTR_READ_ONLY,
@@ -120,10 +113,6 @@ class AddonModel(JobGroup, ABC):
coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
) )
self.slug: str = slug self.slug: str = slug
self._path_icon_exists: bool = False
self._path_logo_exists: bool = False
self._path_changelog_exists: bool = False
self._path_documentation_exists: bool = False
@property @property
@abstractmethod @abstractmethod
@@ -150,15 +139,10 @@ class AddonModel(JobGroup, ABC):
"""Return options with local changes.""" """Return options with local changes."""
return self.data[ATTR_OPTIONS] return self.data[ATTR_OPTIONS]
@property
def boot_config(self) -> AddonBootConfig:
"""Return boot config."""
return self.data[ATTR_BOOT]
@property @property
def boot(self) -> AddonBoot: def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced.""" """Return boot config with prio local settings."""
return AddonBoot(self.data[ATTR_BOOT]) return self.data[ATTR_BOOT]
@property @property
def auto_update(self) -> bool | None: def auto_update(self) -> bool | None:
@@ -237,11 +221,6 @@ class AddonModel(JobGroup, ABC):
"""Return latest version of add-on.""" """Return latest version of add-on."""
return self.data[ATTR_VERSION] return self.data[ATTR_VERSION]
@property
def latest_version_timestamp(self) -> datetime:
"""Return when latest version was first seen."""
return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
@property @property
def version(self) -> AwesomeVersion: def version(self) -> AwesomeVersion:
"""Return version of add-on.""" """Return version of add-on."""
@@ -522,22 +501,22 @@ class AddonModel(JobGroup, ABC):
@property @property
def with_icon(self) -> bool: def with_icon(self) -> bool:
"""Return True if an icon exists.""" """Return True if an icon exists."""
return self._path_icon_exists return self.path_icon.exists()
@property @property
def with_logo(self) -> bool: def with_logo(self) -> bool:
"""Return True if a logo exists.""" """Return True if a logo exists."""
return self._path_logo_exists return self.path_logo.exists()
@property @property
def with_changelog(self) -> bool: def with_changelog(self) -> bool:
"""Return True if a changelog exists.""" """Return True if a changelog exists."""
return self._path_changelog_exists return self.path_changelog.exists()
@property @property
def with_documentation(self) -> bool: def with_documentation(self) -> bool:
"""Return True if a documentation exists.""" """Return True if a documentation exists."""
return self._path_documentation_exists return self.path_documentation.exists()
@property @property
def supported_arch(self) -> list[str]: def supported_arch(self) -> list[str]:
@@ -581,7 +560,7 @@ class AddonModel(JobGroup, ABC):
@property @property
def path_location(self) -> Path: def path_location(self) -> Path:
"""Return path to this add-on.""" """Return path to this add-on."""
return Path(self.data[ATTR_LOCATION]) return Path(self.data[ATTR_LOCATON])
@property @property
def path_icon(self) -> Path: def path_icon(self) -> Path:
@@ -641,22 +620,6 @@ class AddonModel(JobGroup, ABC):
"""Return Signer email address for CAS.""" """Return Signer email address for CAS."""
return self.data.get(ATTR_CODENOTARY) return self.data.get(ATTR_CODENOTARY)
@property
def breaking_versions(self) -> list[AwesomeVersion]:
"""Return breaking versions of addon."""
return self.data[ATTR_BREAKING_VERSIONS]
def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths."""
def check_paths():
self._path_icon_exists = self.path_icon.exists()
self._path_logo_exists = self.path_logo.exists()
self._path_changelog_exists = self.path_changelog.exists()
self._path_documentation_exists = self.path_documentation.exists()
return self.sys_run_in_executor(check_paths)
def validate_availability(self) -> None: def validate_availability(self) -> None:
"""Validate if addon is available for current system.""" """Validate if addon is available for current system."""
return self._validate_availability(self.data, logger=_LOGGER.error) return self._validate_availability(self.data, logger=_LOGGER.error)


@@ -1,5 +1,4 @@
"""Add-on Options / UI rendering.""" """Add-on Options / UI rendering."""
import hashlib import hashlib
import logging import logging
from pathlib import Path from pathlib import Path


@@ -1,5 +1,4 @@
"""Util add-ons functions.""" """Util add-ons functions."""
from __future__ import annotations from __future__ import annotations
import asyncio import asyncio
@@ -46,7 +45,6 @@ def rating_security(addon: AddonModel) -> int:
privilege in addon.privileged privilege in addon.privileged
for privilege in ( for privilege in (
Capabilities.BPF, Capabilities.BPF,
Capabilities.CHECKPOINT_RESTORE,
Capabilities.DAC_READ_SEARCH, Capabilities.DAC_READ_SEARCH,
Capabilities.NET_ADMIN, Capabilities.NET_ADMIN,
Capabilities.NET_RAW, Capabilities.NET_RAW,


@@ -1,5 +1,4 @@
"""Validate add-ons options schema.""" """Validate add-ons options schema."""
import logging import logging
import re import re
import secrets import secrets
@@ -55,7 +54,7 @@ from ..const import (
ATTR_KERNEL_MODULES, ATTR_KERNEL_MODULES,
ATTR_LABELS, ATTR_LABELS,
ATTR_LEGACY, ATTR_LEGACY,
ATTR_LOCATION, ATTR_LOCATON,
ATTR_MACHINE, ATTR_MACHINE,
ATTR_MAP, ATTR_MAP,
ATTR_NAME, ATTR_NAME,
@@ -79,8 +78,6 @@ from ..const import (
ATTR_STATE, ATTR_STATE,
ATTR_STDIN, ATTR_STDIN,
ATTR_SYSTEM, ATTR_SYSTEM,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TIMEOUT, ATTR_TIMEOUT,
ATTR_TMPFS, ATTR_TMPFS,
ATTR_TRANSLATIONS, ATTR_TRANSLATIONS,
@@ -98,11 +95,11 @@ from ..const import (
ROLE_ALL, ROLE_ALL,
ROLE_DEFAULT, ROLE_DEFAULT,
AddonBoot, AddonBoot,
AddonBootConfig,
AddonStage, AddonStage,
AddonStartup, AddonStartup,
AddonState, AddonState,
) )
from ..discovery.validate import valid_discovery_service
from ..docker.const import Capabilities from ..docker.const import Capabilities
from ..validate import ( from ..validate import (
docker_image, docker_image,
@@ -115,7 +112,6 @@ from ..validate import (
) )
from .const import ( from .const import (
ATTR_BACKUP, ATTR_BACKUP,
ATTR_BREAKING_VERSIONS,
ATTR_CODENOTARY, ATTR_CODENOTARY,
ATTR_PATH, ATTR_PATH,
ATTR_READ_ONLY, ATTR_READ_ONLY,
@@ -193,6 +189,20 @@ def _warn_addon_config(config: dict[str, Any]):
name, name,
) )
invalid_services: list[str] = []
for service in config.get(ATTR_DISCOVERY, []):
try:
valid_discovery_service(service)
except vol.Invalid:
invalid_services.append(service)
if invalid_services:
_LOGGER.warning(
"Add-on lists the following unknown services for discovery: %s. Please report this to the maintainer of %s",
", ".join(invalid_services),
name,
)
return config return config
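The added block collects every discovery service name rejected by valid_discovery_service and emits a single aggregated warning per add-on. The shape of the check, with a hypothetical service registry standing in for the real validator:

    import logging

    import voluptuous as vol

    _LOGGER = logging.getLogger(__name__)
    KNOWN_SERVICES = {"mqtt", "adguard"}  # hypothetical stand-in

    def valid_discovery_service(service: str) -> str:
        if service not in KNOWN_SERVICES:
            raise vol.Invalid(f"unknown discovery service {service}")
        return service

    def warn_unknown_services(name: str, services: list[str]) -> None:
        invalid = []
        for service in services:
            try:
                valid_discovery_service(service)
            except vol.Invalid:
                invalid.append(service)
        if invalid:
            _LOGGER.warning(
                "Add-on %s lists unknown services for discovery: %s", name, ", ".join(invalid)
            )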
@@ -322,9 +332,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce( vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
AddonStartup AddonStartup
), ),
vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce( vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
AddonBootConfig
),
vol.Optional(ATTR_INIT, default=True): vol.Boolean(), vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(), vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage), vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@@ -414,7 +422,6 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Coerce(int), vol.Range(min=10, max=300) vol.Coerce(int), vol.Range(min=10, max=300)
), ),
vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(), vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
}, },
extra=vol.REMOVE_EXTRA, extra=vol.REMOVE_EXTRA,
) )
@@ -473,8 +480,6 @@ SCHEMA_ADDON_USER = vol.Schema(
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(), vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(), vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(), vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str),
}, },
extra=vol.REMOVE_EXTRA, extra=vol.REMOVE_EXTRA,
) )
@@ -483,7 +488,7 @@ SCHEMA_ADDON_SYSTEM = vol.All(
_migrate_addon_config(), _migrate_addon_config(),
_SCHEMA_ADDON_CONFIG.extend( _SCHEMA_ADDON_CONFIG.extend(
{ {
vol.Required(ATTR_LOCATION): str, vol.Required(ATTR_LOCATON): str,
vol.Required(ATTR_REPOSITORY): str, vol.Required(ATTR_REPOSITORY): str,
vol.Required(ATTR_TRANSLATIONS, default=dict): { vol.Required(ATTR_TRANSLATIONS, default=dict): {
str: SCHEMA_ADDON_TRANSLATIONS str: SCHEMA_ADDON_TRANSLATIONS


@@ -1,22 +1,20 @@
"""Init file for Supervisor RESTful API.""" """Init file for Supervisor RESTful API."""
from functools import partial from functools import partial
import logging import logging
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from aiohttp import web from aiohttp import web
from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispatcher
from ..const import AddonState from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError from ..exceptions import APIAddonNotInstalled
from ..utils.sentry import capture_exception
from .addons import APIAddons from .addons import APIAddons
from .audio import APIAudio from .audio import APIAudio
from .auth import APIAuth from .auth import APIAuth
from .backups import APIBackups from .backups import APIBackups
from .cli import APICli from .cli import APICli
from .const import CONTENT_TYPE_TEXT
from .discovery import APIDiscovery from .discovery import APIDiscovery
from .dns import APICoreDNS from .dns import APICoreDNS
from .docker import APIDocker from .docker import APIDocker
@@ -38,7 +36,7 @@ from .security import APISecurity
from .services import APIServices from .services import APIServices
from .store import APIStore from .store import APIStore
from .supervisor import APISupervisor from .supervisor import APISupervisor
from .utils import api_process, api_process_raw from .utils import api_process
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -67,19 +65,14 @@ class RestAPI(CoreSysAttributes):
"max_field_size": MAX_LINE_SIZE, "max_field_size": MAX_LINE_SIZE,
}, },
) )
attach_fast_url_dispatcher(self.webapp, FastUrlDispatcher())
# service stuff # service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5) self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
self._site: web.TCPSite | None = None self._site: web.TCPSite | None = None
# share single host API handler for reuse in logging endpoints
self._api_host: APIHost | None = None
async def load(self) -> None: async def load(self) -> None:
"""Register REST API Calls.""" """Register REST API Calls."""
self._api_host = APIHost()
self._api_host.coresys = self.coresys
self._register_addons() self._register_addons()
self._register_audio() self._register_audio()
self._register_auth() self._register_auth()
@@ -109,41 +102,10 @@ class RestAPI(CoreSysAttributes):
await self.start() await self.start()
def _register_advanced_logs(self, path: str, syslog_identifier: str):
"""Register logs endpoint for a given path, returning logs for single syslog identifier."""
self.webapp.add_routes(
[
web.get(
f"{path}/logs",
partial(self._api_host.advanced_logs, identifier=syslog_identifier),
),
web.get(
f"{path}/logs/follow",
partial(
self._api_host.advanced_logs,
identifier=syslog_identifier,
follow=True,
),
),
web.get(
f"{path}/logs/boots/{{bootid}}",
partial(self._api_host.advanced_logs, identifier=syslog_identifier),
),
web.get(
f"{path}/logs/boots/{{bootid}}/follow",
partial(
self._api_host.advanced_logs,
identifier=syslog_identifier,
follow=True,
),
),
]
)
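_register_advanced_logs stamps out the same four journal endpoints (plain, follow, per-boot, per-boot follow) for any component, using functools.partial to fix the syslog identifier and the follow flag. A reusable version of that shape (the handler's keyword signature is assumed from the calls above):

    from functools import partial

    from aiohttp import web

    def register_advanced_logs(app: web.Application, path: str, identifier: str, handler) -> None:
        """Register /logs, /logs/follow and the per-boot variants for one identifier."""
        app.add_routes(
            [
                web.get(f"{path}/logs", partial(handler, identifier=identifier)),
                web.get(f"{path}/logs/follow", partial(handler, identifier=identifier, follow=True)),
                web.get(f"{path}/logs/boots/{{bootid}}", partial(handler, identifier=identifier)),
                web.get(f"{path}/logs/boots/{{bootid}}/follow", partial(handler, identifier=identifier, follow=True)),
            ]
        )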
def _register_host(self) -> None: def _register_host(self) -> None:
"""Register hostcontrol functions.""" """Register hostcontrol functions."""
api_host = self._api_host api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes( self.webapp.add_routes(
[ [
@@ -220,8 +182,6 @@ class RestAPI(CoreSysAttributes):
web.post("/os/config/sync", api_os.config_sync), web.post("/os/config/sync", api_os.config_sync),
web.post("/os/datadisk/move", api_os.migrate_data), web.post("/os/datadisk/move", api_os.migrate_data),
web.get("/os/datadisk/list", api_os.list_data), web.get("/os/datadisk/list", api_os.list_data),
web.post("/os/datadisk/wipe", api_os.wipe_data),
web.post("/os/boot-slot", api_os.set_boot_slot),
] ]
) )
@@ -259,8 +219,6 @@ class RestAPI(CoreSysAttributes):
web.get("/jobs/info", api_jobs.info), web.get("/jobs/info", api_jobs.info),
web.post("/jobs/options", api_jobs.options), web.post("/jobs/options", api_jobs.options),
web.post("/jobs/reset", api_jobs.reset), web.post("/jobs/reset", api_jobs.reset),
web.get("/jobs/{uuid}", api_jobs.job_info),
web.delete("/jobs/{uuid}", api_jobs.remove_job),
] ]
) )
@@ -299,11 +257,11 @@ class RestAPI(CoreSysAttributes):
[ [
web.get("/multicast/info", api_multicast.info), web.get("/multicast/info", api_multicast.info),
web.get("/multicast/stats", api_multicast.stats), web.get("/multicast/stats", api_multicast.stats),
web.get("/multicast/logs", api_multicast.logs),
web.post("/multicast/update", api_multicast.update), web.post("/multicast/update", api_multicast.update),
web.post("/multicast/restart", api_multicast.restart), web.post("/multicast/restart", api_multicast.restart),
] ]
) )
self._register_advanced_logs("/multicast", "hassio_multicast")
def _register_hardware(self) -> None: def _register_hardware(self) -> None:
"""Register hardware functions.""" """Register hardware functions."""
@@ -376,7 +334,6 @@ class RestAPI(CoreSysAttributes):
web.post("/auth", api_auth.auth), web.post("/auth", api_auth.auth),
web.post("/auth/reset", api_auth.reset), web.post("/auth/reset", api_auth.reset),
web.delete("/auth/cache", api_auth.cache), web.delete("/auth/cache", api_auth.cache),
web.get("/auth/list", api_auth.list_users),
] ]
) )
@@ -390,6 +347,7 @@ class RestAPI(CoreSysAttributes):
web.get("/supervisor/ping", api_supervisor.ping), web.get("/supervisor/ping", api_supervisor.ping),
web.get("/supervisor/info", api_supervisor.info), web.get("/supervisor/info", api_supervisor.info),
web.get("/supervisor/stats", api_supervisor.stats), web.get("/supervisor/stats", api_supervisor.stats),
web.get("/supervisor/logs", api_supervisor.logs),
web.post("/supervisor/update", api_supervisor.update), web.post("/supervisor/update", api_supervisor.update),
web.post("/supervisor/reload", api_supervisor.reload), web.post("/supervisor/reload", api_supervisor.reload),
web.post("/supervisor/restart", api_supervisor.restart), web.post("/supervisor/restart", api_supervisor.restart),
@@ -398,39 +356,6 @@ class RestAPI(CoreSysAttributes):
] ]
) )
async def get_supervisor_logs(*args, **kwargs):
try:
return await self._api_host.advanced_logs_handler(
*args, identifier="hassio_supervisor", **kwargs
)
except Exception as err: # pylint: disable=broad-exception-caught
# Supervisor logs are critical, so catch everything, log the exception
# and try to return Docker container logs as the fallback
_LOGGER.exception(
"Failed to get supervisor logs using advanced_logs API"
)
if not isinstance(err, HostNotSupportedError):
# No need to capture HostNotSupportedError to Sentry, the cause
# is known and reported to the user using the resolution center.
capture_exception(err)
kwargs.pop("follow", None) # Follow is not supported for Docker logs
return await api_supervisor.logs(*args, **kwargs)
self.webapp.add_routes(
[
web.get("/supervisor/logs", get_supervisor_logs),
web.get(
"/supervisor/logs/follow",
partial(get_supervisor_logs, follow=True),
),
web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
web.get(
"/supervisor/logs/boots/{bootid}/follow",
partial(get_supervisor_logs, follow=True),
),
]
)
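Supervisor logs get special treatment: the journal-backed handler is tried first, failures are logged (and reported to Sentry unless they are the known-unsupported host case), and the route falls back to Docker container logs, dropping follow because the fallback cannot stream. The fallback skeleton, with the two log sources passed in as plain callables:

    import logging

    _LOGGER = logging.getLogger(__name__)

    async def logs_with_fallback(request, journal_logs, container_logs, **kwargs):
        """Try journal logs first; fall back to container logs on any error."""
        try:
            return await journal_logs(request, identifier="hassio_supervisor", **kwargs)
        except Exception:  # broad on purpose: supervisor logs must stay reachable
            _LOGGER.exception("Failed to get supervisor logs from the journal")
            kwargs.pop("follow", None)  # not supported by the Docker fallback
            return await container_logs(request, **kwargs)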
def _register_homeassistant(self) -> None: def _register_homeassistant(self) -> None:
"""Register Home Assistant functions.""" """Register Home Assistant functions."""
api_hass = APIHomeAssistant() api_hass = APIHomeAssistant()
@@ -439,6 +364,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/core/info", api_hass.info), web.get("/core/info", api_hass.info),
web.get("/core/logs", api_hass.logs),
web.get("/core/stats", api_hass.stats), web.get("/core/stats", api_hass.stats),
web.post("/core/options", api_hass.options), web.post("/core/options", api_hass.options),
web.post("/core/update", api_hass.update), web.post("/core/update", api_hass.update),
@@ -450,12 +376,11 @@ class RestAPI(CoreSysAttributes):
] ]
) )
self._register_advanced_logs("/core", "homeassistant")
# Reroute from legacy # Reroute from legacy
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/homeassistant/info", api_hass.info), web.get("/homeassistant/info", api_hass.info),
web.get("/homeassistant/logs", api_hass.logs),
web.get("/homeassistant/stats", api_hass.stats), web.get("/homeassistant/stats", api_hass.stats),
web.post("/homeassistant/options", api_hass.options), web.post("/homeassistant/options", api_hass.options),
web.post("/homeassistant/restart", api_hass.restart), web.post("/homeassistant/restart", api_hass.restart),
@@ -467,8 +392,6 @@ class RestAPI(CoreSysAttributes):
] ]
) )
self._register_advanced_logs("/homeassistant", "homeassistant")
def _register_proxy(self) -> None: def _register_proxy(self) -> None:
"""Register Home Assistant API Proxy.""" """Register Home Assistant API Proxy."""
api_proxy = APIProxy() api_proxy = APIProxy()
@@ -510,39 +433,18 @@ class RestAPI(CoreSysAttributes):
web.post("/addons/{addon}/stop", api_addons.stop), web.post("/addons/{addon}/stop", api_addons.stop),
web.post("/addons/{addon}/restart", api_addons.restart), web.post("/addons/{addon}/restart", api_addons.restart),
web.post("/addons/{addon}/options", api_addons.options), web.post("/addons/{addon}/options", api_addons.options),
web.post("/addons/{addon}/sys_options", api_addons.sys_options),
web.post( web.post(
"/addons/{addon}/options/validate", api_addons.options_validate "/addons/{addon}/options/validate", api_addons.options_validate
), ),
web.get("/addons/{addon}/options/config", api_addons.options_config), web.get("/addons/{addon}/options/config", api_addons.options_config),
web.post("/addons/{addon}/rebuild", api_addons.rebuild), web.post("/addons/{addon}/rebuild", api_addons.rebuild),
web.get("/addons/{addon}/logs", api_addons.logs),
web.post("/addons/{addon}/stdin", api_addons.stdin), web.post("/addons/{addon}/stdin", api_addons.stdin),
web.post("/addons/{addon}/security", api_addons.security), web.post("/addons/{addon}/security", api_addons.security),
web.get("/addons/{addon}/stats", api_addons.stats), web.get("/addons/{addon}/stats", api_addons.stats),
] ]
) )
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def get_addon_logs(request, *args, **kwargs):
addon = api_addons.get_addon_for_request(request)
kwargs["identifier"] = f"addon_{addon.slug}"
return await self._api_host.advanced_logs(request, *args, **kwargs)
self.webapp.add_routes(
[
web.get("/addons/{addon}/logs", get_addon_logs),
web.get(
"/addons/{addon}/logs/follow",
partial(get_addon_logs, follow=True),
),
web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
web.get(
"/addons/{addon}/logs/boots/{bootid}/follow",
partial(get_addon_logs, follow=True),
),
]
)
# Legacy routing to support requests for not installed addons # Legacy routing to support requests for not installed addons
api_store = APIStore() api_store = APIStore()
api_store.coresys = self.coresys api_store.coresys = self.coresys
@@ -640,6 +542,7 @@ class RestAPI(CoreSysAttributes):
[ [
web.get("/dns/info", api_dns.info), web.get("/dns/info", api_dns.info),
web.get("/dns/stats", api_dns.stats), web.get("/dns/stats", api_dns.stats),
web.get("/dns/logs", api_dns.logs),
web.post("/dns/update", api_dns.update), web.post("/dns/update", api_dns.update),
web.post("/dns/options", api_dns.options), web.post("/dns/options", api_dns.options),
web.post("/dns/restart", api_dns.restart), web.post("/dns/restart", api_dns.restart),
@@ -647,17 +550,18 @@ class RestAPI(CoreSysAttributes):
] ]
) )
self._register_advanced_logs("/dns", "hassio_dns")
def _register_audio(self) -> None: def _register_audio(self) -> None:
"""Register Audio functions.""" """Register Audio functions."""
api_audio = APIAudio() api_audio = APIAudio()
api_audio.coresys = self.coresys api_audio.coresys = self.coresys
api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/audio/info", api_audio.info), web.get("/audio/info", api_audio.info),
web.get("/audio/stats", api_audio.stats), web.get("/audio/stats", api_audio.stats),
web.get("/audio/logs", api_audio.logs),
web.post("/audio/update", api_audio.update), web.post("/audio/update", api_audio.update),
web.post("/audio/restart", api_audio.restart), web.post("/audio/restart", api_audio.restart),
web.post("/audio/reload", api_audio.reload), web.post("/audio/reload", api_audio.reload),
@@ -670,8 +574,6 @@ class RestAPI(CoreSysAttributes):
] ]
) )
self._register_advanced_logs("/audio", "hassio_audio")
def _register_mounts(self) -> None: def _register_mounts(self) -> None:
"""Register mounts endpoints.""" """Register mounts endpoints."""
api_mounts = APIMounts() api_mounts = APIMounts()
@@ -698,6 +600,7 @@ class RestAPI(CoreSysAttributes):
web.get("/store", api_store.store_info), web.get("/store", api_store.store_info),
web.get("/store/addons", api_store.addons_list), web.get("/store/addons", api_store.addons_list),
web.get("/store/addons/{addon}", api_store.addons_addon_info), web.get("/store/addons/{addon}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon), web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo), web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
web.get( web.get(
@@ -719,8 +622,6 @@ class RestAPI(CoreSysAttributes):
"/store/addons/{addon}/update/{version}", "/store/addons/{addon}/update/{version}",
api_store.addons_addon_update, api_store.addons_addon_update,
), ),
# Must be below others since it has a wildcard in resource path
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.post("/store/reload", api_store.reload), web.post("/store/reload", api_store.reload),
web.get("/store/repositories", api_store.repositories_list), web.get("/store/repositories", api_store.repositories_list),
web.get( web.get(
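Note the ordering comment above: /store/addons/{addon}/{version} is registered after the more specific /icon, /logo and /update/{version} routes because dynamic routes are matched in registration order, so an early wildcard would swallow the specific paths. A small demonstration:

    from aiohttp import web

    async def icon(request):  # would be shadowed if the wildcard were registered first
        return web.Response(text="icon")

    async def info(request):
        return web.Response(text=f"info for {request.match_info['version']}")

    app = web.Application()
    app.add_routes(
        [
            web.get("/store/addons/{addon}/icon", icon),
            web.get("/store/addons/{addon}/{version}", info),  # wildcard last
        ]
    )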


@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API.""" """Init file for Supervisor Home Assistant RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
@@ -82,8 +81,6 @@ from ..const import (
ATTR_STARTUP, ATTR_STARTUP,
ATTR_STATE, ATTR_STATE,
ATTR_STDIN, ATTR_STDIN,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TRANSLATIONS, ATTR_TRANSLATIONS,
ATTR_UART, ATTR_UART,
ATTR_UDEV, ATTR_UDEV,
@@ -98,7 +95,6 @@ from ..const import (
ATTR_WEBUI, ATTR_WEBUI,
REQUEST_FROM, REQUEST_FROM,
AddonBoot, AddonBoot,
AddonBootConfig,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats from ..docker.stats import DockerStats
@@ -106,13 +102,12 @@ from ..exceptions import (
APIAddonNotInstalled, APIAddonNotInstalled,
APIError, APIError,
APIForbidden, APIForbidden,
APINotFound,
PwnedError, PwnedError,
PwnedSecret, PwnedSecret,
) )
from ..validate import docker_ports from ..validate import docker_ports
from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
from .utils import api_process, api_validate, json_loads from .utils import api_process, api_process_raw, api_validate, json_loads
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -131,26 +126,15 @@ SCHEMA_OPTIONS = vol.Schema(
} }
) )
SCHEMA_SYS_OPTIONS = vol.Schema( # pylint: disable=no-value-for-parameter
{
vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str),
}
)
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()}) SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
SCHEMA_UNINSTALL = vol.Schema(
{vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
)
# pylint: enable=no-value-for-parameter
class APIAddons(CoreSysAttributes): class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions.""" """Handle RESTful API for add-on functions."""
def get_addon_for_request(self, request: web.Request) -> Addon: def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception if it doesn't exist.""" """Return addon, throw an exception it it doesn't exist."""
addon_slug: str = request.match_info.get("addon") addon_slug: str = request.match_info.get("addon")
# Lookup itself # Lookup itself
@@ -162,7 +146,7 @@ class APIAddons(CoreSysAttributes):
addon = self.sys_addons.get(addon_slug) addon = self.sys_addons.get(addon_slug)
if not addon: if not addon:
raise APINotFound(f"Addon {addon_slug} does not exist") raise APIError(f"Addon {addon_slug} does not exist")
if not isinstance(addon, Addon) or not addon.is_installed: if not isinstance(addon, Addon) or not addon.is_installed:
raise APIAddonNotInstalled("Addon is not installed") raise APIAddonNotInstalled("Addon is not installed")
@@ -190,7 +174,6 @@ class APIAddons(CoreSysAttributes):
ATTR_URL: addon.url, ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon, ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo, ATTR_LOGO: addon.with_logo,
ATTR_SYSTEM_MANAGED: addon.system_managed,
} }
for addon in self.sys_addons.installed for addon in self.sys_addons.installed
] ]
@@ -204,7 +187,7 @@ class APIAddons(CoreSysAttributes):
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information.""" """Return add-on information."""
addon: AnyAddon = self.get_addon_for_request(request) addon: AnyAddon = self._extract_addon(request)
data = { data = {
ATTR_NAME: addon.name, ATTR_NAME: addon.name,
@@ -219,7 +202,6 @@ class APIAddons(CoreSysAttributes):
ATTR_VERSION_LATEST: addon.latest_version, ATTR_VERSION_LATEST: addon.latest_version,
ATTR_PROTECTED: addon.protected, ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon), ATTR_RATING: rating_security(addon),
ATTR_BOOT_CONFIG: addon.boot_config,
ATTR_BOOT: addon.boot, ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options, ATTR_OPTIONS: addon.options,
ATTR_SCHEMA: addon.schema_ui, ATTR_SCHEMA: addon.schema_ui,
@@ -279,8 +261,6 @@ class APIAddons(CoreSysAttributes):
ATTR_WATCHDOG: addon.watchdog, ATTR_WATCHDOG: addon.watchdog,
ATTR_DEVICES: addon.static_devices ATTR_DEVICES: addon.static_devices
+ [device.path for device in addon.devices], + [device.path for device in addon.devices],
ATTR_SYSTEM_MANAGED: addon.system_managed,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
} }
return data return data
@@ -288,7 +268,7 @@ class APIAddons(CoreSysAttributes):
@api_process @api_process
async def options(self, request: web.Request) -> None: async def options(self, request: web.Request) -> None:
"""Store user options for add-on.""" """Store user options for add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
# Update secrets for validation # Update secrets for validation
await self.sys_homeassistant.secrets.reload() await self.sys_homeassistant.secrets.reload()
@@ -303,10 +283,6 @@ class APIAddons(CoreSysAttributes):
if ATTR_OPTIONS in body: if ATTR_OPTIONS in body:
addon.options = body[ATTR_OPTIONS] addon.options = body[ATTR_OPTIONS]
if ATTR_BOOT in body: if ATTR_BOOT in body:
if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
raise APIError(
f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
)
addon.boot = body[ATTR_BOOT] addon.boot = body[ATTR_BOOT]
if ATTR_AUTO_UPDATE in body: if ATTR_AUTO_UPDATE in body:
addon.auto_update = body[ATTR_AUTO_UPDATE] addon.auto_update = body[ATTR_AUTO_UPDATE]
@@ -324,24 +300,10 @@ class APIAddons(CoreSysAttributes):
addon.save_persist() addon.save_persist()
@api_process
async def sys_options(self, request: web.Request) -> None:
"""Store system options for an add-on."""
addon = self.get_addon_for_request(request)
# Validate/Process Body
body = await api_validate(SCHEMA_SYS_OPTIONS, request)
if ATTR_SYSTEM_MANAGED in body:
addon.system_managed = body[ATTR_SYSTEM_MANAGED]
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
addon.save_persist()
@api_process @api_process
async def options_validate(self, request: web.Request) -> None: async def options_validate(self, request: web.Request) -> None:
"""Validate user options for add-on.""" """Validate user options for add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False} data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}
options = await request.json(loads=json_loads) or addon.options options = await request.json(loads=json_loads) or addon.options
@@ -383,7 +345,7 @@ class APIAddons(CoreSysAttributes):
slug: str = request.match_info.get("addon") slug: str = request.match_info.get("addon")
if slug != "self": if slug != "self":
raise APIForbidden("This can be only read by the Add-on itself!") raise APIForbidden("This can be only read by the Add-on itself!")
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
# Lookup/reload secrets # Lookup/reload secrets
await self.sys_homeassistant.secrets.reload() await self.sys_homeassistant.secrets.reload()
@@ -395,7 +357,7 @@ class APIAddons(CoreSysAttributes):
@api_process @api_process
async def security(self, request: web.Request) -> None: async def security(self, request: web.Request) -> None:
"""Store security options for add-on.""" """Store security options for add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request) body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
if ATTR_PROTECTED in body: if ATTR_PROTECTED in body:
@@ -407,7 +369,7 @@ class APIAddons(CoreSysAttributes):
@api_process @api_process
async def stats(self, request: web.Request) -> dict[str, Any]: async def stats(self, request: web.Request) -> dict[str, Any]:
"""Return resource information.""" """Return resource information."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
stats: DockerStats = await addon.stats() stats: DockerStats = await addon.stats()
@@ -423,47 +385,48 @@ class APIAddons(CoreSysAttributes):
} }
@api_process @api_process
async def uninstall(self, request: web.Request) -> Awaitable[None]: def uninstall(self, request: web.Request) -> Awaitable[None]:
"""Uninstall add-on.""" """Uninstall add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request) return asyncio.shield(self.sys_addons.uninstall(addon.slug))
return await asyncio.shield(
self.sys_addons.uninstall(
addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
)
)
@api_process @api_process
async def start(self, request: web.Request) -> None: async def start(self, request: web.Request) -> None:
"""Start add-on.""" """Start add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
if start_task := await asyncio.shield(addon.start()): if start_task := await asyncio.shield(addon.start()):
await start_task await start_task
@api_process @api_process
def stop(self, request: web.Request) -> Awaitable[None]: def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop add-on.""" """Stop add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
return asyncio.shield(addon.stop()) return asyncio.shield(addon.stop())
@api_process @api_process
async def restart(self, request: web.Request) -> None: async def restart(self, request: web.Request) -> None:
"""Restart add-on.""" """Restart add-on."""
addon: Addon = self.get_addon_for_request(request) addon: Addon = self._extract_addon(request)
if start_task := await asyncio.shield(addon.restart()): if start_task := await asyncio.shield(addon.restart()):
await start_task await start_task
@api_process @api_process
async def rebuild(self, request: web.Request) -> None: async def rebuild(self, request: web.Request) -> None:
"""Rebuild local build add-on.""" """Rebuild local build add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)): if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
await start_task await start_task
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return logs from add-on."""
addon = self._extract_addon(request)
return addon.logs()
@api_process @api_process
async def stdin(self, request: web.Request) -> None: async def stdin(self, request: web.Request) -> None:
"""Write to stdin of add-on.""" """Write to stdin of add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
if not addon.with_stdin: if not addon.with_stdin:
raise APIError(f"STDIN not supported the {addon.slug} add-on") raise APIError(f"STDIN not supported the {addon.slug} add-on")

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Audio RESTful API.""" """Init file for Supervisor Audio RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
from dataclasses import asdict from dataclasses import asdict
@@ -36,7 +35,8 @@ from ..coresys import CoreSysAttributes
from ..exceptions import APIError from ..exceptions import APIError
from ..host.sound import StreamType from ..host.sound import StreamType
from ..validate import version_tag from ..validate import version_tag
from .utils import api_process, api_validate from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -111,6 +111,11 @@ class APIAudio(CoreSysAttributes):
raise APIError(f"Version {version} is already in use") raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.audio.update(version)) await asyncio.shield(self.sys_plugins.audio.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Audio Docker logs."""
return self.sys_plugins.audio.logs()
@api_process @api_process
def restart(self, request: web.Request) -> Awaitable[None]: def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Audio plugin.""" """Restart Audio plugin."""


@@ -1,8 +1,6 @@
"""Init file for Supervisor auth/SSO RESTful API.""" """Init file for Supervisor auth/SSO RESTful API."""
import asyncio import asyncio
import logging import logging
from typing import Any
from aiohttp import BasicAuth, web from aiohttp import BasicAuth, web
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -10,19 +8,11 @@ from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol import voluptuous as vol
from ..addons.addon import Addon from ..addons.addon import Addon
from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden from ..exceptions import APIForbidden
from ..utils.json import json_loads from ..utils.json import json_loads
from .const import ( from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
ATTR_GROUP_IDS,
ATTR_IS_ACTIVE,
ATTR_IS_OWNER,
ATTR_LOCAL_ONLY,
ATTR_USERS,
CONTENT_TYPE_JSON,
CONTENT_TYPE_URL,
)
from .utils import api_process, api_validate from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -100,21 +90,3 @@ class APIAuth(CoreSysAttributes):
async def cache(self, request: web.Request) -> None: async def cache(self, request: web.Request) -> None:
"""Process cache reset request.""" """Process cache reset request."""
self.sys_auth.reset_data() self.sys_auth.reset_data()
@api_process
async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
"""List users on the Home Assistant instance."""
return {
ATTR_USERS: [
{
ATTR_USERNAME: user[ATTR_USERNAME],
ATTR_NAME: user[ATTR_NAME],
ATTR_IS_OWNER: user[ATTR_IS_OWNER],
ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
}
for user in await self.sys_auth.list_users()
if user[ATTR_USERNAME]
]
}


@@ -1,9 +1,5 @@
"""Backups RESTful API.""" """Backups RESTful API."""
from __future__ import annotations
import asyncio import asyncio
from collections.abc import Callable
import errno import errno
import logging import logging
from pathlib import Path from pathlib import Path
@@ -14,10 +10,7 @@ from typing import Any
from aiohttp import web from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..backups.backup import Backup
from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
from ..const import ( from ..const import (
ATTR_ADDONS, ATTR_ADDONS,
@@ -26,16 +19,12 @@ from ..const import (
ATTR_CONTENT, ATTR_CONTENT,
ATTR_DATE, ATTR_DATE,
ATTR_DAYS_UNTIL_STALE, ATTR_DAYS_UNTIL_STALE,
ATTR_EXTRA,
ATTR_FILENAME,
ATTR_FOLDERS, ATTR_FOLDERS,
ATTR_HOMEASSISTANT, ATTR_HOMEASSISTANT,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE, ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
ATTR_JOB_ID, ATTR_LOCATON,
ATTR_LOCATION,
ATTR_NAME, ATTR_NAME,
ATTR_PASSWORD, ATTR_PASSWORD,
ATTR_PATH,
ATTR_PROTECTED, ATTR_PROTECTED,
ATTR_REPOSITORIES, ATTR_REPOSITORIES,
ATTR_SIZE, ATTR_SIZE,
@@ -44,100 +33,63 @@ from ..const import (
ATTR_TIMEOUT, ATTR_TIMEOUT,
ATTR_TYPE, ATTR_TYPE,
ATTR_VERSION, ATTR_VERSION,
REQUEST_FROM,
BusEvent,
CoreState,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden, APINotFound from ..exceptions import APIError
from ..jobs import JobSchedulerOptions
from ..mounts.const import MountUsage from ..mounts.const import MountUsage
from ..resolution.const import UnhealthyReason from ..resolution.const import UnhealthyReason
from .const import ( from .const import CONTENT_TYPE_TAR
ATTR_ADDITIONAL_LOCATIONS,
ATTR_BACKGROUND,
ATTR_LOCATION_ATTRIBUTES,
ATTR_LOCATIONS,
ATTR_SIZE_BYTES,
CONTENT_TYPE_TAR,
)
from .utils import api_process, api_validate from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
ALL_ADDONS_FLAG = "ALL"
LOCATION_LOCAL = ".local"
RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+") RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
RE_BACKUP_FILENAME = re.compile(r"^[^\\\/]+\.tar$")
# Backwards compatible # Backwards compatible
# Remove: 2022.08 # Remove: 2022.08
_ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT] _ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
def _ensure_list(item: Any) -> list:
"""Ensure value is a list."""
if not isinstance(item, list):
return [item]
return item
def _convert_local_location(item: str | None) -> str | None:
"""Convert local location value."""
if item in {LOCATION_LOCAL, ""}:
return None
return item
# pylint: disable=no-value-for-parameter # pylint: disable=no-value-for-parameter
SCHEMA_FOLDERS = vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()) SCHEMA_RESTORE_PARTIAL = vol.Schema(
SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())
SCHEMA_RESTORE_FULL = vol.Schema(
{ {
vol.Optional(ATTR_PASSWORD): vol.Maybe(str), vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(), vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION, vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
} }
) )
SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend( SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})
{
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
}
)
SCHEMA_BACKUP_FULL = vol.Schema( SCHEMA_BACKUP_FULL = vol.Schema(
{ {
vol.Optional(ATTR_NAME): str, vol.Optional(ATTR_NAME): str,
vol.Optional(ATTR_FILENAME): vol.Match(RE_BACKUP_FILENAME),
vol.Optional(ATTR_PASSWORD): vol.Maybe(str), vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()), vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST, vol.Optional(ATTR_LOCATON): vol.Maybe(str),
vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(), vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
vol.Optional(ATTR_EXTRA): dict,
} }
) )
SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend( SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
{ {
vol.Optional(ATTR_ADDONS): vol.Or( vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
ALL_ADDONS_FLAG, vol.All([str], vol.Unique()) vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
} }
) )
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale}) SCHEMA_OPTIONS = vol.Schema(
SCHEMA_FREEZE = vol.Schema({vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1))}) {
SCHEMA_REMOVE = vol.Schema({vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST}) vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
}
)
SCHEMA_FREEZE = vol.Schema(
{
vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
}
)
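How the location schemas behave is easiest to see in isolation: the empty string and the ".local" sentinel both normalize to None (the default backup directory), and a bare string is promoted to a list. A sketch assuming voluptuous is installed:

import voluptuous as vol

LOCATION_LOCAL = ".local"

def _ensure_list(item):
    return item if isinstance(item, list) else [item]

def _convert_local_location(item):
    return None if item in {LOCATION_LOCAL, ""} else item

SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())

assert SCHEMA_LOCATION_LIST("my_nas") == ["my_nas"]
assert SCHEMA_LOCATION_LIST([".local", "my_nas"]) == [None, "my_nas"]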
class APIBackups(CoreSysAttributes): class APIBackups(CoreSysAttributes):
@@ -147,19 +99,9 @@ class APIBackups(CoreSysAttributes):
"""Return backup, throw an exception if it doesn't exist.""" """Return backup, throw an exception if it doesn't exist."""
backup = self.sys_backups.get(request.match_info.get("slug")) backup = self.sys_backups.get(request.match_info.get("slug"))
if not backup: if not backup:
raise APINotFound("Backup does not exist") raise APIError("Backup does not exist")
return backup return backup
def _make_location_attributes(self, backup: Backup) -> dict[str, dict[str, Any]]:
"""Make location attributes dictionary."""
return {
loc if loc else LOCATION_LOCAL: {
ATTR_PROTECTED: backup.all_locations[loc][ATTR_PROTECTED],
ATTR_SIZE_BYTES: backup.location_size(loc),
}
for loc in backup.locations
}
def _list_backups(self): def _list_backups(self):
"""Return list of backups.""" """Return list of backups."""
return [ return [
@@ -169,11 +111,8 @@ class APIBackups(CoreSysAttributes):
ATTR_DATE: backup.date, ATTR_DATE: backup.date,
ATTR_TYPE: backup.sys_type, ATTR_TYPE: backup.sys_type,
ATTR_SIZE: backup.size, ATTR_SIZE: backup.size,
ATTR_SIZE_BYTES: backup.size_bytes, ATTR_LOCATON: backup.location,
ATTR_LOCATION: backup.location,
ATTR_LOCATIONS: backup.locations,
ATTR_PROTECTED: backup.protected, ATTR_PROTECTED: backup.protected,
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
ATTR_COMPRESSED: backup.compressed, ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: { ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None, ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
@@ -182,7 +121,6 @@ class APIBackups(CoreSysAttributes):
}, },
} }
for backup in self.sys_backups.list_backups for backup in self.sys_backups.list_backups
if backup.location != LOCATION_CLOUD_BACKUP
] ]
@api_process @api_process
@@ -242,283 +180,104 @@ class APIBackups(CoreSysAttributes):
ATTR_NAME: backup.name, ATTR_NAME: backup.name,
ATTR_DATE: backup.date, ATTR_DATE: backup.date,
ATTR_SIZE: backup.size, ATTR_SIZE: backup.size,
ATTR_SIZE_BYTES: backup.size_bytes,
ATTR_COMPRESSED: backup.compressed, ATTR_COMPRESSED: backup.compressed,
ATTR_PROTECTED: backup.protected, ATTR_PROTECTED: backup.protected,
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
ATTR_SUPERVISOR_VERSION: backup.supervisor_version, ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
ATTR_HOMEASSISTANT: backup.homeassistant_version, ATTR_HOMEASSISTANT: backup.homeassistant_version,
ATTR_LOCATION: backup.location, ATTR_LOCATON: backup.location,
ATTR_LOCATIONS: backup.locations,
ATTR_ADDONS: data_addons, ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: backup.repositories, ATTR_REPOSITORIES: backup.repositories,
ATTR_FOLDERS: backup.folders, ATTR_FOLDERS: backup.folders,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database, ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
ATTR_EXTRA: backup.extra,
} }
def _location_to_mount(self, location: str | None) -> LOCATION_TYPE: def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
"""Convert a single location to a mount if possible.""" """Change location field to mount if necessary."""
if not location or location == LOCATION_CLOUD_BACKUP: if not body.get(ATTR_LOCATON):
return location return body
mount = self.sys_mounts.get(location) body[ATTR_LOCATON] = self.sys_mounts.get(body[ATTR_LOCATON])
if mount.usage != MountUsage.BACKUP: if body[ATTR_LOCATON].usage != MountUsage.BACKUP:
raise APIError( raise APIError(
f"Mount {mount.name} is not used for backups, cannot backup to there" f"Mount {body[ATTR_LOCATON].name} is not used for backups, cannot backup to there"
) )
return mount
def _location_field_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
"""Change location field to mount if necessary."""
body[ATTR_LOCATION] = self._location_to_mount(body.get(ATTR_LOCATION))
return body return body
def _validate_cloud_backup_location(
self, request: web.Request, location: list[str | None] | str | None
) -> None:
"""Cloud backup location is only available to Home Assistant."""
if not isinstance(location, list):
location = [location]
if (
LOCATION_CLOUD_BACKUP in location
and request.get(REQUEST_FROM) != self.sys_homeassistant
):
raise APIForbidden(
f"Location {LOCATION_CLOUD_BACKUP} is only available for Home Assistant"
)
async def _background_backup_task(
self, backup_method: Callable, *args, **kwargs
) -> tuple[asyncio.Task, str]:
"""Start backup task in background and return task and job ID."""
event = asyncio.Event()
job, backup_task = self.sys_jobs.schedule_job(
backup_method, JobSchedulerOptions(), *args, **kwargs
)
async def release_on_freeze(new_state: CoreState):
if new_state == CoreState.FREEZE:
event.set()
# Wait for system to get into freeze state before returning
# If the backup fails validation it will raise before getting there
listener = self.sys_bus.register_event(
BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
)
try:
event_task = self.sys_create_task(event.wait())
_, pending = await asyncio.wait(
(
backup_task,
event_task,
),
return_when=asyncio.FIRST_COMPLETED,
)
# It seems backup returned early (error or something), make sure to cancel
# the event task to avoid "Task was destroyed but it is pending!" errors.
if event_task in pending:
event_task.cancel()
return (backup_task, job.uuid)
finally:
self.sys_bus.remove_listener(listener)
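The helper above races the scheduled backup task against a bus event that fires when the Supervisor reaches the FREEZE state: background callers get an early return once the freeze begins, while validation errors still surface immediately. A condensed, framework-free sketch of the same race:

import asyncio

async def start_backup(do_backup, frozen: asyncio.Event) -> asyncio.Task:
    backup_task = asyncio.create_task(do_backup())
    event_task = asyncio.create_task(frozen.wait())
    # Return once the system froze (backup is underway) or the backup
    # finished first (e.g. it failed validation before ever freezing).
    _, pending = await asyncio.wait(
        {backup_task, event_task}, return_when=asyncio.FIRST_COMPLETED
    )
    if event_task in pending:
        event_task.cancel()  # avoid "Task was destroyed but it is pending!"
    return backup_task

async def demo() -> None:
    frozen = asyncio.Event()

    async def fake_backup() -> str:
        frozen.set()              # the real code fires this via the system bus
        await asyncio.sleep(0.1)  # work continues after the API answered
        return "a1b2c3d4"         # hypothetical slug

    task = await start_backup(fake_backup, frozen)
    print("answer now, backup still running:", not task.done())
    print("slug:", await task)

asyncio.run(demo())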
@api_process @api_process
async def backup_full(self, request: web.Request): async def backup_full(self, request):
"""Create full backup.""" """Create full backup."""
body = await api_validate(SCHEMA_BACKUP_FULL, request) body = await api_validate(SCHEMA_BACKUP_FULL, request)
locations: list[LOCATION_TYPE] | None = None
if ATTR_LOCATION in body: backup = await asyncio.shield(
location_names: list[str | None] = body.pop(ATTR_LOCATION) self.sys_backups.do_backup_full(**self._location_to_mount(body))
self._validate_cloud_backup_location(request, location_names)
locations = [
self._location_to_mount(location) for location in location_names
]
body[ATTR_LOCATION] = locations.pop(0)
if locations:
body[ATTR_ADDITIONAL_LOCATIONS] = locations
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_full, **body
) )
if background and not backup_task.done():
return {ATTR_JOB_ID: job_id}
backup: Backup = await backup_task
if backup: if backup:
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug} return {ATTR_SLUG: backup.slug}
raise APIError( return False
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
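The resulting response contract for the richer variant, sketched with hypothetical identifiers: a job_id always comes back, the slug only on a foreground success, and failures raise an APIError carrying the job reference.

# Foreground (background=false, the default) and the backup succeeded:
resp = {"job_id": "8e7f2c1a", "slug": "a1b2c3d4"}   # hypothetical IDs
# Background (background=true), accepted while the backup still runs:
resp = {"job_id": "8e7f2c1a"}
# Foreground failure: no success payload; an APIError(job_id=...) is raised.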
@api_process @api_process
async def backup_partial(self, request: web.Request): async def backup_partial(self, request):
"""Create a partial backup.""" """Create a partial backup."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request) body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
locations: list[LOCATION_TYPE] | None = None backup = await asyncio.shield(
self.sys_backups.do_backup_partial(**self._location_to_mount(body))
if ATTR_LOCATION in body:
location_names: list[str | None] = body.pop(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)
locations = [
self._location_to_mount(location) for location in location_names
]
body[ATTR_LOCATION] = locations.pop(0)
if locations:
body[ATTR_ADDITIONAL_LOCATIONS] = locations
if body.get(ATTR_ADDONS) == ALL_ADDONS_FLAG:
body[ATTR_ADDONS] = list(self.sys_addons.local)
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_partial, **body
) )
if background and not backup_task.done():
return {ATTR_JOB_ID: job_id}
backup: Backup = await backup_task
if backup: if backup:
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug} return {ATTR_SLUG: backup.slug}
raise APIError( return False
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
@api_process @api_process
async def restore_full(self, request: web.Request): async def restore_full(self, request):
"""Full restore of a backup.""" """Full restore of a backup."""
backup = self._extract_slug(request) backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_FULL, request) body = await api_validate(SCHEMA_RESTORE_FULL, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_full, backup, **body
)
if background and not restore_task.done() or await restore_task: return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))
return {ATTR_JOB_ID: job_id}
raise APIError(
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
@api_process @api_process
async def restore_partial(self, request: web.Request): async def restore_partial(self, request):
"""Partial restore a backup.""" """Partial restore a backup."""
backup = self._extract_slug(request) backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request) body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_partial, backup, **body
)
if background and not restore_task.done() or await restore_task: return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
return {ATTR_JOB_ID: job_id}
raise APIError(
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
@api_process @api_process
async def freeze(self, request: web.Request): async def freeze(self, request):
"""Initiate manual freeze for external backup.""" """Initiate manual freeze for external backup."""
body = await api_validate(SCHEMA_FREEZE, request) body = await api_validate(SCHEMA_FREEZE, request)
await asyncio.shield(self.sys_backups.freeze_all(**body)) await asyncio.shield(self.sys_backups.freeze_all(**body))
@api_process @api_process
async def thaw(self, request: web.Request): async def thaw(self, request):
"""Begin thaw after manual freeze.""" """Begin thaw after manual freeze."""
await self.sys_backups.thaw_all() await self.sys_backups.thaw_all()
@api_process @api_process
async def remove(self, request: web.Request): async def remove(self, request):
"""Remove a backup.""" """Remove a backup."""
backup = self._extract_slug(request) backup = self._extract_slug(request)
body = await api_validate(SCHEMA_REMOVE, request) return self.sys_backups.remove(backup)
locations: list[LOCATION_TYPE] | None = None
if ATTR_LOCATION in body: async def download(self, request):
self._validate_cloud_backup_location(request, body[ATTR_LOCATION])
locations = [self._location_to_mount(name) for name in body[ATTR_LOCATION]]
else:
self._validate_cloud_backup_location(request, backup.location)
self.sys_backups.remove(backup, locations=locations)
@api_process
async def download(self, request: web.Request):
"""Download a backup file.""" """Download a backup file."""
backup = self._extract_slug(request) backup = self._extract_slug(request)
# Query will give us '' for /backups, convert value to None
location = _convert_local_location(
request.query.get(ATTR_LOCATION, backup.location)
)
self._validate_cloud_backup_location(request, location)
if location not in backup.all_locations:
raise APIError(f"Backup {backup.slug} is not in location {location}")
_LOGGER.info("Downloading backup %s", backup.slug) _LOGGER.info("Downloading backup %s", backup.slug)
filename = backup.all_locations[location][ATTR_PATH] response = web.FileResponse(backup.tarfile)
# If the file is missing, return 404 and trigger reload of location
if not filename.is_file():
self.sys_create_task(self.sys_backups.reload(location))
return web.Response(status=404)
response = web.FileResponse(filename)
response.content_type = CONTENT_TYPE_TAR response.content_type = CONTENT_TYPE_TAR
response.headers[
download_filename = filename.name CONTENT_DISPOSITION
if download_filename == f"{backup.slug}.tar": ] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
download_filename = f"{RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
response.headers[CONTENT_DISPOSITION] = (
f"attachment; filename={download_filename}"
)
return response return response
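The download handler only rewrites auto-generated filenames: a file stored as "<slug>.tar" is renamed to a slugified form of the backup's display name, while custom filenames pass through untouched. A standalone sketch of that rule:

import re

RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")

def download_filename(slug: str, name: str, stored_name: str) -> str:
    # Auto-generated names ("<slug>.tar") become readable; anything the
    # user named explicitly is served unchanged.
    if stored_name == f"{slug}.tar":
        return f"{RE_SLUGIFY_NAME.sub('_', name)}.tar"
    return stored_name

assert download_filename("a1b2c3", "My backup!", "a1b2c3.tar") == "My_backup_.tar"
assert download_filename("a1b2c3", "My backup!", "custom.tar") == "custom.tar"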
@api_process @api_process
async def upload(self, request: web.Request): async def upload(self, request):
"""Upload a backup file.""" """Upload a backup file."""
location: LOCATION_TYPE = None with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
locations: list[LOCATION_TYPE] | None = None
tmp_path = self.sys_config.path_tmp
if ATTR_LOCATION in request.query:
location_names: list[str] = request.query.getall(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)
# Convert empty string to None if necessary
locations = [
self._location_to_mount(location)
if _convert_local_location(location)
else None
for location in location_names
]
location = locations.pop(0)
if location and location != LOCATION_CLOUD_BACKUP:
tmp_path = location.local_where
filename: str | None = None
if ATTR_FILENAME in request.query:
filename = request.query.get(ATTR_FILENAME)
try:
vol.Match(RE_BACKUP_FILENAME)(filename)
except vol.Invalid as ex:
raise APIError(humanize_error(filename, ex)) from None
with TemporaryDirectory(dir=tmp_path.as_posix()) as temp_dir:
tar_file = Path(temp_dir, "backup.tar") tar_file = Path(temp_dir, "backup.tar")
reader = await request.multipart() reader = await request.multipart()
contents = await reader.next() contents = await reader.next()
@@ -531,10 +290,7 @@ class APIBackups(CoreSysAttributes):
backup.write(chunk) backup.write(chunk)
except OSError as err: except OSError as err:
if err.errno == errno.EBADMSG and location in { if err.errno == errno.EBADMSG:
LOCATION_CLOUD_BACKUP,
None,
}:
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error("Can't write new backup file: %s", err) _LOGGER.error("Can't write new backup file: %s", err)
return False return False
@@ -542,14 +298,7 @@ class APIBackups(CoreSysAttributes):
except asyncio.CancelledError: except asyncio.CancelledError:
return False return False
backup = await asyncio.shield( backup = await asyncio.shield(self.sys_backups.import_backup(tar_file))
self.sys_backups.import_backup(
tar_file,
filename,
location=location,
additional_locations=locations,
)
)
if backup: if backup:
return {ATTR_SLUG: backup.slug} return {ATTR_SLUG: backup.slug}

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor HA cli RESTful API.""" """Init file for Supervisor HA cli RESTful API."""
import asyncio import asyncio
import logging import logging
from typing import Any from typing import Any

View File

@@ -1,26 +1,18 @@
"""Const for API.""" """Const for API."""
from enum import StrEnum
CONTENT_TYPE_BINARY = "application/octet-stream" CONTENT_TYPE_BINARY = "application/octet-stream"
CONTENT_TYPE_JSON = "application/json" CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_PNG = "image/png" CONTENT_TYPE_PNG = "image/png"
CONTENT_TYPE_TAR = "application/tar" CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_TEXT = "text/plain" CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded" CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
CONTENT_TYPE_X_LOG = "text/x-log"
COOKIE_INGRESS = "ingress_session" COOKIE_INGRESS = "ingress_session"
ATTR_ADDITIONAL_LOCATIONS = "additional_locations"
ATTR_AGENT_VERSION = "agent_version" ATTR_AGENT_VERSION = "agent_version"
ATTR_APPARMOR_VERSION = "apparmor_version" ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_ATTRIBUTES = "attributes" ATTR_ATTRIBUTES = "attributes"
ATTR_AVAILABLE_UPDATES = "available_updates" ATTR_AVAILABLE_UPDATES = "available_updates"
ATTR_BACKGROUND = "background"
ATTR_BOOT_CONFIG = "boot_config"
ATTR_BOOT_SLOT = "boot_slot"
ATTR_BOOT_SLOTS = "boot_slots"
ATTR_BOOT_TIMESTAMP = "boot_timestamp" ATTR_BOOT_TIMESTAMP = "boot_timestamp"
ATTR_BOOTS = "boots" ATTR_BOOTS = "boots"
ATTR_BROADCAST_LLMNR = "broadcast_llmnr" ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
@@ -38,46 +30,25 @@ ATTR_DT_UTC = "dt_utc"
ATTR_EJECTABLE = "ejectable" ATTR_EJECTABLE = "ejectable"
ATTR_FALLBACK = "fallback" ATTR_FALLBACK = "fallback"
ATTR_FILESYSTEMS = "filesystems" ATTR_FILESYSTEMS = "filesystems"
ATTR_FORCE = "force"
ATTR_GROUP_IDS = "group_ids"
ATTR_IDENTIFIERS = "identifiers" ATTR_IDENTIFIERS = "identifiers"
ATTR_IS_ACTIVE = "is_active"
ATTR_IS_OWNER = "is_owner"
ATTR_JOBS = "jobs" ATTR_JOBS = "jobs"
ATTR_LLMNR = "llmnr" ATTR_LLMNR = "llmnr"
ATTR_LLMNR_HOSTNAME = "llmnr_hostname" ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
ATTR_LOCAL_ONLY = "local_only"
ATTR_LOCATION_ATTRIBUTES = "location_attributes"
ATTR_LOCATIONS = "locations"
ATTR_MDNS = "mdns" ATTR_MDNS = "mdns"
ATTR_MODEL = "model" ATTR_MODEL = "model"
ATTR_MOUNTS = "mounts" ATTR_MOUNTS = "mounts"
ATTR_MOUNT_POINTS = "mount_points" ATTR_MOUNT_POINTS = "mount_points"
ATTR_PANEL_PATH = "panel_path" ATTR_PANEL_PATH = "panel_path"
ATTR_REMOVABLE = "removable" ATTR_REMOVABLE = "removable"
ATTR_REMOVE_CONFIG = "remove_config"
ATTR_REVISION = "revision" ATTR_REVISION = "revision"
ATTR_SAFE_MODE = "safe_mode"
ATTR_SEAT = "seat" ATTR_SEAT = "seat"
ATTR_SIGNED = "signed" ATTR_SIGNED = "signed"
ATTR_SIZE_BYTES = "size_bytes"
ATTR_STARTUP_TIME = "startup_time" ATTR_STARTUP_TIME = "startup_time"
ATTR_STATUS = "status"
ATTR_SUBSYSTEM = "subsystem" ATTR_SUBSYSTEM = "subsystem"
ATTR_SYSFS = "sysfs" ATTR_SYSFS = "sysfs"
ATTR_SYSTEM_HEALTH_LED = "system_health_led" ATTR_SYSTEM_HEALTH_LED = "system_health_led"
ATTR_TIME_DETECTED = "time_detected" ATTR_TIME_DETECTED = "time_detected"
ATTR_UPDATE_TYPE = "update_type" ATTR_UPDATE_TYPE = "update_type"
ATTR_USAGE = "usage"
ATTR_USE_NTP = "use_ntp" ATTR_USE_NTP = "use_ntp"
ATTR_USERS = "users" ATTR_USAGE = "usage"
ATTR_USER_PATH = "user_path"
ATTR_VENDOR = "vendor" ATTR_VENDOR = "vendor"
ATTR_VIRTUALIZATION = "virtualization"
class BootSlot(StrEnum):
"""Boot slots used by HAOS."""
A = "A"
B = "B"

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor network RESTful API.""" """Init file for Supervisor network RESTful API."""
import logging import logging
import voluptuous as vol import voluptuous as vol
@@ -16,7 +15,8 @@ from ..const import (
AddonState, AddonState,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden, APINotFound from ..discovery.validate import valid_discovery_service
from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate, require_home_assistant from .utils import api_process, api_validate, require_home_assistant
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -24,7 +24,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_DISCOVERY = vol.Schema( SCHEMA_DISCOVERY = vol.Schema(
{ {
vol.Required(ATTR_SERVICE): str, vol.Required(ATTR_SERVICE): str,
vol.Required(ATTR_CONFIG): dict, vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
} }
) )
@@ -36,7 +36,7 @@ class APIDiscovery(CoreSysAttributes):
"""Extract discovery message from URL.""" """Extract discovery message from URL."""
message = self.sys_discovery.get(request.match_info.get("uuid")) message = self.sys_discovery.get(request.match_info.get("uuid"))
if not message: if not message:
raise APINotFound("Discovery message not found") raise APIError("Discovery message not found")
return message return message
@api_process @api_process
@@ -71,6 +71,15 @@ class APIDiscovery(CoreSysAttributes):
addon: Addon = request[REQUEST_FROM] addon: Addon = request[REQUEST_FROM]
service = body[ATTR_SERVICE] service = body[ATTR_SERVICE]
try:
valid_discovery_service(service)
except vol.Invalid:
_LOGGER.warning(
"Received discovery message for unknown service %s from addon %s. Please report this to the maintainer of the add-on",
service,
addon.name,
)
# Access? # Access?
if body[ATTR_SERVICE] not in addon.discovery: if body[ATTR_SERVICE] not in addon.discovery:
_LOGGER.error( _LOGGER.error(
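In the variant that keeps valid_discovery_service, an unknown service name is logged but the message is still accepted, so new integrations are not blocked by a stale allow-list. A sketch of that advisory check, with a hypothetical allow-list standing in for the real one:

import logging

import voluptuous as vol

_LOGGER = logging.getLogger(__name__)

KNOWN_SERVICES = {"mqtt", "adguard", "esphome"}   # hypothetical allow-list
valid_discovery_service = vol.In(KNOWN_SERVICES)

def check_service(service: str, addon_name: str) -> None:
    try:
        valid_discovery_service(service)
    except vol.Invalid:
        # The message is accepted anyway; the warning just asks the add-on
        # author to register the service name upstream.
        _LOGGER.warning(
            "Received discovery message for unknown service %s from addon %s",
            service, addon_name,
        )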

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor DNS RESTful API.""" """Init file for Supervisor DNS RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
@@ -27,8 +26,8 @@ from ..const import (
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError from ..exceptions import APIError
from ..validate import dns_server_list, version_tag from ..validate import dns_server_list, version_tag
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
from .utils import api_process, api_validate from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -106,6 +105,11 @@ class APICoreDNS(CoreSysAttributes):
raise APIError(f"Version {version} is already in use") raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.dns.update(version)) await asyncio.shield(self.sys_plugins.dns.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return DNS Docker logs."""
return self.sys_plugins.dns.logs()
@api_process @api_process
def restart(self, request: web.Request) -> Awaitable[None]: def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart CoreDNS plugin.""" """Restart CoreDNS plugin."""

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API.""" """Init file for Supervisor Home Assistant RESTful API."""
import logging import logging
from typing import Any from typing import Any
@@ -16,7 +15,6 @@ from ..const import (
ATTR_VERSION, ATTR_VERSION,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APINotFound
from .utils import api_process, api_validate from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -59,9 +57,6 @@ class APIDocker(CoreSysAttributes):
async def remove_registry(self, request: web.Request): async def remove_registry(self, request: web.Request):
"""Delete a docker registry.""" """Delete a docker registry."""
hostname = request.match_info.get(ATTR_HOSTNAME) hostname = request.match_info.get(ATTR_HOSTNAME)
if hostname not in self.sys_docker.config.registries:
raise APINotFound(f"Hostname {hostname} does not exist in registries")
del self.sys_docker.config.registries[hostname] del self.sys_docker.config.registries[hostname]
self.sys_docker.config.save_data() self.sys_docker.config.save_data()

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor hardware RESTful API.""" """Init file for Supervisor hardware RESTful API."""
import logging import logging
from typing import Any from typing import Any
@@ -17,7 +16,7 @@ from ..const import (
ATTR_SYSTEM, ATTR_SYSTEM,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..dbus.udisks2 import UDisks2Manager from ..dbus.udisks2 import UDisks2
from ..dbus.udisks2.block import UDisks2Block from ..dbus.udisks2.block import UDisks2Block
from ..dbus.udisks2.drive import UDisks2Drive from ..dbus.udisks2.drive import UDisks2Drive
from ..hardware.data import Device from ..hardware.data import Device
@@ -73,7 +72,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
} }
def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]: def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]:
"""Return a dict with information of a disk to be used in the API.""" """Return a dict with information of a disk to be used in the API."""
return { return {
ATTR_VENDOR: drive.vendor, ATTR_VENDOR: drive.vendor,

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API.""" """Init file for Supervisor Home Assistant RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
@@ -35,10 +34,10 @@ from ..const import (
ATTR_WATCHDOG, ATTR_WATCHDOG,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError from ..exceptions import APIError
from ..validate import docker_image, network_port, version_tag from ..validate import docker_image, network_port, version_tag
from .const import ATTR_FORCE, ATTR_SAFE_MODE from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_validate from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -64,34 +63,10 @@ SCHEMA_UPDATE = vol.Schema(
} }
) )
SCHEMA_RESTART = vol.Schema(
{
vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
SCHEMA_STOP = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
class APIHomeAssistant(CoreSysAttributes): class APIHomeAssistant(CoreSysAttributes):
"""Handle RESTful API for Home Assistant functions.""" """Handle RESTful API for Home Assistant functions."""
async def _check_offline_migration(self, force: bool = False) -> None:
"""Check and raise if there's an offline DB migration in progress."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Offline database migration in progress, try again after it has completed"
)
@api_process @api_process
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
"""Return host information.""" """Return host information."""
@@ -119,9 +94,6 @@ class APIHomeAssistant(CoreSysAttributes):
if ATTR_IMAGE in body: if ATTR_IMAGE in body:
self.sys_homeassistant.image = body[ATTR_IMAGE] self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.override_image = (
self.sys_homeassistant.image != self.sys_homeassistant.default_image
)
if ATTR_BOOT in body: if ATTR_BOOT in body:
self.sys_homeassistant.boot = body[ATTR_BOOT] self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -173,7 +145,6 @@ class APIHomeAssistant(CoreSysAttributes):
async def update(self, request: web.Request) -> None: async def update(self, request: web.Request) -> None:
"""Update Home Assistant.""" """Update Home Assistant."""
body = await api_validate(SCHEMA_UPDATE, request) body = await api_validate(SCHEMA_UPDATE, request)
await self._check_offline_migration()
await asyncio.shield( await asyncio.shield(
self.sys_homeassistant.core.update( self.sys_homeassistant.core.update(
@@ -183,12 +154,9 @@ class APIHomeAssistant(CoreSysAttributes):
) )
@api_process @api_process
async def stop(self, request: web.Request) -> Awaitable[None]: def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop Home Assistant.""" """Stop Home Assistant."""
body = await api_validate(SCHEMA_STOP, request) return asyncio.shield(self.sys_homeassistant.core.stop())
await self._check_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_homeassistant.core.stop())
@api_process @api_process
def start(self, request: web.Request) -> Awaitable[None]: def start(self, request: web.Request) -> Awaitable[None]:
@@ -196,24 +164,19 @@ class APIHomeAssistant(CoreSysAttributes):
return asyncio.shield(self.sys_homeassistant.core.start()) return asyncio.shield(self.sys_homeassistant.core.start())
@api_process @api_process
async def restart(self, request: web.Request) -> None: def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Home Assistant.""" """Restart Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request) return asyncio.shield(self.sys_homeassistant.core.restart())
await self._check_offline_migration(force=body[ATTR_FORCE])
await asyncio.shield(
self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
)
@api_process @api_process
async def rebuild(self, request: web.Request) -> None: def rebuild(self, request: web.Request) -> Awaitable[None]:
"""Rebuild Home Assistant.""" """Rebuild Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request) return asyncio.shield(self.sys_homeassistant.core.rebuild())
await self._check_offline_migration(force=body[ATTR_FORCE])
await asyncio.shield( @api_process_raw(CONTENT_TYPE_BINARY)
self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE]) def logs(self, request: web.Request) -> Awaitable[bytes]:
) """Return Home Assistant Docker logs."""
return self.sys_homeassistant.core.logs()
@api_process @api_process
async def check(self, request: web.Request) -> None: async def check(self, request: web.Request) -> None:
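The _check_offline_migration guard gates update, stop, restart, and rebuild on Core's database state. A framework-free sketch, with a stand-in for the object returned by sys_homeassistant.api.get_api_state():

import asyncio
from dataclasses import dataclass

@dataclass
class ApiState:                       # stand-in for the real API state object
    offline_db_migration: bool

async def get_api_state() -> ApiState:
    return ApiState(offline_db_migration=True)

async def check_offline_migration(force: bool = False) -> None:
    # force=True (the ?force option on stop/restart) bypasses the guard;
    # otherwise an in-progress offline migration aborts the request.
    if not force and (state := await get_api_state()) and state.offline_db_migration:
        raise RuntimeError("Offline database migration in progress, try again later")

asyncio.run(check_offline_migration(force=True))  # passes: force skips the check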

View File

@@ -1,10 +1,9 @@
"""Init file for Supervisor host RESTful API.""" """Init file for Supervisor host RESTful API."""
import asyncio import asyncio
from contextlib import suppress from contextlib import suppress
import logging import logging
from aiohttp import ClientConnectionResetError, web from aiohttp import web
from aiohttp.hdrs import ACCEPT, RANGE from aiohttp.hdrs import ACCEPT, RANGE
import voluptuous as vol import voluptuous as vol
from voluptuous.error import CoerceInvalid from voluptuous.error import CoerceInvalid
@@ -28,15 +27,8 @@ from ..const import (
ATTR_TIMEZONE, ATTR_TIMEZONE,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError from ..exceptions import APIError, HostLogError
from ..host.const import ( from ..host.const import PARAM_BOOT_ID, PARAM_FOLLOW, PARAM_SYSLOG_IDENTIFIER
PARAM_BOOT_ID,
PARAM_FOLLOW,
PARAM_SYSLOG_IDENTIFIER,
LogFormat,
LogFormatter,
)
from ..utils.systemd_journal import journal_logs_reader
from .const import ( from .const import (
ATTR_AGENT_VERSION, ATTR_AGENT_VERSION,
ATTR_APPARMOR_VERSION, ATTR_APPARMOR_VERSION,
@@ -46,48 +38,26 @@ from .const import (
ATTR_BROADCAST_MDNS, ATTR_BROADCAST_MDNS,
ATTR_DT_SYNCHRONIZED, ATTR_DT_SYNCHRONIZED,
ATTR_DT_UTC, ATTR_DT_UTC,
ATTR_FORCE,
ATTR_IDENTIFIERS, ATTR_IDENTIFIERS,
ATTR_LLMNR_HOSTNAME, ATTR_LLMNR_HOSTNAME,
ATTR_STARTUP_TIME, ATTR_STARTUP_TIME,
ATTR_USE_NTP, ATTR_USE_NTP,
ATTR_VIRTUALIZATION,
CONTENT_TYPE_TEXT, CONTENT_TYPE_TEXT,
CONTENT_TYPE_X_LOG,
) )
from .utils import api_process, api_process_raw, api_validate from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
IDENTIFIER = "identifier" IDENTIFIER = "identifier"
BOOTID = "bootid" BOOTID = "bootid"
DEFAULT_LINES = 100 DEFAULT_RANGE = 100
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str}) SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})
# pylint: disable=no-value-for-parameter
SCHEMA_SHUTDOWN = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
# pylint: enable=no-value-for-parameter
class APIHost(CoreSysAttributes): class APIHost(CoreSysAttributes):
"""Handle RESTful API for host functions.""" """Handle RESTful API for host functions."""
async def _check_ha_offline_migration(self, force: bool) -> None:
"""Check if HA has an offline migration in progress and raise if not forced."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Home Assistant offline database migration in progress, please wait until complete before shutting down host"
)
@api_process @api_process
async def info(self, request): async def info(self, request):
"""Return host information.""" """Return host information."""
@@ -95,7 +65,6 @@ class APIHost(CoreSysAttributes):
ATTR_AGENT_VERSION: self.sys_dbus.agent.version, ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version, ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
ATTR_CHASSIS: self.sys_host.info.chassis, ATTR_CHASSIS: self.sys_host.info.chassis,
ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
ATTR_CPE: self.sys_host.info.cpe, ATTR_CPE: self.sys_host.info.cpe,
ATTR_DEPLOYMENT: self.sys_host.info.deployment, ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_DISK_FREE: self.sys_host.info.free_space, ATTR_DISK_FREE: self.sys_host.info.free_space,
@@ -129,20 +98,14 @@ class APIHost(CoreSysAttributes):
) )
@api_process @api_process
async def reboot(self, request): def reboot(self, request):
"""Reboot host.""" """Reboot host."""
body = await api_validate(SCHEMA_SHUTDOWN, request) return asyncio.shield(self.sys_host.control.reboot())
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.reboot())
@api_process @api_process
async def shutdown(self, request): def shutdown(self, request):
"""Poweroff host.""" """Poweroff host."""
body = await api_validate(SCHEMA_SHUTDOWN, request) return asyncio.shield(self.sys_host.control.shutdown())
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.shutdown())
@api_process @api_process
def reload(self, request): def reload(self, request):
@@ -190,11 +153,11 @@ class APIHost(CoreSysAttributes):
raise APIError() from err raise APIError() from err
return possible_offset return possible_offset
async def advanced_logs_handler( @api_process
async def advanced_logs(
self, request: web.Request, identifier: str | None = None, follow: bool = False self, request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.StreamResponse: ) -> web.StreamResponse:
"""Return systemd-journald logs.""" """Return systemd-journald logs."""
log_formatter = LogFormatter.PLAIN
params = {} params = {}
if identifier: if identifier:
params[PARAM_SYSLOG_IDENTIFIER] = identifier params[PARAM_SYSLOG_IDENTIFIER] = identifier
@@ -202,8 +165,6 @@ class APIHost(CoreSysAttributes):
params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER) params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
else: else:
params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
# host logs should be always verbose, no matter what Accept header is used
log_formatter = LogFormatter.VERBOSE
if BOOTID in request.match_info: if BOOTID in request.match_info:
params[PARAM_BOOT_ID] = await self._get_boot_id( params[PARAM_BOOT_ID] = await self._get_boot_id(
@@ -214,66 +175,28 @@ class APIHost(CoreSysAttributes):
if ACCEPT in request.headers and request.headers[ACCEPT] not in [ if ACCEPT in request.headers and request.headers[ACCEPT] not in [
CONTENT_TYPE_TEXT, CONTENT_TYPE_TEXT,
CONTENT_TYPE_X_LOG,
"*/*", "*/*",
]: ]:
raise APIError( raise APIError(
"Invalid content type requested. Only text/plain and text/x-log " "Invalid content type requested. Only text/plain supported for now."
"supported for now."
) )
if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG: if RANGE in request.headers:
log_formatter = LogFormatter.VERBOSE
if "lines" in request.query:
lines = request.query.get("lines", DEFAULT_LINES)
try:
lines = int(lines)
except ValueError:
# If the user passed a non-integer value, just use the default instead of error.
lines = DEFAULT_LINES
finally:
# We can't use the entries= Range header syntax to refer to the last 1 line,
# and passing 1 to the calculation below would return the 1st line of the logs
# instead. Since this is really an edge case that doesn't matter much, we'll just
# return 2 lines at minimum.
lines = max(2, lines)
# entries=cursor[[:num_skip]:num_entries]
range_header = f"entries=:-{lines - 1}:{'' if follow else lines}"
elif RANGE in request.headers:
range_header = request.headers.get(RANGE) range_header = request.headers.get(RANGE)
else: else:
range_header = ( range_header = f"entries=:-{DEFAULT_RANGE}:"
f"entries=:-{DEFAULT_LINES - 1}:{'' if follow else DEFAULT_LINES}"
)
async with self.sys_host.logs.journald_logs( async with self.sys_host.logs.journald_logs(
params=params, range_header=range_header, accept=LogFormat.JOURNAL params=params, range_header=range_header
) as resp: ) as resp:
try: try:
response = web.StreamResponse() response = web.StreamResponse()
response.content_type = CONTENT_TYPE_TEXT response.content_type = CONTENT_TYPE_TEXT
headers_returned = False await response.prepare(request)
async for cursor, line in journal_logs_reader(resp, log_formatter): async for data in resp.content:
if not headers_returned: await response.write(data)
if cursor:
response.headers["X-First-Cursor"] = cursor
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request)
headers_returned = True
# When client closes the connection while reading busy logs, we
# sometimes get this exception. It should be safe to ignore it.
with suppress(ClientConnectionResetError):
await response.write(line.encode("utf-8") + b"\n")
except ConnectionResetError as ex: except ConnectionResetError as ex:
raise APIError( raise APIError(
"Connection reset when trying to fetch data from systemd-journald." "Connection reset when trying to fetch data from systemd-journald."
) from ex ) from ex
return response return response
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def advanced_logs(
self, request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.StreamResponse:
"""Return systemd-journald logs. Wrapped as standard API handler."""
return await self.advanced_logs_handler(request, identifier, follow)
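The lines handling above reduces to one formula over journald's Range syntax, entries=cursor[[:num_skip]:num_entries]: a negative skip counts back from the end of the journal, so the last N lines need a skip of N-1, and follow mode leaves num_entries empty. A sketch:

DEFAULT_LINES = 100

def range_header(lines: int = DEFAULT_LINES, follow: bool = False) -> str:
    # Skipping -(N-1) from the end positions the read head N lines back;
    # follow mode leaves num_entries empty so the stream never terminates.
    lines = max(2, lines)  # "last 1 line" is not expressible, clamp to 2
    return f"entries=:-{lines - 1}:{'' if follow else lines}"

assert range_header() == "entries=:-99:100"
assert range_header(50, follow=True) == "entries=:-49:"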

View File

@@ -1,5 +1,4 @@
"""Supervisor Add-on ingress service.""" """Supervisor Add-on ingress service."""
import asyncio import asyncio
from ipaddress import ip_address from ipaddress import ip_address
import logging import logging
@@ -277,7 +276,6 @@ class APIIngress(CoreSysAttributes):
response.content_type = content_type response.content_type = content_type
try: try:
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request) await response.prepare(request)
async for data in result.content.iter_chunked(4096): async for data in result.content.iter_chunked(4096):
await response.write(data) await response.write(data)

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Jobs RESTful API.""" """Init file for Supervisor Jobs RESTful API."""
import logging import logging
from typing import Any from typing import Any
@@ -7,7 +6,6 @@ from aiohttp import web
import voluptuous as vol import voluptuous as vol
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound, JobNotFound
from ..jobs import SupervisorJob from ..jobs import SupervisorJob
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .const import ATTR_JOBS from .const import ATTR_JOBS
@@ -23,24 +21,10 @@ SCHEMA_OPTIONS = vol.Schema(
class APIJobs(CoreSysAttributes): class APIJobs(CoreSysAttributes):
"""Handle RESTful API for OS functions.""" """Handle RESTful API for OS functions."""
def _extract_job(self, request: web.Request) -> SupervisorJob: def _list_jobs(self) -> list[dict[str, Any]]:
"""Extract job from request or raise.""" """Return current job tree."""
try:
return self.sys_jobs.get_job(request.match_info.get("uuid"))
except JobNotFound:
raise APINotFound("Job does not exist") from None
def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
"""Return current job tree.
Jobs are added to cache as they are created so by default they are in oldest to newest.
This is correct ordering for child jobs as it makes logical sense to present those in
the order they occurred within the parent. For the list as a whole, sort from newest
to oldest as its likely any client is most interested in the newer ones.
"""
# Initially sort oldest to newest so all child lists end up in correct order
jobs_by_parent: dict[str | None, list[SupervisorJob]] = {} jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
for job in sorted(self.sys_jobs.jobs): for job in self.sys_jobs.jobs:
if job.internal: if job.internal:
continue continue
@@ -49,16 +33,10 @@ class APIJobs(CoreSysAttributes):
else: else:
jobs_by_parent[job.parent_id].append(job) jobs_by_parent[job.parent_id].append(job)
# After parent-child organization, sort the root jobs only from newest to oldest
job_list: list[dict[str, Any]] = [] job_list: list[dict[str, Any]] = []
queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = ( queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = [
[(job_list, start)] (job_list, job) for job in jobs_by_parent.get(None, [])
if start ]
else [
(job_list, job)
for job in sorted(jobs_by_parent.get(None, []), reverse=True)
]
)
while queue: while queue:
(current_list, current_job) = queue.pop(0) (current_list, current_job) = queue.pop(0)
@@ -100,19 +78,3 @@ class APIJobs(CoreSysAttributes):
async def reset(self, request: web.Request) -> None: async def reset(self, request: web.Request) -> None:
"""Reset options for JobManager.""" """Reset options for JobManager."""
self.sys_jobs.reset_data() self.sys_jobs.reset_data()
@api_process
async def job_info(self, request: web.Request) -> dict[str, Any]:
"""Get details of a job by ID."""
job = self._extract_job(request)
return self._list_jobs(job)[0]
@api_process
async def remove_job(self, request: web.Request) -> None:
"""Remove a completed job."""
job = self._extract_job(request)
if not job.done:
raise APIError(f"Job {job.uuid} is not done!")
self.sys_jobs.remove_job(job)
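The ordering rules described in _list_jobs are easiest to see with plain data: the cache is oldest-to-newest, children keep that order under their parent, and only the root level is reversed. A sketch:

from collections import defaultdict

# (uuid, parent_id) pairs, already ordered oldest -> newest as in the cache
jobs = [("j1", None), ("j2", "j1"), ("j3", "j1"), ("j4", None)]

jobs_by_parent: dict[str | None, list[str]] = defaultdict(list)
for uuid, parent in jobs:
    jobs_by_parent[parent].append(uuid)   # children keep creation order

# Only the roots are presented newest-first; the real code sorts job
# objects, which for an already-ordered cache amounts to a reversal.
roots_newest_first = list(reversed(jobs_by_parent[None]))
assert roots_newest_first == ["j4", "j1"]
assert jobs_by_parent["j1"] == ["j2", "j3"]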

View File

@@ -1,5 +1,4 @@
"""Handle security part of this API.""" """Handle security part of this API."""
import logging import logging
import re import re
from typing import Final from typing import Final
@@ -9,8 +8,6 @@ from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion from awesomeversion import AwesomeVersion
from supervisor.homeassistant.const import LANDINGPAGE
from ...addons.const import RE_SLUG from ...addons.const import RE_SLUG
from ...const import ( from ...const import (
REQUEST_FROM, REQUEST_FROM,
@@ -80,13 +77,6 @@ ADDONS_API_BYPASS: Final = re.compile(
r")$" r")$"
) )
# Home Assistant only
CORE_ONLY_PATHS: Final = re.compile(
r"^(?:"
r"/addons/" + RE_SLUG + "/sys_options"
r")$"
)
# Policy role add-on API access # Policy role add-on API access
ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = { ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
ROLE_DEFAULT: re.compile( ROLE_DEFAULT: re.compile(
@@ -113,8 +103,6 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?" r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
r"|/audio/.+" r"|/audio/.+"
r"|/auth/cache" r"|/auth/cache"
r"|/available_updates"
r"|/backups.*"
r"|/cli/.+" r"|/cli/.+"
r"|/core/.+" r"|/core/.+"
r"|/dns/.+" r"|/dns/.+"
@@ -124,17 +112,16 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
r"|/hassos/.+" r"|/hassos/.+"
r"|/homeassistant/.+" r"|/homeassistant/.+"
r"|/host/.+" r"|/host/.+"
r"|/mounts.*"
r"|/multicast/.+" r"|/multicast/.+"
r"|/network/.+" r"|/network/.+"
r"|/observer/.+" r"|/observer/.+"
r"|/os/(?!datadisk/wipe).+" r"|/os/.+"
r"|/refresh_updates"
r"|/resolution/.+" r"|/resolution/.+"
r"|/security/.+" r"|/backups.*"
r"|/snapshots.*" r"|/snapshots.*"
r"|/store.*" r"|/store.*"
r"|/supervisor/.+" r"|/supervisor/.+"
r"|/security/.+"
r")$" r")$"
), ),
ROLE_ADMIN: re.compile( ROLE_ADMIN: re.compile(
@@ -242,9 +229,6 @@ class SecurityMiddleware(CoreSysAttributes):
if supervisor_token == self.sys_homeassistant.supervisor_token: if supervisor_token == self.sys_homeassistant.supervisor_token:
_LOGGER.debug("%s access from Home Assistant", request.path) _LOGGER.debug("%s access from Home Assistant", request.path)
request_from = self.sys_homeassistant request_from = self.sys_homeassistant
elif CORE_ONLY_PATHS.match(request.path):
_LOGGER.warning("Attempted access to %s from client besides Home Assistant")
raise HTTPForbidden()
# Host # Host
if supervisor_token == self.sys_plugins.cli.supervisor_token: if supervisor_token == self.sys_plugins.cli.supervisor_token:
@@ -290,10 +274,8 @@ class SecurityMiddleware(CoreSysAttributes):
@middleware @middleware
async def core_proxy(self, request: Request, handler: RequestHandler) -> Response: async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy.""" """Validate user from Core API proxy."""
if ( if request[REQUEST_FROM] != self.sys_homeassistant or version_is_new_enough(
request[REQUEST_FROM] != self.sys_homeassistant self.sys_homeassistant.version, _CORE_VERSION
or self.sys_homeassistant.version == LANDINGPAGE
or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION)
): ):
return await handler(request) return await handler(request)
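CORE_ONLY_PATHS composes the shared add-on slug pattern into a gate that only Home Assistant may pass. With a simplified stand-in for RE_SLUG, the check looks like this:

import re

RE_SLUG = r"[-_a-z0-9]+"   # simplified stand-in for the real slug pattern
CORE_ONLY_PATHS = re.compile(r"^(?:/addons/" + RE_SLUG + r"/sys_options)$")

# Only requests authenticated as Home Assistant may hit matching paths;
# any other caller is rejected with HTTP 403 before routing.
assert CORE_ONLY_PATHS.match("/addons/core_ssh/sys_options")
assert not CORE_ONLY_PATHS.match("/addons/core_ssh/options")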

View File

@@ -7,11 +7,11 @@ import voluptuous as vol
from ..const import ATTR_NAME, ATTR_STATE from ..const import ATTR_NAME, ATTR_STATE
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound from ..exceptions import APIError
from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
from ..mounts.mount import Mount from ..mounts.mount import Mount
from ..mounts.validate import SCHEMA_MOUNT_CONFIG from ..mounts.validate import SCHEMA_MOUNT_CONFIG
from .const import ATTR_MOUNTS, ATTR_USER_PATH from .const import ATTR_MOUNTS
from .utils import api_process, api_validate from .utils import api_process, api_validate
SCHEMA_OPTIONS = vol.Schema( SCHEMA_OPTIONS = vol.Schema(
@@ -24,13 +24,6 @@ SCHEMA_OPTIONS = vol.Schema(
class APIMounts(CoreSysAttributes): class APIMounts(CoreSysAttributes):
"""Handle REST API for mounting options.""" """Handle REST API for mounting options."""
def _extract_mount(self, request: web.Request) -> Mount:
"""Extract mount from request or raise."""
name = request.match_info.get("mount")
if name not in self.sys_mounts:
raise APINotFound(f"No mount exists with name {name}")
return self.sys_mounts.get(name)
@api_process @api_process
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
"""Return MountManager info.""" """Return MountManager info."""
@@ -39,13 +32,7 @@ class APIMounts(CoreSysAttributes):
if self.sys_mounts.default_backup_mount if self.sys_mounts.default_backup_mount
else None, else None,
ATTR_MOUNTS: [ ATTR_MOUNTS: [
mount.to_dict() mount.to_dict() | {ATTR_STATE: mount.state}
| {
ATTR_STATE: mount.state,
ATTR_USER_PATH: mount.container_where.as_posix()
if mount.container_where
else None,
}
for mount in self.sys_mounts.mounts for mount in self.sys_mounts.mounts
], ],
} }
@@ -92,13 +79,15 @@ class APIMounts(CoreSysAttributes):
@api_process @api_process
async def update_mount(self, request: web.Request) -> None: async def update_mount(self, request: web.Request) -> None:
"""Update an existing mount in supervisor.""" """Update an existing mount in supervisor."""
current = self._extract_mount(request) name = request.match_info.get("mount")
name_schema = vol.Schema( name_schema = vol.Schema(
{vol.Optional(ATTR_NAME, default=current.name): current.name}, {vol.Optional(ATTR_NAME, default=name): name}, extra=vol.ALLOW_EXTRA
extra=vol.ALLOW_EXTRA,
) )
body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request) body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)
if name not in self.sys_mounts:
raise APIError(f"No mount exists with name {name}")
mount = Mount.from_dict(self.coresys, body) mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount) await self.sys_mounts.create_mount(mount)
@@ -115,8 +104,8 @@ class APIMounts(CoreSysAttributes):
@api_process @api_process
async def delete_mount(self, request: web.Request) -> None: async def delete_mount(self, request: web.Request) -> None:
"""Delete an existing mount in supervisor.""" """Delete an existing mount in supervisor."""
current = self._extract_mount(request) name = request.match_info.get("mount")
mount = await self.sys_mounts.remove_mount(current.name) mount = await self.sys_mounts.remove_mount(name)
# If it was a backup mount, reload backups # If it was a backup mount, reload backups
if mount.usage == MountUsage.BACKUP: if mount.usage == MountUsage.BACKUP:
@@ -127,9 +116,9 @@ class APIMounts(CoreSysAttributes):
@api_process @api_process
async def reload_mount(self, request: web.Request) -> None: async def reload_mount(self, request: web.Request) -> None:
"""Reload an existing mount in supervisor.""" """Reload an existing mount in supervisor."""
mount = self._extract_mount(request) name = request.match_info.get("mount")
await self.sys_mounts.reload_mount(mount.name) await self.sys_mounts.reload_mount(name)
# If it's a backup mount, reload backups # If it's a backup mount, reload backups
if mount.usage == MountUsage.BACKUP: if self.sys_mounts.get(name).usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload()) self.sys_create_task(self.sys_backups.reload())
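The _extract_mount helper centralizes the lookup-or-404 step that update, delete, and reload each repeated. A standalone sketch of the pattern, with stand-ins for the exception class and the mount registry:

class APINotFound(Exception):
    """Stand-in for supervisor.exceptions.APINotFound."""

mounts = {"media_share": {"usage": "backup"}}   # hypothetical mount registry

def extract_mount(name: str) -> dict:
    # Resolve once, fail early with a 404-style error, and let every
    # handler work with the resolved object instead of the raw name.
    if name not in mounts:
        raise APINotFound(f"No mount exists with name {name}")
    return mounts[name]

assert extract_mount("media_share")["usage"] == "backup"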

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Multicast RESTful API.""" """Init file for Supervisor Multicast RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
@@ -24,7 +23,8 @@ from ..const import (
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError from ..exceptions import APIError
from ..validate import version_tag from ..validate import version_tag
from .utils import api_process, api_validate from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -69,6 +69,11 @@ class APIMulticast(CoreSysAttributes):
raise APIError(f"Version {version} is already in use") raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.multicast.update(version)) await asyncio.shield(self.sys_plugins.multicast.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Multicast Docker logs."""
return self.sys_plugins.multicast.logs()
@api_process @api_process
def restart(self, request: web.Request) -> Awaitable[None]: def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Multicast plugin.""" """Restart Multicast plugin."""

View File

@@ -1,8 +1,8 @@
"""REST API for network.""" """REST API for network."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface from dataclasses import replace
from ipaddress import ip_address, ip_interface
from typing import Any from typing import Any
from aiohttp import web from aiohttp import web
@@ -42,34 +42,24 @@ from ..const import (
DOCKER_NETWORK_MASK, DOCKER_NETWORK_MASK,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound, HostNetworkNotFound from ..exceptions import APIError, HostNetworkNotFound
from ..host.configuration import ( from ..host.configuration import (
AccessPoint, AccessPoint,
Interface, Interface,
InterfaceMethod, InterfaceMethod,
IpConfig, IpConfig,
IpSetting,
VlanConfig, VlanConfig,
WifiConfig, WifiConfig,
) )
from ..host.const import AuthMethod, InterfaceType, WifiMode from ..host.const import AuthMethod, InterfaceType, WifiMode
from .utils import api_process, api_validate from .utils import api_process, api_validate
_SCHEMA_IPV4_CONFIG = vol.Schema( _SCHEMA_IP_CONFIG = vol.Schema(
{ {
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)], vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod), vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address), vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)], vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
}
)
_SCHEMA_IPV6_CONFIG = vol.Schema(
{
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
} }
) )
@@ -86,18 +76,18 @@ _SCHEMA_WIFI_CONFIG = vol.Schema(
# pylint: disable=no-value-for-parameter # pylint: disable=no-value-for-parameter
SCHEMA_UPDATE = vol.Schema( SCHEMA_UPDATE = vol.Schema(
{ {
vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG, vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG, vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG, vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
vol.Optional(ATTR_ENABLED): vol.Boolean(), vol.Optional(ATTR_ENABLED): vol.Boolean(),
} }
) )
def ipconfig_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]: def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
"""Return a dict with information about ip configuration.""" """Return a dict with information about ip configuration."""
return { return {
ATTR_METHOD: setting.method, ATTR_METHOD: config.method,
ATTR_ADDRESS: [address.with_prefixlen for address in config.address], ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers], ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None, ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
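One side of this hunk serializes from the live IpConfig plus the stored IpSetting (the method comes from settings, addresses from runtime state); the other derives everything from a single config object. A small self-contained sketch of the two-argument form, using SimpleNamespace stand-ins instead of the Supervisor dataclasses:

from ipaddress import ip_address, ip_interface
from types import SimpleNamespace

def ipconfig_struct(config, setting) -> dict:
    # Method reflects stored configuration; addresses reflect runtime state.
    return {
        "method": setting.method,
        "address": [address.with_prefixlen for address in config.address],
        "nameservers": [str(address) for address in config.nameservers],
        "gateway": str(config.gateway) if config.gateway else None,
    }

config = SimpleNamespace(
    address=[ip_interface("192.168.1.5/24")],
    nameservers=[ip_address("1.1.1.1")],
    gateway=ip_address("192.168.1.1"),
)
setting = SimpleNamespace(method="static")
print(ipconfig_struct(config, setting))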
@@ -132,8 +122,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
ATTR_CONNECTED: interface.connected, ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary, ATTR_PRIMARY: interface.primary,
ATTR_MAC: interface.mac, ATTR_MAC: interface.mac,
ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting), ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting), ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None, ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None, ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
} }
@@ -167,7 +157,7 @@ class APINetwork(CoreSysAttributes):
except HostNetworkNotFound: except HostNetworkNotFound:
pass pass
raise APINotFound(f"Interface {name} does not exist") from None raise APIError(f"Interface {name} does not exist") from None
@api_process @api_process
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
@@ -207,26 +197,24 @@ class APINetwork(CoreSysAttributes):
# Apply config # Apply config
for key, config in body.items(): for key, config in body.items():
if key == ATTR_IPV4: if key == ATTR_IPV4:
interface.ipv4setting = IpSetting( interface.ipv4 = replace(
config.get(ATTR_METHOD, InterfaceMethod.STATIC), interface.ipv4
config.get(ATTR_ADDRESS, []), or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
config.get(ATTR_GATEWAY), **config,
config.get(ATTR_NAMESERVERS, []),
) )
elif key == ATTR_IPV6: elif key == ATTR_IPV6:
interface.ipv6setting = IpSetting( interface.ipv6 = replace(
config.get(ATTR_METHOD, InterfaceMethod.STATIC), interface.ipv6
config.get(ATTR_ADDRESS, []), or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
config.get(ATTR_GATEWAY), **config,
config.get(ATTR_NAMESERVERS, []),
) )
elif key == ATTR_WIFI: elif key == ATTR_WIFI:
interface.wifi = WifiConfig( interface.wifi = replace(
config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE), interface.wifi
config.get(ATTR_SSID, ""), or WifiConfig(
config.get(ATTR_AUTH, AuthMethod.OPEN), WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
config.get(ATTR_PSK, None), ),
None, **config,
) )
elif key == ATTR_ENABLED: elif key == ATTR_ENABLED:
interface.enabled = config interface.enabled = config
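In the interface-update hunk above, one side rebuilds the settings object field by field with config.get(...) defaults, while the other merges the validated body into the existing dataclass via dataclasses.replace, falling back to a default instance when the interface has none yet. A simplified sketch of the replace-based merge, with a stand-in dataclass rather than the Supervisor IpConfig:

from dataclasses import dataclass, field, replace

@dataclass
class IpConfig:
    # Stand-in mirroring the fields used above.
    method: str = "static"
    address: list = field(default_factory=list)
    gateway: str | None = None
    nameservers: list = field(default_factory=list)

current = None  # e.g. interface.ipv4 can be None on a fresh interface
body = {"method": "auto", "gateway": "192.168.1.1"}

# Keys present in the body win; every other field keeps its current value.
merged = replace(current or IpConfig(), **body)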
@@ -268,22 +256,24 @@ class APINetwork(CoreSysAttributes):
vlan_config = VlanConfig(vlan, interface.name) vlan_config = VlanConfig(vlan, interface.name)
ipv4_setting = None ipv4_config = None
if ATTR_IPV4 in body: if ATTR_IPV4 in body:
ipv4_setting = IpSetting( ipv4_config = IpConfig(
body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO), body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
body[ATTR_IPV4].get(ATTR_ADDRESS, []), body[ATTR_IPV4].get(ATTR_ADDRESS, []),
body[ATTR_IPV4].get(ATTR_GATEWAY, None), body[ATTR_IPV4].get(ATTR_GATEWAY, None),
body[ATTR_IPV4].get(ATTR_NAMESERVERS, []), body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
None,
) )
ipv6_setting = None ipv6_config = None
if ATTR_IPV6 in body: if ATTR_IPV6 in body:
ipv6_setting = IpSetting( ipv6_config = IpConfig(
body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO), body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
body[ATTR_IPV6].get(ATTR_ADDRESS, []), body[ATTR_IPV6].get(ATTR_ADDRESS, []),
body[ATTR_IPV6].get(ATTR_GATEWAY, None), body[ATTR_IPV6].get(ATTR_GATEWAY, None),
body[ATTR_IPV6].get(ATTR_NAMESERVERS, []), body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
None,
) )
vlan_interface = Interface( vlan_interface = Interface(
@@ -294,10 +284,8 @@ class APINetwork(CoreSysAttributes):
True, True,
False, False,
InterfaceType.VLAN, InterfaceType.VLAN,
None, ipv4_config,
ipv4_setting, ipv6_config,
None,
ipv6_setting,
None, None,
vlan_config, vlan_config,
) )
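Throughout this file, one column validates IPv4 and IPv6 payloads with separate schemas built on the concrete ipaddress types, so a v6 value in a v4 field is rejected at the API boundary instead of being silently coerced. A minimal sketch of that split with voluptuous, attribute names abbreviated:

from ipaddress import IPv4Address, IPv4Interface
import voluptuous as vol

SCHEMA_IPV4 = vol.Schema(
    {
        vol.Optional("address"): [vol.Coerce(IPv4Interface)],
        vol.Optional("gateway"): vol.Coerce(IPv4Address),
        vol.Optional("nameservers"): [vol.Coerce(IPv4Address)],
    }
)

SCHEMA_IPV4({"gateway": "192.168.1.1"})  # passes
# SCHEMA_IPV4({"gateway": "fe80::1"})    # raises vol.Invalid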

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Observer RESTful API.""" """Init file for Supervisor Observer RESTful API."""
import asyncio import asyncio
import logging import logging
from typing import Any from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor HassOS RESTful API.""" """Init file for Supervisor HassOS RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
@@ -20,7 +19,6 @@ from ..const import (
ATTR_POWER_LED, ATTR_POWER_LED,
ATTR_SERIAL, ATTR_SERIAL,
ATTR_SIZE, ATTR_SIZE,
ATTR_STATE,
ATTR_UPDATE_AVAILABLE, ATTR_UPDATE_AVAILABLE,
ATTR_VERSION, ATTR_VERSION,
ATTR_VERSION_LATEST, ATTR_VERSION_LATEST,
@@ -30,17 +28,13 @@ from ..exceptions import BoardInvalidError
from ..resolution.const import ContextType, IssueType, SuggestionType from ..resolution.const import ContextType, IssueType, SuggestionType
from ..validate import version_tag from ..validate import version_tag
from .const import ( from .const import (
ATTR_BOOT_SLOT,
ATTR_BOOT_SLOTS,
ATTR_DATA_DISK, ATTR_DATA_DISK,
ATTR_DEV_PATH, ATTR_DEV_PATH,
ATTR_DEVICE, ATTR_DEVICE,
ATTR_DISKS, ATTR_DISKS,
ATTR_MODEL, ATTR_MODEL,
ATTR_STATUS,
ATTR_SYSTEM_HEALTH_LED, ATTR_SYSTEM_HEALTH_LED,
ATTR_VENDOR, ATTR_VENDOR,
BootSlot,
) )
from .utils import api_process, api_validate from .utils import api_process, api_validate
@@ -48,7 +42,6 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter # pylint: disable=no-value-for-parameter
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag}) SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str}) SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})
SCHEMA_YELLOW_OPTIONS = vol.Schema( SCHEMA_YELLOW_OPTIONS = vol.Schema(
@@ -81,15 +74,6 @@ class APIOS(CoreSysAttributes):
ATTR_BOARD: self.sys_os.board, ATTR_BOARD: self.sys_os.board,
ATTR_BOOT: self.sys_dbus.rauc.boot_slot, ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id, ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
ATTR_BOOT_SLOTS: {
slot.bootname: {
ATTR_STATE: slot.state,
ATTR_STATUS: slot.boot_status,
ATTR_VERSION: slot.bundle_version,
}
for slot in self.sys_os.slots
if slot.bootname
},
} }
@api_process @api_process
@@ -112,17 +96,6 @@ class APIOS(CoreSysAttributes):
await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE])) await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))
@api_process
def wipe_data(self, request: web.Request) -> Awaitable[None]:
"""Trigger data disk wipe on Host."""
return asyncio.shield(self.sys_os.datadisk.wipe_disk())
@api_process
async def set_boot_slot(self, request: web.Request) -> None:
"""Change the active boot slot and reboot into it."""
body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
@api_process @api_process
async def list_data(self, request: web.Request) -> dict[str, Any]: async def list_data(self, request: web.Request) -> dict[str, Any]:
"""Return possible data targets.""" """Return possible data targets."""
@@ -157,17 +130,13 @@ class APIOS(CoreSysAttributes):
body = await api_validate(SCHEMA_GREEN_OPTIONS, request) body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
if ATTR_ACTIVITY_LED in body: if ATTR_ACTIVITY_LED in body:
await self.sys_dbus.agent.board.green.set_activity_led( self.sys_dbus.agent.board.green.activity_led = body[ATTR_ACTIVITY_LED]
body[ATTR_ACTIVITY_LED]
)
if ATTR_POWER_LED in body: if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED]) self.sys_dbus.agent.board.green.power_led = body[ATTR_POWER_LED]
if ATTR_SYSTEM_HEALTH_LED in body: if ATTR_SYSTEM_HEALTH_LED in body:
await self.sys_dbus.agent.board.green.set_user_led( self.sys_dbus.agent.board.green.user_led = body[ATTR_SYSTEM_HEALTH_LED]
body[ATTR_SYSTEM_HEALTH_LED]
)
self.sys_dbus.agent.board.green.save_data() self.sys_dbus.agent.board.green.save_data()
@@ -186,15 +155,13 @@ class APIOS(CoreSysAttributes):
body = await api_validate(SCHEMA_YELLOW_OPTIONS, request) body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)
if ATTR_DISK_LED in body: if ATTR_DISK_LED in body:
await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED]) self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED]
if ATTR_HEARTBEAT_LED in body: if ATTR_HEARTBEAT_LED in body:
await self.sys_dbus.agent.board.yellow.set_heartbeat_led( self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED]
body[ATTR_HEARTBEAT_LED]
)
if ATTR_POWER_LED in body: if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED]) self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]
self.sys_dbus.agent.board.yellow.save_data() self.sys_dbus.agent.board.yellow.save_data()
self.sys_resolution.create_issue( self.sys_resolution.create_issue(
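Two patterns in this file's hunks are worth isolating: boot-slot reporting as a mapping keyed by boot name (slots without one are skipped), and board LED writes as awaited set_* calls instead of plain property assignment. The slot payload is a dict comprehension; a self-contained sketch with a stand-in slot type and illustrative values:

from dataclasses import dataclass

@dataclass
class Slot:
    # Stand-in for the Supervisor RAUC slot object.
    bootname: str | None
    state: str
    boot_status: str
    bundle_version: str

slots = [
    Slot("A", "booted", "good", "11.4"),
    Slot(None, "inactive", "bad", "11.3"),  # no bootname -> filtered out
]

boot_slots = {
    slot.bootname: {
        "state": slot.state,
        "status": slot.boot_status,
        "version": slot.bundle_version,
    }
    for slot in slots
    if slot.bootname
}
# -> {"A": {"state": "booted", "status": "good", "version": "11.4"}}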

View File

@@ -1 +1 @@
!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12[2-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(12[3-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([4-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(10[7-9]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12[2-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12[3-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12[2-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[4-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.73ec900e351835f9.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.163d6939af79fd9b.js")}else d("/api/hassio/app/frontend_es5/entrypoint.163d6939af79fd9b.js")}() !function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()

View File

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1047],{32594:function(e,t,r){r.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},75054:function(e,t,r){r.r(t),r.d(t,{HaTimeDuration:function(){return f}});var n,a=r(88962),i=r(33368),o=r(71650),d=r(82390),u=r(69205),l=r(70906),s=r(91808),c=r(68144),v=r(79932),f=(r(47289),(0,s.Z)([(0,v.Mo)("ha-selector-duration")],(function(e,t){var r=function(t){(0,u.Z)(n,t);var r=(0,l.Z)(n);function n(){var t;(0,o.Z)(this,n);for(var a=arguments.length,i=new Array(a),u=0;u<a;u++)i[u]=arguments[u];return t=r.call.apply(r,[this].concat(i)),e((0,d.Z)(t)),t}return(0,i.Z)(n)}(t);return{F:r,d:[{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"value",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){var e;return(0,c.dy)(n||(n=(0,a.Z)([' <ha-duration-input .label="','" .helper="','" .data="','" .disabled="','" .required="','" ?enableDay="','"></ha-duration-input> '])),this.label,this.helper,this.value,this.disabled,this.required,null===(e=this.selector.duration)||void 0===e?void 0:e.enable_day)}}]}}),c.oi))}}]);
//# sourceMappingURL=1047-g7fFLS9eP4I.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["12"],{5739:function(e,a,t){t.a(e,(async function(e,i){try{t.r(a),t.d(a,{HaNavigationSelector:()=>c});var d=t(73577),r=(t(71695),t(47021),t(57243)),n=t(50778),l=t(36522),o=t(63297),s=e([o]);o=(s.then?(await s)():s)[0];let u,h=e=>e,c=(0,d.Z)([(0,n.Mo)("ha-selector-navigation")],(function(e,a){return{F:class extends a{constructor(...a){super(...a),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"value",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean,reflect:!0})],key:"disabled",value(){return!1}},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"required",value(){return!0}},{kind:"method",key:"render",value:function(){return(0,r.dy)(u||(u=h` <ha-navigation-picker .hass="${0}" .label="${0}" .value="${0}" .required="${0}" .disabled="${0}" .helper="${0}" @value-changed="${0}"></ha-navigation-picker> `),this.hass,this.label,this.value,this.required,this.disabled,this.helper,this._valueChanged)}},{kind:"method",key:"_valueChanged",value:function(e){(0,l.B)(this,"value-changed",{value:e.detail.value})}}]}}),r.oi);i()}catch(u){i(u)}}))}}]);
//# sourceMappingURL=12.ffa1bdc0a98802fa.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"12.ffa1bdc0a98802fa.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250205.0/src/components/ha-selector/ha-selector-navigation.ts"],"names":["HaNavigationSelector","_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","reflect","html","_t","_","this","hass","label","required","disabled","helper","_valueChanged","ev","fireEvent","detail","LitElement"],"mappings":"mVAQaA,GAAoBC,EAAAA,EAAAA,GAAA,EADhCC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAiCvC,OAAAC,EAjCD,cACiCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,QAASC,SAAS,KAAOJ,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAElEC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,mKAECC,KAAKC,KACJD,KAAKE,MACLF,KAAKP,MACFO,KAAKG,SACLH,KAAKI,SACPJ,KAAKK,OACEL,KAAKM,cAG5B,GAAC,CAAAlB,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsBc,IACpBC,EAAAA,EAAAA,GAAUR,KAAM,gBAAiB,CAAEP,MAAOc,EAAGE,OAAOhB,OACtD,IAAC,GA/BuCiB,EAAAA,I"}

View File

@@ -1,2 +0,0 @@
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1236"],{4121:function(){Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,n){var t=String(e).split("."),a=!t[1],l=Number(t[0])==e,o=l&&t[0].slice(-1),r=l&&t[0].slice(-2);return n?1==o&&11!=r?"one":2==o&&12!=r?"two":3==o&&13!=r?"few":"other":1==e&&a?"one":"other"}},locale:"en"})}}]);
//# sourceMappingURL=1236.64ca65d0ea4d76d4.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"1236.64ca65d0ea4d76d4.js","sources":["/unknown/node_modules/@formatjs/intl-pluralrules/locale-data/en.js"],"names":["Intl","PluralRules","__addLocaleData","n","ord","s","String","split","v0","t0","Number","n10","slice","n100"],"mappings":"wHAEIA,KAAKC,aAA2D,mBAArCD,KAAKC,YAAYC,iBAC9CF,KAAKC,YAAYC,gBAAgB,CAAC,KAAO,CAAC,WAAa,CAAC,SAAW,CAAC,MAAM,SAAS,QAAU,CAAC,MAAM,MAAM,MAAM,UAAU,GAAK,SAASC,EAAGC,GAC3I,IAAIC,EAAIC,OAAOH,GAAGI,MAAM,KAAMC,GAAMH,EAAE,GAAII,EAAKC,OAAOL,EAAE,KAAOF,EAAGQ,EAAMF,GAAMJ,EAAE,GAAGO,OAAO,GAAIC,EAAOJ,GAAMJ,EAAE,GAAGO,OAAO,GACvH,OAAIR,EAAmB,GAAPO,GAAoB,IAARE,EAAa,MAC9B,GAAPF,GAAoB,IAARE,EAAa,MAClB,GAAPF,GAAoB,IAARE,EAAa,MACzB,QACQ,GAALV,GAAUK,EAAK,MAAQ,OAChC,GAAG,OAAS,M"}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff