Compare commits


2 Commits

Author SHA1 Message Date
J. Nick Koston
af3256e41e Significantly speed up creating backups with isal via zlib-fast
isal is a drop-in replacement for zlib, with the
caveat that the compression level mappings are different.
zlib-fast is a tiny piece of middleware that converts
the standard zlib compression levels to isal compression
levels to allow for drop-in replacement.

https://github.com/bdraco/zlib-fast/releases/tag/v0.1.0
https://github.com/pycompression/python-isal

Compression for backups is ~5x faster than the baseline

https://github.com/powturbo/TurboBench/issues/43
2024-01-27 13:06:41 -10:00
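
The mechanism shows up later in this diff in supervisor/__main__.py, where zlib_fast.enable() runs before the supervisor package is imported. A minimal sketch of the pattern, assuming zlib-fast and python-isal are installed:

import zlib_fast

# Patch the stdlib zlib with isal before anything that uses it is imported,
# so gzip/tarfile (used when compressing backups) bind to the accelerated
# implementation, with the standard 0-9 levels remapped to isal's levels.
zlib_fast.enable()

import gzip  # noqa: E402  # deliberately imported after enable()

payload = b"backup data " * 100_000
compressed = gzip.compress(payload, compresslevel=6)
print(f"{len(payload)} -> {len(compressed)} bytes")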
J. Nick Koston
a163121ad4 Fix dirhash failing to import pkg_resources
dirhash needs pkg_resources, which is provided by setuptools

https://github.com/home-assistant/supervisor/actions/runs/7513346221/job/20454994962
2024-01-14 00:02:12 -10:00
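
For context, a minimal sketch of the failure mode (a hypothetical reproduction; the actual fix is the explicit setuptools pin visible in requirements.txt further down this diff):

# dirhash imports pkg_resources at import time; pkg_resources ships with
# setuptools, so environments without setuptools fail before dirhash runs.
try:
    import dirhash  # noqa: F401
except ModuleNotFoundError as err:
    # Typically: "No module named 'pkg_resources'".
    # Fix: declare setuptools as an explicit runtime dependency.
    print(f"dirhash import failed: {err}")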
478 changed files with 3588 additions and 10544 deletions

View File

@@ -4,43 +4,33 @@
"containerEnv": {
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
},
"remoteEnv": {
"PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
},
"appPort": ["9123:8123", "7357:4357"],
"postCreateCommand": "bash devcontainer_setup",
"postStartCommand": "bash devcontainer_bootstrap",
"postCreateCommand": "bash devcontainer_bootstrap",
"runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
"customizations": {
"vscode": {
"extensions": [
"charliermarsh.ruff",
"ms-python.python",
"ms-python.pylint",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github"
"esbenp.prettier-vscode"
],
"settings": {
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
"python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
},
"terminal.integrated.defaultProfile.linux": "zsh",
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
}
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"python.pythonPath": "/usr/local/bin/python3",
"python.formatting.provider": "black",
"python.formatting.blackArgs": ["--target-version", "py312"],
"python.formatting.blackPath": "/usr/local/bin/black"
}
}
},

View File

@@ -38,7 +38,6 @@
- This PR is related to issue:
- Link to documentation pull request:
- Link to cli pull request:
- Link to client library pull request:
## Checklist
@@ -53,14 +52,12 @@
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
- [ ] Tests have been added to verify that the new code works.
If API endpoints or add-on configuration are added/changed:
If API endpoints of add-on configuration are added/changed:
- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
- [ ] [CLI][cli-repository] updated (if necessary)
- [ ] [Client library][client-library-repository] updated (if necessary)
<!--
Thank you for contributing <3
@@ -70,5 +67,3 @@ If API endpoints or add-on configuration are added/changed:
[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[docs-repository]: https://github.com/home-assistant/developers.home-assistant
[cli-repository]: https://github.com/home-assistant/cli
[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/

View File

@@ -53,7 +53,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
with:
fetch-depth: 0
@@ -92,7 +92,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
with:
fetch-depth: 0
@@ -106,7 +106,7 @@ jobs:
- name: Build wheels
if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.01.0
with:
abi: cp312
tag: musllinux_1_2
@@ -125,15 +125,15 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Install Cosign
if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.7.0
uses: sigstore/cosign-installer@v3.3.0
with:
cosign-release: "v2.4.0"
cosign-release: "v2.0.2"
- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
@@ -149,7 +149,7 @@ jobs:
- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.01.0
with:
args: |
$BUILD_ARGS \
@@ -178,7 +178,7 @@ jobs:
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Initialize git
if: needs.init.outputs.publish == 'true'
@@ -203,11 +203,11 @@ jobs:
timeout-minutes: 60
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.01.0
with:
args: |
--test \

View File

@@ -25,15 +25,15 @@ jobs:
name: Prepare Python dependencies
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Set up Python
id: python
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.3.3
with:
path: venv
key: |
@@ -47,7 +47,7 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v3.3.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -61,21 +61,21 @@ jobs:
. venv/bin/activate
pre-commit install-hooks
lint-ruff-format:
name: Check ruff-format
lint-black:
name: Check black
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.3.3
with:
path: venv
key: |
@@ -85,67 +85,10 @@ jobs:
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run ruff-format
- name: Run black
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
lint-ruff:
name: Check ruff
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run ruff
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
black --target-version py312 --check supervisor tests setup.py
lint-dockerfile:
name: Check Dockerfile
@@ -153,7 +96,7 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,15 +111,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.3.3
with:
path: venv
key: |
@@ -188,7 +131,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v3.3.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -206,21 +149,53 @@ jobs:
. venv/bin/activate
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
lint-json:
name: Check JSON
lint-flake8:
name: Check flake8
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.3.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Register flake8 problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/flake8.json"
- name: Run flake8
run: |
. venv/bin/activate
flake8 supervisor tests
lint-isort:
name: Check isort
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
with:
path: venv
key: |
@@ -232,7 +207,48 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v3.3.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run isort
run: |
. venv/bin/activate
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
lint-json:
name: Check JSON
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -256,15 +272,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.3.3
with:
path: venv
key: |
@@ -282,25 +298,66 @@ jobs:
. venv/bin/activate
pylint supervisor tests
lint-pyupgrade:
name: Check pyupgrade
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run pyupgrade
run: |
. venv/bin/activate
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
pytest:
runs-on: ubuntu-latest
needs: prepare
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign
uses: sigstore/cosign-installer@v3.7.0
uses: sigstore/cosign-installer@v3.3.0
with:
cosign-release: "v2.4.0"
cosign-release: "v2.0.2"
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.3.3
with:
path: venv
key: |
@@ -313,7 +370,7 @@ jobs:
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
@@ -335,11 +392,10 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v4.4.3
uses: actions/upload-artifact@v4.0.0
with:
name: coverage-${{ matrix.python-version }}
path: .coverage
include-hidden-files: true
coverage:
name: Process test coverage
@@ -347,15 +403,15 @@ jobs:
needs: ["pytest", "prepare"]
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.3.3
with:
path: venv
key: |
@@ -366,7 +422,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v4.1.1
- name: Combine coverage results
run: |
. venv/bin/activate
@@ -374,4 +430,4 @@ jobs:
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4.6.0
uses: codecov/codecov-action@v3.1.4

.github/workflows/matchers/flake8.json vendored Normal file
View File

@@ -0,0 +1,30 @@
{
"problemMatcher": [
{
"owner": "flake8-error",
"severity": "error",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
},
{
"owner": "flake8-warning",
"severity": "warning",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
}
]
}
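
A quick way to sanity-check the two matcher patterns above against typical flake8 output, using hypothetical sample lines:

import re

ERROR = re.compile(r"^(.*):(\d+):(\d+):\s(E\d{3}\s.*)$")
WARNING = re.compile(r"^(.*):(\d+):(\d+):\s([CDFNW]\d{3}\s.*)$")

# Hypothetical flake8 output lines:
for line in (
    "supervisor/addons/addon.py:42:1: E302 expected 2 blank lines, got 1",
    "tests/test_backup.py:7:1: F401 'os' imported but unused",
):
    match = ERROR.match(line) or WARNING.match(line)
    if match:
        file, row, col, message = match.groups()
        print(f"{file} @ {row}:{col} -> {message}")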

View File

@@ -11,7 +11,7 @@ jobs:
name: Release Drafter
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
with:
fetch-depth: 0
@@ -36,7 +36,7 @@ jobs:
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
- name: Run Release Drafter
uses: release-drafter/release-drafter@v6.0.0
uses: release-drafter/release-drafter@v5.25.0
with:
tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }}

View File

@@ -10,9 +10,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.1
- name: Sentry Release
uses: getsentry/action-release@v1.7.0
uses: getsentry/action-release@v1.6.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}

View File

@@ -1,15 +1,34 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.7
- repo: https://github.com/psf/black
rev: 23.12.1
hooks:
- id: ruff
- id: black
args:
- --fix
- id: ruff-format
- --safe
- --quiet
- --target-version
- py312
files: ^((supervisor|tests)/.+)?[^/]+\.py$
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
hooks:
- id: flake8
additional_dependencies:
- flake8-docstrings==1.7.0
- pydocstyle==6.3.0
files: ^(supervisor|script|tests)/.+\.py$
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]
- id: check-json
- repo: https://github.com/PyCQA/isort
rev: 5.13.2
hooks:
- id: isort
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.0
hooks:
- id: pyupgrade
args: [--py312-plus]

.vscode/tasks.json vendored
View File

@@ -58,23 +58,9 @@
"problemMatcher": []
},
{
"label": "Ruff Check",
"label": "Flake8",
"type": "shell",
"command": "ruff check --fix supervisor tests",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Ruff Format",
"type": "shell",
"command": "ruff format supervisor tests",
"command": "flake8 supervisor tests",
"group": {
"kind": "test",
"isDefault": true

View File

@@ -4,8 +4,7 @@ FROM ${BUILD_FROM}
ENV \
S6_SERVICES_GRACETIME=10000 \
SUPERVISOR_API=http://localhost \
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
UV_SYSTEM_PYTHON=true
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1
ARG \
COSIGN_VERSION \
@@ -27,17 +26,14 @@ RUN \
yaml \
\
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign \
&& pip3 install uv==0.2.21
&& chmod a+x /usr/bin/cosign
# Install requirements
COPY requirements.txt .
RUN \
if [ "${BUILD_ARCH}" = "i386" ]; then \
linux32 uv pip install --no-build -r requirements.txt; \
else \
uv pip install --no-build -r requirements.txt; \
fi \
export MAKEFLAGS="-j$(nproc)" \
&& pip3 install --only-binary=:all: \
-r ./requirements.txt \
&& rm -f requirements.txt
# Install Home Assistant Supervisor

View File

@@ -30,5 +30,3 @@ Releases are done in 3 stages (channels) with this structure:
[development]: https://developers.home-assistant.io/docs/supervisor/development
[stable]: https://github.com/home-assistant/version/blob/master/stable.json
[![Home Assistant - A project from the Open Home Foundation](https://www.openhomefoundation.org/badges/home-assistant.png)](https://www.openhomefoundation.org/)

View File

@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
build_from:
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.20
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.20
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.20
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.20
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.20
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.18
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.18
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.18
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.18
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.18
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@@ -12,7 +12,7 @@ cosign:
base_identity: https://github.com/home-assistant/docker-base/.*
identity: https://github.com/home-assistant/supervisor/.*
args:
COSIGN_VERSION: 2.4.0
COSIGN_VERSION: 2.0.2
labels:
io.hass.type: supervisor
org.opencontainers.image.title: Home Assistant Supervisor

View File

@@ -31,7 +31,7 @@ include-package-data = true
include = ["supervisor*"]
[tool.pylint.MAIN]
py-version = "3.12"
py-version = "3.11"
# Use a conservative default here; 2 should speed up most setups and not hurt
# any too bad. Override on command line as appropriate.
jobs = 2
@@ -44,7 +44,7 @@ good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]
[tool.pylint."MESSAGES CONTROL"]
# Reasons disabled:
# format - handled by ruff
# format - handled by black
# abstract-method - with intro of async there are always methods missing
# cyclic-import - doesn't test if both import on load
# duplicate-code - unavoidable
@@ -71,136 +71,6 @@ disable = [
"too-many-statements",
"unused-argument",
"consider-using-with",
# Handled by ruff
# Ref: <https://github.com/astral-sh/ruff/issues/970>
"await-outside-async", # PLE1142
"bad-str-strip-call", # PLE1310
"bad-string-format-type", # PLE1307
"bidirectional-unicode", # PLE2502
"continue-in-finally", # PLE0116
"duplicate-bases", # PLE0241
"format-needs-mapping", # F502
"function-redefined", # F811
# Needed because ruff does not understand type of __all__ generated by a function
# "invalid-all-format", # PLE0605
"invalid-all-object", # PLE0604
"invalid-character-backspace", # PLE2510
"invalid-character-esc", # PLE2513
"invalid-character-nul", # PLE2514
"invalid-character-sub", # PLE2512
"invalid-character-zero-width-space", # PLE2515
"logging-too-few-args", # PLE1206
"logging-too-many-args", # PLE1205
"missing-format-string-key", # F524
"mixed-format-string", # F506
"no-method-argument", # N805
"no-self-argument", # N805
"nonexistent-operator", # B002
"nonlocal-without-binding", # PLE0117
"not-in-loop", # F701, F702
"notimplemented-raised", # F901
"return-in-init", # PLE0101
"return-outside-function", # F706
"syntax-error", # E999
"too-few-format-args", # F524
"too-many-format-args", # F522
"too-many-star-expressions", # F622
"truncated-format-string", # F501
"undefined-all-variable", # F822
"undefined-variable", # F821
"used-prior-global-declaration", # PLE0118
"yield-inside-async-function", # PLE1700
"yield-outside-function", # F704
"anomalous-backslash-in-string", # W605
"assert-on-string-literal", # PLW0129
"assert-on-tuple", # F631
"bad-format-string", # W1302, F
"bad-format-string-key", # W1300, F
"bare-except", # E722
"binary-op-exception", # PLW0711
"cell-var-from-loop", # B023
# "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
"duplicate-except", # B014
"duplicate-key", # F601
"duplicate-string-formatting-argument", # F
"duplicate-value", # F
"eval-used", # PGH001
"exec-used", # S102
# "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
"f-string-without-interpolation", # F541
"forgotten-debug-statement", # T100
"format-string-without-interpolation", # F
# "global-statement", # PLW0603, ruff catches new occurrences, needs more work
"global-variable-not-assigned", # PLW0602
"implicit-str-concat", # ISC001
"import-self", # PLW0406
"inconsistent-quotes", # Q000
"invalid-envvar-default", # PLW1508
"keyword-arg-before-vararg", # B026
"logging-format-interpolation", # G
"logging-fstring-interpolation", # G
"logging-not-lazy", # G
"misplaced-future", # F404
"named-expr-without-context", # PLW0131
"nested-min-max", # PLW3301
# "pointless-statement", # B018, ruff catches new occurrences, needs more work
"raise-missing-from", # TRY200
# "redefined-builtin", # A001, ruff is way more stricter, needs work
"try-except-raise", # TRY302
"unused-argument", # ARG001, we don't use it
"unused-format-string-argument", #F507
"unused-format-string-key", # F504
"unused-import", # F401
"unused-variable", # F841
"useless-else-on-loop", # PLW0120
"wildcard-import", # F403
"bad-classmethod-argument", # N804
"consider-iterating-dictionary", # SIM118
"empty-docstring", # D419
"invalid-name", # N815
"line-too-long", # E501, disabled globally
"missing-class-docstring", # D101
"missing-final-newline", # W292
"missing-function-docstring", # D103
"missing-module-docstring", # D100
"multiple-imports", #E401
"singleton-comparison", # E711, E712
"subprocess-run-check", # PLW1510
"superfluous-parens", # UP034
"ungrouped-imports", # I001
"unidiomatic-typecheck", # E721
"unnecessary-direct-lambda-call", # PLC3002
"unnecessary-lambda-assignment", # PLC3001
"unneeded-not", # SIM208
"useless-import-alias", # PLC0414
"wrong-import-order", # I001
"wrong-import-position", # E402
"comparison-of-constants", # PLR0133
"comparison-with-itself", # PLR0124
# "consider-alternative-union-syntax", # UP007, typing extension
"consider-merging-isinstance", # PLR1701
# "consider-using-alias", # UP006, typing extension
"consider-using-dict-comprehension", # C402
"consider-using-generator", # C417
"consider-using-get", # SIM401
"consider-using-set-comprehension", # C401
"consider-using-sys-exit", # PLR1722
"consider-using-ternary", # SIM108
"literal-comparison", # F632
"property-with-parameters", # PLR0206
"super-with-arguments", # UP008
"too-many-branches", # PLR0912
"too-many-return-statements", # PLR0911
"too-many-statements", # PLR0915
"trailing-comma-tuple", # COM818
"unnecessary-comprehension", # C416
"use-a-generator", # C417
"use-dict-literal", # C406
"use-list-literal", # C405
"useless-object-inheritance", # UP004
"useless-return", # PLR1711
# "no-self-use", # PLR6301 # Optional plugin, not enabled
]
[tool.pylint.REPORTS]
@@ -215,9 +85,6 @@ expected-line-ending-format = "LF"
[tool.pylint.EXCEPTIONS]
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
[tool.pylint.DESIGN]
max-positional-arguments = 10
[tool.pytest.ini_options]
testpaths = ["tests"]
norecursedirs = [".git"]
@@ -230,144 +97,16 @@ filterwarnings = [
"ignore::pytest.PytestUnraisableExceptionWarning",
]
[tool.ruff]
lint.select = [
"B002", # Python does not support the unary prefix increment
"B007", # Loop control variable {name} not used within loop body
"B014", # Exception handler with duplicate exception
"B023", # Function definition does not bind loop variable {name}
"B026", # Star-arg unpacking after a keyword argument is strongly discouraged
"B904", # Use raise from to specify exception cause
"C", # complexity
"COM818", # Trailing comma on bare tuple prohibited
"D", # docstrings
"DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
"DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
"E", # pycodestyle
"F", # pyflakes/autoflake
"G", # flake8-logging-format
"I", # isort
"ICN001", # import concentions; {name} should be imported as {asname}
"N804", # First argument of a class method should be named cls
"N805", # First argument of a method should be named self
"N815", # Variable {name} in class scope should not be mixedCase
"PGH004", # Use specific rule codes when using noqa
"PLC0414", # Useless import alias. Import alias does not rename original package.
"PLC", # pylint
"PLE", # pylint
"PLR", # pylint
"PLW", # pylint
"Q000", # Double quotes found but single quotes preferred
"RUF006", # Store a reference to the return value of asyncio.create_task
"S102", # Use of exec detected
"S103", # bad-file-permissions
"S108", # hardcoded-temp-file
"S306", # suspicious-mktemp-usage
"S307", # suspicious-eval-usage
"S313", # suspicious-xmlc-element-tree-usage
"S314", # suspicious-xml-element-tree-usage
"S315", # suspicious-xml-expat-reader-usage
"S316", # suspicious-xml-expat-builder-usage
"S317", # suspicious-xml-sax-usage
"S318", # suspicious-xml-mini-dom-usage
"S319", # suspicious-xml-pull-dom-usage
"S320", # suspicious-xmle-tree-usage
"S601", # paramiko-call
"S602", # subprocess-popen-with-shell-equals-true
"S604", # call-with-shell-equals-true
"S608", # hardcoded-sql-expression
"S609", # unix-command-wildcard-injection
"SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
"SIM117", # Merge with-statements that use the same scope
"SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
"SIM201", # Use {left} != {right} instead of not {left} == {right}
"SIM208", # Use {expr} instead of not (not {expr})
"SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
"SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
"SIM401", # Use get from dict with default instead of an if block
"T100", # Trace found: {name} used
"T20", # flake8-print
"TID251", # Banned imports
"TRY004", # Prefer TypeError exception for invalid type
"TRY302", # Remove exception handler; error is immediately re-raised
"UP", # pyupgrade
"W", # pycodestyle
]
lint.ignore = [
"D202", # No blank lines allowed after function docstring
"D203", # 1 blank line required before class docstring
"D213", # Multi-line docstring summary should start at the second line
"D406", # Section name should end with a newline
"D407", # Section name underlining
"E501", # line too long
"E731", # do not assign a lambda expression, use a def
# Ignore ignored, as the rule is now back in preview/nursery, which cannot
# be ignored anymore without warnings.
# https://github.com/astral-sh/ruff/issues/7491
# "PLC1901", # Lots of false positives
# False positives https://github.com/astral-sh/ruff/issues/5386
"PLC0208", # Use a sequence type instead of a `set` when iterating over values
"PLR0911", # Too many return statements ({returns} > {max_returns})
"PLR0912", # Too many branches ({branches} > {max_branches})
"PLR0913", # Too many arguments to function call ({c_args} > {max_args})
"PLR0915", # Too many statements ({statements} > {max_statements})
"PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
"PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
"UP006", # keep type annotation style as is
"UP007", # keep type annotation style as is
# Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
# May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"W191",
"E111",
"E114",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"ISC001",
"ISC002",
# Disabled because ruff does not understand type of __all__ generated by a function
"PLE0605",
]
[tool.ruff.lint.flake8-import-conventions.extend-aliases]
voluptuous = "vol"
[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false
[tool.ruff.lint.flake8-tidy-imports.banned-api]
"pytz".msg = "use zoneinfo instead"
[tool.ruff.lint.isort]
force-sort-within-sections = true
section-order = [
"future",
"standard-library",
"third-party",
"first-party",
"local-folder",
]
forced-separate = ["tests"]
known-first-party = ["supervisor", "tests"]
combine-as-imports = true
split-on-trailing-comma = false
[tool.ruff.lint.per-file-ignores]
# DBus Service Mocks must use typing and names understood by dbus-fast
"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
[tool.ruff.lint.mccabe]
max-complexity = 25
[tool.isort]
multi_line_output = 3
include_trailing_comma = true
force_grid_wrap = 0
line_length = 88
indent = " "
force_sort_within_sections = true
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
default_section = "THIRDPARTY"
forced_separate = "tests"
combine_as_imports = true
use_parentheses = true
known_first_party = ["supervisor", "tests"]

View File

@@ -1,29 +1,30 @@
aiodns==3.2.0
aiohttp==3.10.10
aiodns==3.1.1
aiohttp==3.9.1
aiohttp-fast-url-dispatcher==0.3.0
async_timeout==4.0.3
atomicwrites-homeassistant==1.4.1
attrs==24.2.0
awesomeversion==24.6.0
attrs==23.2.0
awesomeversion==23.11.0
brotli==1.1.0
ciso8601==2.3.1
colorlog==6.8.2
cpe==1.3.1
cryptography==43.0.1
debugpy==1.8.7
deepmerge==2.0
dirhash==0.5.0
docker==7.1.0
colorlog==6.8.0
cpe==1.2.1
cryptography==41.0.7
debugpy==1.8.0
deepmerge==1.1.1
dirhash==0.2.1
docker==7.0.0
faust-cchardet==2.1.19
gitpython==3.1.43
jinja2==3.1.4
orjson==3.10.7
pulsectl==24.8.0
pyudev==0.24.3
PyYAML==6.0.2
requests==2.32.3
securetar==2024.2.1
sentry-sdk==2.16.0
setuptools==75.1.0
voluptuous==0.15.2
dbus-fast==2.24.3
typing_extensions==4.12.2
zlib-fast==0.2.0
gitpython==3.1.41
jinja2==3.1.3
orjson==3.9.10
pulsectl==23.5.2
pyudev==0.24.1
PyYAML==6.0.1
securetar==2023.12.0
sentry-sdk==1.39.2
setuptools==69.0.3
voluptuous==0.14.1
dbus-fast==2.21.0
typing_extensions==4.9.0
zlib-fast==0.1.0

View File

@@ -1,12 +1,16 @@
coverage==7.6.3
pre-commit==4.0.1
pylint==3.3.1
black==23.12.1
coverage==7.4.0
flake8-docstrings==1.7.0
flake8==7.0.0
pre-commit==3.6.0
pydocstyle==6.3.0
pylint==3.0.3
pytest-aiohttp==1.0.5
pytest-asyncio==0.23.6
pytest-cov==5.0.0
pytest-timeout==2.3.1
pytest==8.3.3
ruff==0.6.9
time-machine==2.16.0
typing_extensions==4.12.2
urllib3==2.2.3
pytest-asyncio==0.23.3
pytest-cov==4.1.0
pytest-timeout==2.2.0
pytest==7.4.4
pyupgrade==3.15.0
time-machine==2.13.0
typing_extensions==4.9.0
urllib3==2.1.0

setup.cfg Normal file
View File

@@ -0,0 +1,17 @@
[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
doctests = True
max-line-length = 88
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# W504 line break after binary operator
ignore =
E501,
W503,
E203,
D202,
W504
per-file-ignores =
tests/dbus_service_mocks/*.py: F821,F722

View File

@@ -1,5 +1,4 @@
"""Home Assistant Supervisor setup."""
from pathlib import Path
import re

View File

@@ -1,5 +1,4 @@
"""Main file for Supervisor."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
@@ -11,10 +10,8 @@ import zlib_fast
# Enable fast zlib before importing supervisor
zlib_fast.enable()
from supervisor import bootstrap # pylint: disable=wrong-import-position # noqa: E402
from supervisor.utils.logging import ( # pylint: disable=wrong-import-position # noqa: E402
activate_log_queue_handler,
)
from supervisor import bootstrap # noqa: E402
from supervisor.utils.logging import activate_log_queue_handler # noqa: E402
_LOGGER: logging.Logger = logging.getLogger(__name__)

View File

@@ -1,10 +1,8 @@
"""Init file for Supervisor add-ons."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
from copy import deepcopy
from datetime import datetime
import errno
from ipaddress import IPv4Address
import logging
@@ -17,14 +15,11 @@ from tempfile import TemporaryDirectory
from typing import Any, Final
import aiohttp
from awesomeversion import AwesomeVersionCompareException
from deepmerge import Merger
from securetar import atomic_contents_add, secure_path
import voluptuous as vol
from voluptuous.humanize import humanize_error
from supervisor.utils.dt import utc_from_timestamp
from ..bus import EventListener
from ..const import (
ATTR_ACCESS_TOKEN,
@@ -47,17 +42,13 @@ from ..const import (
ATTR_SLUG,
ATTR_STATE,
ATTR_SYSTEM,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TYPE,
ATTR_USER,
ATTR_UUID,
ATTR_VERSION,
ATTR_VERSION_TIMESTAMP,
ATTR_WATCHDOG,
DNS_SUFFIX,
AddonBoot,
AddonBootConfig,
AddonStartup,
AddonState,
BusEvent,
@@ -184,9 +175,6 @@ class Addon(AddonModel):
async def load(self) -> None:
"""Async initialize of object."""
if self.is_detached:
await super().refresh_path_cache()
self._listeners.append(
self.sys_bus.register_event(
BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed
@@ -199,20 +187,9 @@ class Addon(AddonModel):
)
await self._check_ingress_port()
default_image = self._image(self.data)
try:
with suppress(DockerError):
await self.instance.attach(version=self.version)
# Ensure we are using correct image for this system
await self.instance.check_image(self.version, default_image, self.arch)
except DockerError:
_LOGGER.info("No %s addon Docker image %s found", self.slug, self.image)
with suppress(DockerError):
await self.instance.install(self.version, default_image, arch=self.arch)
self.persist[ATTR_IMAGE] = default_image
self.save_persist()
@property
def ip_address(self) -> IPv4Address:
"""Return IP of add-on instance."""
@@ -248,34 +225,6 @@ class Addon(AddonModel):
"""Return True if add-on is detached."""
return self.slug not in self.sys_store.data.addons
@property
def with_icon(self) -> bool:
"""Return True if an icon exists."""
if self.is_detached:
return super().with_icon
return self.addon_store.with_icon
@property
def with_logo(self) -> bool:
"""Return True if a logo exists."""
if self.is_detached:
return super().with_logo
return self.addon_store.with_logo
@property
def with_changelog(self) -> bool:
"""Return True if a changelog exists."""
if self.is_detached:
return super().with_changelog
return self.addon_store.with_changelog
@property
def with_documentation(self) -> bool:
"""Return True if a documentation exists."""
if self.is_detached:
return super().with_documentation
return self.addon_store.with_documentation
@property
def available(self) -> bool:
"""Return True if this add-on is available on this platform."""
@@ -312,9 +261,7 @@ class Addon(AddonModel):
@property
def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced."""
if self.boot_config == AddonBootConfig.MANUAL_ONLY:
return super().boot
"""Return boot config with prio local settings."""
return self.persist.get(ATTR_BOOT, super().boot)
@boot.setter
@@ -332,28 +279,6 @@ class Addon(AddonModel):
"""Set auto update."""
self.persist[ATTR_AUTO_UPDATE] = value
@property
def auto_update_available(self) -> bool:
"""Return if it is safe to auto update addon."""
if not self.need_update or not self.auto_update:
return False
for version in self.breaking_versions:
try:
# Must update to latest so if true update crosses a breaking version
if self.version < version:
return False
except AwesomeVersionCompareException:
# If version scheme changed, we may get compare exception
# If latest version >= breaking version then assume update will
# cross it as the version scheme changes
# If both versions have compare exception, ignore as its in the past
with suppress(AwesomeVersionCompareException):
if self.latest_version >= version:
return False
return True
@property
def watchdog(self) -> bool:
"""Return True if watchdog is enable."""
@@ -369,37 +294,6 @@ class Addon(AddonModel):
else:
self.persist[ATTR_WATCHDOG] = value
@property
def system_managed(self) -> bool:
"""Return True if addon is managed by Home Assistant."""
return self.persist[ATTR_SYSTEM_MANAGED]
@system_managed.setter
def system_managed(self, value: bool) -> None:
"""Set system managed enable/disable."""
if not value and self.system_managed_config_entry:
self.system_managed_config_entry = None
self.persist[ATTR_SYSTEM_MANAGED] = value
@property
def system_managed_config_entry(self) -> str | None:
"""Return id of config entry managing this addon (if any)."""
if not self.system_managed:
return None
return self.persist.get(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY)
@system_managed_config_entry.setter
def system_managed_config_entry(self, value: str | None) -> None:
"""Set ID of config entry managing this addon."""
if not self.system_managed:
_LOGGER.warning(
"Ignoring system managed config entry for %s because it is not system managed",
self.slug,
)
else:
self.persist[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] = value
@property
def uuid(self) -> str:
"""Return an API token for this add-on."""
@@ -427,11 +321,6 @@ class Addon(AddonModel):
"""Return version of add-on."""
return self.data_store[ATTR_VERSION]
@property
def latest_version_timestamp(self) -> datetime:
"""Return when latest version was first seen."""
return utc_from_timestamp(self.data_store[ATTR_VERSION_TIMESTAMP])
@property
def protected(self) -> bool:
"""Return if add-on is in protected mode."""
@@ -766,12 +655,10 @@ class Addon(AddonModel):
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def uninstall(
self, *, remove_config: bool, remove_image: bool = True
) -> None:
async def uninstall(self) -> None:
"""Uninstall and cleanup this addon."""
try:
await self.instance.remove(remove_image=remove_image)
await self.instance.remove()
except DockerError as err:
raise AddonsError() from err
@@ -779,10 +666,6 @@ class Addon(AddonModel):
await self.unload()
# Remove config if present and requested
if self.addon_config_used and remove_config:
await remove_data(self.path_config)
# Cleanup audio settings
if self.path_pulse.exists():
with suppress(OSError):
@@ -887,7 +770,6 @@ class Addon(AddonModel):
raise AddonsError() from err
self.sys_addons.data.update(self.addon_store)
await self._check_ingress_port()
_LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)
finally:
@@ -1339,7 +1221,7 @@ class Addon(AddonModel):
_LOGGER.info("Restore/Update of image for addon %s", self.slug)
with suppress(DockerError):
await self.instance.update(version, restore_image, self.arch)
await self._check_ingress_port()
self._check_ingress_port()
# Restore data and config
def _restore_data():
@@ -1382,11 +1264,11 @@ class Addon(AddonModel):
)
raise AddonsError() from err
finally:
# Is add-on loaded
if not self.loaded:
await self.load()
finally:
# Run add-on
if data[ATTR_STATE] == AddonState.STARTED:
wait_for_start = await self.start()
@@ -1480,9 +1362,3 @@ class Addon(AddonModel):
ContainerState.UNHEALTHY,
]:
await self._restart_after_problem(event.state)
def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths."""
if self.is_detached:
return super().refresh_path_cache()
return self.addon_store.refresh_path_cache()

View File

@@ -1,5 +1,4 @@
"""Supervisor add-on build environment."""
from __future__ import annotations
from functools import cached_property
@@ -103,11 +102,11 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
except HassioArchNotFound:
return False
def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
def get_docker_args(self, version: AwesomeVersion):
"""Create a dict with Docker build arguments."""
args = {
"path": str(self.addon.path_location),
"tag": f"{image or self.addon.image}:{version!s}",
"tag": f"{self.addon.image}:{version!s}",
"dockerfile": str(self.dockerfile),
"pull": True,
"forcerm": not self.sys_dev,

View File

@@ -1,5 +1,4 @@
"""Add-on static data."""
from datetime import timedelta
from enum import StrEnum
@@ -29,7 +28,6 @@ class MappingType(StrEnum):
ATTR_BACKUP = "backup"
ATTR_BREAKING_VERSIONS = "breaking_versions"
ATTR_CODENOTARY = "codenotary"
ATTR_READ_ONLY = "read_only"
ATTR_PATH = "path"

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor add-on data."""
from copy import deepcopy
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Supervisor add-on manager."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
@@ -78,20 +77,15 @@ class AddonManager(CoreSysAttributes):
async def load(self) -> None:
"""Start up add-on management."""
# Refresh cache for all store addons
tasks: list[Awaitable[None]] = [
store.refresh_path_cache() for store in self.store.values()
]
# Load all installed addons
tasks = []
for slug in self.data.system:
addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(addon.load())
tasks.append(self.sys_create_task(addon.load()))
# Run initial tasks
_LOGGER.info("Found %d installed add-ons", len(self.data.system))
_LOGGER.info("Found %d installed add-ons", len(tasks))
if tasks:
await asyncio.gather(*tasks)
await asyncio.wait(tasks)
# Sync DNS
await self.sync_dns()
@@ -179,21 +173,13 @@ class AddonManager(CoreSysAttributes):
_LOGGER.info("Add-on '%s' successfully installed", slug)
async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
async def uninstall(self, slug: str) -> None:
"""Remove an add-on."""
if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug)
return
shared_image = any(
self.local[slug].image == addon.image
and self.local[slug].version == addon.version
for addon in self.installed
if addon.slug != slug
)
await self.local[slug].uninstall(
remove_config=remove_config, remove_image=not shared_image
)
await self.local[slug].uninstall()
_LOGGER.info("Add-on '%s' successfully removed", slug)

View File

@@ -1,18 +1,14 @@
"""Init file for Supervisor add-ons."""
from abc import ABC, abstractmethod
from collections import defaultdict
from collections.abc import Awaitable, Callable
from collections.abc import Callable
from contextlib import suppress
from datetime import datetime
import logging
from pathlib import Path
from typing import Any
from awesomeversion import AwesomeVersion, AwesomeVersionException
from supervisor.utils.dt import utc_from_timestamp
from ..const import (
ATTR_ADVANCED,
ATTR_APPARMOR,
@@ -75,7 +71,6 @@ from ..const import (
ATTR_URL,
ATTR_USB,
ATTR_VERSION,
ATTR_VERSION_TIMESTAMP,
ATTR_VIDEO,
ATTR_WATCHDOG,
ATTR_WEBUI,
@@ -83,7 +78,6 @@ from ..const import (
SECURITY_DISABLE,
SECURITY_PROFILE,
AddonBoot,
AddonBootConfig,
AddonStage,
AddonStartup,
)
@@ -96,7 +90,6 @@ from ..utils import version_is_new_enough
from .configuration import FolderMapping
from .const import (
ATTR_BACKUP,
ATTR_BREAKING_VERSIONS,
ATTR_CODENOTARY,
ATTR_PATH,
ATTR_READ_ONLY,
@@ -120,10 +113,6 @@ class AddonModel(JobGroup, ABC):
coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
)
self.slug: str = slug
self._path_icon_exists: bool = False
self._path_logo_exists: bool = False
self._path_changelog_exists: bool = False
self._path_documentation_exists: bool = False
@property
@abstractmethod
@@ -150,15 +139,10 @@ class AddonModel(JobGroup, ABC):
"""Return options with local changes."""
return self.data[ATTR_OPTIONS]
@property
def boot_config(self) -> AddonBootConfig:
"""Return boot config."""
return self.data[ATTR_BOOT]
@property
def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced."""
return AddonBoot(self.data[ATTR_BOOT])
"""Return boot config with prio local settings."""
return self.data[ATTR_BOOT]
@property
def auto_update(self) -> bool | None:
@@ -237,11 +221,6 @@ class AddonModel(JobGroup, ABC):
"""Return latest version of add-on."""
return self.data[ATTR_VERSION]
@property
def latest_version_timestamp(self) -> datetime:
"""Return when latest version was first seen."""
return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
@property
def version(self) -> AwesomeVersion:
"""Return version of add-on."""
@@ -522,22 +501,22 @@ class AddonModel(JobGroup, ABC):
@property
def with_icon(self) -> bool:
"""Return True if an icon exists."""
return self._path_icon_exists
return self.path_icon.exists()
@property
def with_logo(self) -> bool:
"""Return True if a logo exists."""
return self._path_logo_exists
return self.path_logo.exists()
@property
def with_changelog(self) -> bool:
"""Return True if a changelog exists."""
return self._path_changelog_exists
return self.path_changelog.exists()
@property
def with_documentation(self) -> bool:
"""Return True if a documentation exists."""
return self._path_documentation_exists
return self.path_documentation.exists()
@property
def supported_arch(self) -> list[str]:
@@ -641,22 +620,6 @@ class AddonModel(JobGroup, ABC):
"""Return Signer email address for CAS."""
return self.data.get(ATTR_CODENOTARY)
@property
def breaking_versions(self) -> list[AwesomeVersion]:
"""Return breaking versions of addon."""
return self.data[ATTR_BREAKING_VERSIONS]
def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths."""
def check_paths():
self._path_icon_exists = self.path_icon.exists()
self._path_logo_exists = self.path_logo.exists()
self._path_changelog_exists = self.path_changelog.exists()
self._path_documentation_exists = self.path_documentation.exists()
return self.sys_run_in_executor(check_paths)
def validate_availability(self) -> None:
"""Validate if addon is available for current system."""
return self._validate_availability(self.data, logger=_LOGGER.error)

View File

@@ -1,5 +1,4 @@
"""Add-on Options / UI rendering."""
import hashlib
import logging
from pathlib import Path

View File

@@ -1,5 +1,4 @@
"""Util add-ons functions."""
from __future__ import annotations
import asyncio

View File

@@ -1,5 +1,4 @@
"""Validate add-ons options schema."""
import logging
import re
import secrets
@@ -79,8 +78,6 @@ from ..const import (
ATTR_STATE,
ATTR_STDIN,
ATTR_SYSTEM,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_TRANSLATIONS,
@@ -98,11 +95,11 @@ from ..const import (
ROLE_ALL,
ROLE_DEFAULT,
AddonBoot,
AddonBootConfig,
AddonStage,
AddonStartup,
AddonState,
)
from ..discovery.validate import valid_discovery_service
from ..docker.const import Capabilities
from ..validate import (
docker_image,
@@ -115,7 +112,6 @@ from ..validate import (
)
from .const import (
ATTR_BACKUP,
ATTR_BREAKING_VERSIONS,
ATTR_CODENOTARY,
ATTR_PATH,
ATTR_READ_ONLY,
@@ -193,6 +189,20 @@ def _warn_addon_config(config: dict[str, Any]):
name,
)
invalid_services: list[str] = []
for service in config.get(ATTR_DISCOVERY, []):
try:
valid_discovery_service(service)
except vol.Invalid:
invalid_services.append(service)
if invalid_services:
_LOGGER.warning(
"Add-on lists the following unknown services for discovery: %s. Please report this to the maintainer of %s",
", ".join(invalid_services),
name,
)
return config
@@ -322,9 +332,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
AddonStartup
),
vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce(
AddonBootConfig
),
vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@@ -414,7 +422,6 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Coerce(int), vol.Range(min=10, max=300)
),
vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
},
extra=vol.REMOVE_EXTRA,
)
@@ -473,8 +480,6 @@ SCHEMA_ADDON_USER = vol.Schema(
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str),
},
extra=vol.REMOVE_EXTRA,
)

View File

@@ -1,22 +1,20 @@
"""Init file for Supervisor RESTful API."""
from functools import partial
import logging
from pathlib import Path
from typing import Any
from aiohttp import web
from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispatcher
from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
from ..utils.sentry import capture_exception
from ..exceptions import APIAddonNotInstalled
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
from .backups import APIBackups
from .cli import APICli
from .const import CONTENT_TYPE_TEXT
from .discovery import APIDiscovery
from .dns import APICoreDNS
from .docker import APIDocker
@@ -38,7 +36,7 @@ from .security import APISecurity
from .services import APIServices
from .store import APIStore
from .supervisor import APISupervisor
from .utils import api_process, api_process_raw
from .utils import api_process
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -67,19 +65,14 @@ class RestAPI(CoreSysAttributes):
"max_field_size": MAX_LINE_SIZE,
},
)
attach_fast_url_dispatcher(self.webapp, FastUrlDispatcher())
# service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
self._site: web.TCPSite | None = None
# share single host API handler for reuse in logging endpoints
self._api_host: APIHost | None = None
async def load(self) -> None:
"""Register REST API Calls."""
self._api_host = APIHost()
self._api_host.coresys = self.coresys
self._register_addons()
self._register_audio()
self._register_auth()
@@ -109,41 +102,10 @@ class RestAPI(CoreSysAttributes):
await self.start()
def _register_advanced_logs(self, path: str, syslog_identifier: str):
"""Register logs endpoint for a given path, returning logs for single syslog identifier."""
self.webapp.add_routes(
[
web.get(
f"{path}/logs",
partial(self._api_host.advanced_logs, identifier=syslog_identifier),
),
web.get(
f"{path}/logs/follow",
partial(
self._api_host.advanced_logs,
identifier=syslog_identifier,
follow=True,
),
),
web.get(
f"{path}/logs/boots/{{bootid}}",
partial(self._api_host.advanced_logs, identifier=syslog_identifier),
),
web.get(
f"{path}/logs/boots/{{bootid}}/follow",
partial(
self._api_host.advanced_logs,
identifier=syslog_identifier,
follow=True,
),
),
]
)
def _register_host(self) -> None:
"""Register hostcontrol functions."""
api_host = self._api_host
api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes(
[
@@ -220,8 +182,6 @@ class RestAPI(CoreSysAttributes):
web.post("/os/config/sync", api_os.config_sync),
web.post("/os/datadisk/move", api_os.migrate_data),
web.get("/os/datadisk/list", api_os.list_data),
web.post("/os/datadisk/wipe", api_os.wipe_data),
web.post("/os/boot-slot", api_os.set_boot_slot),
]
)
@@ -259,8 +219,6 @@ class RestAPI(CoreSysAttributes):
web.get("/jobs/info", api_jobs.info),
web.post("/jobs/options", api_jobs.options),
web.post("/jobs/reset", api_jobs.reset),
web.get("/jobs/{uuid}", api_jobs.job_info),
web.delete("/jobs/{uuid}", api_jobs.remove_job),
]
)
@@ -299,11 +257,11 @@ class RestAPI(CoreSysAttributes):
[
web.get("/multicast/info", api_multicast.info),
web.get("/multicast/stats", api_multicast.stats),
web.get("/multicast/logs", api_multicast.logs),
web.post("/multicast/update", api_multicast.update),
web.post("/multicast/restart", api_multicast.restart),
]
)
self._register_advanced_logs("/multicast", "hassio_multicast")
def _register_hardware(self) -> None:
"""Register hardware functions."""
@@ -376,7 +334,6 @@ class RestAPI(CoreSysAttributes):
web.post("/auth", api_auth.auth),
web.post("/auth/reset", api_auth.reset),
web.delete("/auth/cache", api_auth.cache),
web.get("/auth/list", api_auth.list_users),
]
)
@@ -390,6 +347,7 @@ class RestAPI(CoreSysAttributes):
web.get("/supervisor/ping", api_supervisor.ping),
web.get("/supervisor/info", api_supervisor.info),
web.get("/supervisor/stats", api_supervisor.stats),
web.get("/supervisor/logs", api_supervisor.logs),
web.post("/supervisor/update", api_supervisor.update),
web.post("/supervisor/reload", api_supervisor.reload),
web.post("/supervisor/restart", api_supervisor.restart),
@@ -398,38 +356,6 @@ class RestAPI(CoreSysAttributes):
]
)
async def get_supervisor_logs(*args, **kwargs):
try:
return await self._api_host.advanced_logs_handler(
*args, identifier="hassio_supervisor", **kwargs
)
except Exception as err: # pylint: disable=broad-exception-caught
# Supervisor logs are critical, so catch everything, log the exception
# and try to return Docker container logs as the fallback
_LOGGER.exception(
"Failed to get supervisor logs using advanced_logs API"
)
if not isinstance(err, HostNotSupportedError):
# No need to capture HostNotSupportedError to Sentry, the cause
# is known and reported to the user using the resolution center.
capture_exception(err)
return await api_supervisor.logs(*args, **kwargs)
self.webapp.add_routes(
[
web.get("/supervisor/logs", get_supervisor_logs),
web.get(
"/supervisor/logs/follow",
partial(get_supervisor_logs, follow=True),
),
web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
web.get(
"/supervisor/logs/boots/{bootid}/follow",
partial(get_supervisor_logs, follow=True),
),
]
)
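The shape of this fallback generalizes; here is a hedged sketch assuming the _LOGGER and capture_exception imports above are in scope (fetch_with_fallback and its parameters are illustrative names, not Supervisor code):
async def fetch_with_fallback(primary, fallback, expected_errors=()):
    """Serve the primary source; on failure log, report unexpected errors, degrade."""
    try:
        return await primary()
    except Exception as err:  # pylint: disable=broad-exception-caught
        _LOGGER.exception("Primary log source failed")
        if not isinstance(err, expected_errors):
            # Expected failures are already surfaced to the user elsewhere.
            capture_exception(err)
        return await fallback()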
def _register_homeassistant(self) -> None:
"""Register Home Assistant functions."""
api_hass = APIHomeAssistant()
@@ -438,6 +364,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes(
[
web.get("/core/info", api_hass.info),
web.get("/core/logs", api_hass.logs),
web.get("/core/stats", api_hass.stats),
web.post("/core/options", api_hass.options),
web.post("/core/update", api_hass.update),
@@ -449,12 +376,11 @@ class RestAPI(CoreSysAttributes):
]
)
self._register_advanced_logs("/core", "homeassistant")
# Reroute from legacy
self.webapp.add_routes(
[
web.get("/homeassistant/info", api_hass.info),
web.get("/homeassistant/logs", api_hass.logs),
web.get("/homeassistant/stats", api_hass.stats),
web.post("/homeassistant/options", api_hass.options),
web.post("/homeassistant/restart", api_hass.restart),
@@ -466,8 +392,6 @@ class RestAPI(CoreSysAttributes):
]
)
self._register_advanced_logs("/homeassistant", "homeassistant")
def _register_proxy(self) -> None:
"""Register Home Assistant API Proxy."""
api_proxy = APIProxy()
@@ -509,39 +433,18 @@ class RestAPI(CoreSysAttributes):
web.post("/addons/{addon}/stop", api_addons.stop),
web.post("/addons/{addon}/restart", api_addons.restart),
web.post("/addons/{addon}/options", api_addons.options),
web.post("/addons/{addon}/sys_options", api_addons.sys_options),
web.post(
"/addons/{addon}/options/validate", api_addons.options_validate
),
web.get("/addons/{addon}/options/config", api_addons.options_config),
web.post("/addons/{addon}/rebuild", api_addons.rebuild),
web.get("/addons/{addon}/logs", api_addons.logs),
web.post("/addons/{addon}/stdin", api_addons.stdin),
web.post("/addons/{addon}/security", api_addons.security),
web.get("/addons/{addon}/stats", api_addons.stats),
]
)
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def get_addon_logs(request, *args, **kwargs):
addon = api_addons.get_addon_for_request(request)
kwargs["identifier"] = f"addon_{addon.slug}"
return await self._api_host.advanced_logs(request, *args, **kwargs)
self.webapp.add_routes(
[
web.get("/addons/{addon}/logs", get_addon_logs),
web.get(
"/addons/{addon}/logs/follow",
partial(get_addon_logs, follow=True),
),
web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
web.get(
"/addons/{addon}/logs/boots/{bootid}/follow",
partial(get_addon_logs, follow=True),
),
]
)
# Legacy routing to support requests for not installed addons
api_store = APIStore()
api_store.coresys = self.coresys
@@ -639,6 +542,7 @@ class RestAPI(CoreSysAttributes):
[
web.get("/dns/info", api_dns.info),
web.get("/dns/stats", api_dns.stats),
web.get("/dns/logs", api_dns.logs),
web.post("/dns/update", api_dns.update),
web.post("/dns/options", api_dns.options),
web.post("/dns/restart", api_dns.restart),
@@ -646,17 +550,18 @@ class RestAPI(CoreSysAttributes):
]
)
self._register_advanced_logs("/dns", "hassio_dns")
def _register_audio(self) -> None:
"""Register Audio functions."""
api_audio = APIAudio()
api_audio.coresys = self.coresys
api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/audio/info", api_audio.info),
web.get("/audio/stats", api_audio.stats),
web.get("/audio/logs", api_audio.logs),
web.post("/audio/update", api_audio.update),
web.post("/audio/restart", api_audio.restart),
web.post("/audio/reload", api_audio.reload),
@@ -669,8 +574,6 @@ class RestAPI(CoreSysAttributes):
]
)
self._register_advanced_logs("/audio", "hassio_audio")
def _register_mounts(self) -> None:
"""Register mounts endpoints."""
api_mounts = APIMounts()
@@ -697,6 +600,7 @@ class RestAPI(CoreSysAttributes):
web.get("/store", api_store.store_info),
web.get("/store/addons", api_store.addons_list),
web.get("/store/addons/{addon}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
web.get(
@@ -718,8 +622,6 @@ class RestAPI(CoreSysAttributes):
"/store/addons/{addon}/update/{version}",
api_store.addons_addon_update,
),
# Must be below others since it has a wildcard in resource path
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.post("/store/reload", api_store.reload),
web.get("/store/repositories", api_store.repositories_list),
web.get(
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -82,8 +81,6 @@ from ..const import (
ATTR_STARTUP,
ATTR_STATE,
ATTR_STDIN,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TRANSLATIONS,
ATTR_UART,
ATTR_UDEV,
@@ -98,7 +95,6 @@ from ..const import (
ATTR_WEBUI,
REQUEST_FROM,
AddonBoot,
AddonBootConfig,
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
@@ -110,8 +106,8 @@ from ..exceptions import (
PwnedSecret,
)
from ..validate import docker_ports
from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED
from .utils import api_process, api_validate, json_loads
from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate, json_loads
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -130,26 +126,15 @@ SCHEMA_OPTIONS = vol.Schema(
}
)
SCHEMA_SYS_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str),
}
)
# pylint: disable=no-value-for-parameter
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
SCHEMA_UNINSTALL = vol.Schema(
{vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
)
# pylint: enable=no-value-for-parameter
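As a quick illustration of how these voluptuous schemas behave (assuming the definitions above are in scope): defaults are filled in for missing keys, and provided values are validated.
assert SCHEMA_UNINSTALL({}) == {ATTR_REMOVE_CONFIG: False}
assert SCHEMA_UNINSTALL({ATTR_REMOVE_CONFIG: True}) == {ATTR_REMOVE_CONFIG: True}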
class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions."""
def get_addon_for_request(self, request: web.Request) -> Addon:
"""Return addon, throw an exception if it doesn't exist."""
def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist."""
addon_slug: str = request.match_info.get("addon")
# Lookup itself
@@ -189,7 +174,6 @@ class APIAddons(CoreSysAttributes):
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_SYSTEM_MANAGED: addon.system_managed,
}
for addon in self.sys_addons.installed
]
@@ -203,7 +187,7 @@ class APIAddons(CoreSysAttributes):
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information."""
addon: AnyAddon = self.get_addon_for_request(request)
addon: AnyAddon = self._extract_addon(request)
data = {
ATTR_NAME: addon.name,
@@ -218,7 +202,6 @@ class APIAddons(CoreSysAttributes):
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon),
ATTR_BOOT_CONFIG: addon.boot_config,
ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options,
ATTR_SCHEMA: addon.schema_ui,
@@ -278,8 +261,6 @@ class APIAddons(CoreSysAttributes):
ATTR_WATCHDOG: addon.watchdog,
ATTR_DEVICES: addon.static_devices
+ [device.path for device in addon.devices],
ATTR_SYSTEM_MANAGED: addon.system_managed,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
}
return data
@@ -287,7 +268,7 @@ class APIAddons(CoreSysAttributes):
@api_process
async def options(self, request: web.Request) -> None:
"""Store user options for add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
# Update secrets for validation
await self.sys_homeassistant.secrets.reload()
@@ -302,10 +283,6 @@ class APIAddons(CoreSysAttributes):
if ATTR_OPTIONS in body:
addon.options = body[ATTR_OPTIONS]
if ATTR_BOOT in body:
if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
raise APIError(
f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
)
addon.boot = body[ATTR_BOOT]
if ATTR_AUTO_UPDATE in body:
addon.auto_update = body[ATTR_AUTO_UPDATE]
@@ -323,24 +300,10 @@ class APIAddons(CoreSysAttributes):
addon.save_persist()
@api_process
async def sys_options(self, request: web.Request) -> None:
"""Store system options for an add-on."""
addon = self.get_addon_for_request(request)
# Validate/Process Body
body = await api_validate(SCHEMA_SYS_OPTIONS, request)
if ATTR_SYSTEM_MANAGED in body:
addon.system_managed = body[ATTR_SYSTEM_MANAGED]
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
addon.save_persist()
@api_process
async def options_validate(self, request: web.Request) -> None:
"""Validate user options for add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}
options = await request.json(loads=json_loads) or addon.options
@@ -382,7 +345,7 @@ class APIAddons(CoreSysAttributes):
slug: str = request.match_info.get("addon")
if slug != "self":
raise APIForbidden("This can be only read by the Add-on itself!")
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
# Lookup/reload secrets
await self.sys_homeassistant.secrets.reload()
@@ -394,7 +357,7 @@ class APIAddons(CoreSysAttributes):
@api_process
async def security(self, request: web.Request) -> None:
"""Store security options for add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
if ATTR_PROTECTED in body:
@@ -406,7 +369,7 @@ class APIAddons(CoreSysAttributes):
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
"""Return resource information."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
stats: DockerStats = await addon.stats()
@@ -422,47 +385,48 @@ class APIAddons(CoreSysAttributes):
}
@api_process
async def uninstall(self, request: web.Request) -> Awaitable[None]:
def uninstall(self, request: web.Request) -> Awaitable[None]:
"""Uninstall add-on."""
addon = self.get_addon_for_request(request)
body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
return await asyncio.shield(
self.sys_addons.uninstall(
addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
)
)
addon = self._extract_addon(request)
return asyncio.shield(self.sys_addons.uninstall(addon.slug))
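A standalone sketch of why asyncio.shield appears throughout these handlers: if the request is cancelled (for example, the client disconnects), the shielded operation keeps running to completion instead of being cancelled with it. The sleep below is an illustrative stand-in for the real uninstall work:
import asyncio
async def long_uninstall() -> str:
    await asyncio.sleep(5)  # stand-in for the actual uninstall
    return "done"
async def handler() -> str:
    # Cancelling handler() cancels this await, but the inner task keeps running.
    return await asyncio.shield(long_uninstall())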
@api_process
async def start(self, request: web.Request) -> None:
"""Start add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
if start_task := await asyncio.shield(addon.start()):
await start_task
@api_process
def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
return asyncio.shield(addon.stop())
@api_process
async def restart(self, request: web.Request) -> None:
"""Restart add-on."""
addon: Addon = self.get_addon_for_request(request)
addon: Addon = self._extract_addon(request)
if start_task := await asyncio.shield(addon.restart()):
await start_task
@api_process
async def rebuild(self, request: web.Request) -> None:
"""Rebuild local build add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
await start_task
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return logs from add-on."""
addon = self._extract_addon(request)
return addon.logs()
@api_process
async def stdin(self, request: web.Request) -> None:
"""Write to stdin of add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
if not addon.with_stdin:
raise APIError(f"STDIN not supported the {addon.slug} add-on")
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Audio RESTful API."""
import asyncio
from collections.abc import Awaitable
from dataclasses import asdict
@@ -36,7 +35,8 @@ from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..host.sound import StreamType
from ..validate import version_tag
from .utils import api_process, api_validate
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -111,6 +111,11 @@ class APIAudio(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.audio.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Audio Docker logs."""
return self.sys_plugins.audio.logs()
@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Audio plugin."""
View File
@@ -1,8 +1,6 @@
"""Init file for Supervisor auth/SSO RESTful API."""
import asyncio
import logging
from typing import Any
from aiohttp import BasicAuth, web
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -10,19 +8,11 @@ from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol
from ..addons.addon import Addon
from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
from ..utils.json import json_loads
from .const import (
ATTR_GROUP_IDS,
ATTR_IS_ACTIVE,
ATTR_IS_OWNER,
ATTR_LOCAL_ONLY,
ATTR_USERS,
CONTENT_TYPE_JSON,
CONTENT_TYPE_URL,
)
from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -100,21 +90,3 @@ class APIAuth(CoreSysAttributes):
async def cache(self, request: web.Request) -> None:
"""Process cache reset request."""
self.sys_auth.reset_data()
@api_process
async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
"""List users on the Home Assistant instance."""
return {
ATTR_USERS: [
{
ATTR_USERNAME: user[ATTR_USERNAME],
ATTR_NAME: user[ATTR_NAME],
ATTR_IS_OWNER: user[ATTR_IS_OWNER],
ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
}
for user in await self.sys_auth.list_users()
if user[ATTR_USERNAME]
]
}
View File
@@ -1,7 +1,5 @@
"""Backups RESTful API."""
import asyncio
from collections.abc import Callable
import errno
import logging
from pathlib import Path
@@ -13,7 +11,6 @@ from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol
from ..backups.backup import Backup
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
from ..const import (
ATTR_ADDONS,
@@ -36,15 +33,12 @@ from ..const import (
ATTR_TIMEOUT,
ATTR_TYPE,
ATTR_VERSION,
BusEvent,
CoreState,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..jobs import JobSchedulerOptions
from ..mounts.const import MountUsage
from ..resolution.const import UnhealthyReason
from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR
from .const import CONTENT_TYPE_TAR
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -56,21 +50,17 @@ RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
_ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_FULL = vol.Schema(
SCHEMA_RESTORE_PARTIAL = vol.Schema(
{
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
}
)
SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
}
)
SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})
SCHEMA_BACKUP_FULL = vol.Schema(
{
vol.Optional(ATTR_NAME): str,
@@ -78,7 +68,6 @@ SCHEMA_BACKUP_FULL = vol.Schema(
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
vol.Optional(ATTR_LOCATON): vol.Maybe(str),
vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
}
)
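For context, vol.Schema.extend (used for SCHEMA_RESTORE_PARTIAL above) returns a new schema with the extra keys merged in, so the partial-restore schema keeps the base options. A minimal sketch with illustrative keys:
base = vol.Schema({vol.Optional("password"): vol.Maybe(str)})
partial_schema = base.extend({vol.Optional("addons"): [str]})
assert partial_schema({"password": None, "addons": ["a"]}) == {
    "password": None,
    "addons": ["a"],
}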
@@ -215,109 +204,46 @@ class APIBackups(CoreSysAttributes):
return body
async def _background_backup_task(
self, backup_method: Callable, *args, **kwargs
) -> tuple[asyncio.Task, str]:
"""Start backup task in background and return task and job ID."""
event = asyncio.Event()
job, backup_task = self.sys_jobs.schedule_job(
backup_method, JobSchedulerOptions(), *args, **kwargs
)
async def release_on_freeze(new_state: CoreState):
if new_state == CoreState.FREEZE:
event.set()
# Wait for system to get into freeze state before returning
# If the backup fails validation it will raise before getting there
listener = self.sys_bus.register_event(
BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
)
try:
await asyncio.wait(
(
backup_task,
self.sys_create_task(event.wait()),
),
return_when=asyncio.FIRST_COMPLETED,
)
return (backup_task, job.uuid)
finally:
self.sys_bus.remove_listener(listener)
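Stripped of the CoreSys plumbing, the synchronization above reduces to this stdlib-only sketch (names are illustrative): wait until either the backup task finishes (a validation error raises before the freeze) or the freeze event fires, whichever comes first.
import asyncio
async def wait_for_freeze_or_done(
    backup_task: asyncio.Task, freeze_event: asyncio.Event
) -> None:
    waiter = asyncio.create_task(freeze_event.wait())
    try:
        await asyncio.wait({backup_task, waiter}, return_when=asyncio.FIRST_COMPLETED)
    finally:
        waiter.cancel()  # don't leak the event waiter if the backup won the race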
@api_process
async def backup_full(self, request):
"""Create full backup."""
body = await api_validate(SCHEMA_BACKUP_FULL, request)
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_full, **self._location_to_mount(body)
backup = await asyncio.shield(
self.sys_backups.do_backup_full(**self._location_to_mount(body))
)
if background and not backup_task.done():
return {ATTR_JOB_ID: job_id}
backup: Backup = await backup_task
if backup:
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
raise APIError(
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
return {ATTR_SLUG: backup.slug}
return False
@api_process
async def backup_partial(self, request):
"""Create a partial backup."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_partial, **self._location_to_mount(body)
backup = await asyncio.shield(
self.sys_backups.do_backup_partial(**self._location_to_mount(body))
)
if background and not backup_task.done():
return {ATTR_JOB_ID: job_id}
backup: Backup = await backup_task
if backup:
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
raise APIError(
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
return {ATTR_SLUG: backup.slug}
return False
@api_process
async def restore_full(self, request):
"""Full restore of a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_FULL, request)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_full, backup, **body
)
if background and not restore_task.done() or await restore_task:
return {ATTR_JOB_ID: job_id}
raise APIError(
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))
@api_process
async def restore_partial(self, request):
"""Partial restore a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_partial, backup, **body
)
if background and not restore_task.done() or await restore_task:
return {ATTR_JOB_ID: job_id}
raise APIError(
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
@api_process
async def freeze(self, request):
@@ -343,9 +269,9 @@ class APIBackups(CoreSysAttributes):
_LOGGER.info("Downloading backup %s", backup.slug)
response = web.FileResponse(backup.tarfile)
response.content_type = CONTENT_TYPE_TAR
response.headers[CONTENT_DISPOSITION] = (
f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
)
response.headers[
CONTENT_DISPOSITION
] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
return response
@api_process
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor HA cli RESTful API."""
import asyncio
import logging
from typing import Any
View File
@@ -1,14 +1,11 @@
"""Const for API."""
from enum import StrEnum
CONTENT_TYPE_BINARY = "application/octet-stream"
CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_PNG = "image/png"
CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
CONTENT_TYPE_X_LOG = "text/x-log"
COOKIE_INGRESS = "ingress_session"
@@ -16,10 +13,6 @@ ATTR_AGENT_VERSION = "agent_version"
ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_ATTRIBUTES = "attributes"
ATTR_AVAILABLE_UPDATES = "available_updates"
ATTR_BACKGROUND = "background"
ATTR_BOOT_CONFIG = "boot_config"
ATTR_BOOT_SLOT = "boot_slot"
ATTR_BOOT_SLOTS = "boot_slots"
ATTR_BOOT_TIMESTAMP = "boot_timestamp"
ATTR_BOOTS = "boots"
ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
@@ -37,43 +30,25 @@ ATTR_DT_UTC = "dt_utc"
ATTR_EJECTABLE = "ejectable"
ATTR_FALLBACK = "fallback"
ATTR_FILESYSTEMS = "filesystems"
ATTR_FORCE = "force"
ATTR_GROUP_IDS = "group_ids"
ATTR_IDENTIFIERS = "identifiers"
ATTR_IS_ACTIVE = "is_active"
ATTR_IS_OWNER = "is_owner"
ATTR_JOB_ID = "job_id"
ATTR_JOBS = "jobs"
ATTR_LLMNR = "llmnr"
ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
ATTR_LOCAL_ONLY = "local_only"
ATTR_MDNS = "mdns"
ATTR_MODEL = "model"
ATTR_MOUNTS = "mounts"
ATTR_MOUNT_POINTS = "mount_points"
ATTR_PANEL_PATH = "panel_path"
ATTR_REMOVABLE = "removable"
ATTR_REMOVE_CONFIG = "remove_config"
ATTR_REVISION = "revision"
ATTR_SAFE_MODE = "safe_mode"
ATTR_SEAT = "seat"
ATTR_SIGNED = "signed"
ATTR_STARTUP_TIME = "startup_time"
ATTR_STATUS = "status"
ATTR_SUBSYSTEM = "subsystem"
ATTR_SYSFS = "sysfs"
ATTR_SYSTEM_HEALTH_LED = "system_health_led"
ATTR_TIME_DETECTED = "time_detected"
ATTR_UPDATE_TYPE = "update_type"
ATTR_USAGE = "usage"
ATTR_USE_NTP = "use_ntp"
ATTR_USERS = "users"
ATTR_USAGE = "usage"
ATTR_VENDOR = "vendor"
ATTR_VIRTUALIZATION = "virtualization"
class BootSlot(StrEnum):
"""Boot slots used by HAOS."""
A = "A"
B = "B"
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor network RESTful API."""
import logging
import voluptuous as vol
@@ -16,6 +15,7 @@ from ..const import (
AddonState,
)
from ..coresys import CoreSysAttributes
from ..discovery.validate import valid_discovery_service
from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate, require_home_assistant
@@ -24,7 +24,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_DISCOVERY = vol.Schema(
{
vol.Required(ATTR_SERVICE): str,
vol.Required(ATTR_CONFIG): dict,
vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
}
)
@@ -71,6 +71,15 @@ class APIDiscovery(CoreSysAttributes):
addon: Addon = request[REQUEST_FROM]
service = body[ATTR_SERVICE]
try:
valid_discovery_service(service)
except vol.Invalid:
_LOGGER.warning(
"Received discovery message for unknown service %s from addon %s. Please report this to the maintainer of the add-on",
service,
addon.name,
)
# Access?
if body[ATTR_SERVICE] not in addon.discovery:
_LOGGER.error(
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor DNS RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -27,8 +26,8 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import dns_server_list, version_tag
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
from .utils import api_process, api_validate
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -106,6 +105,11 @@ class APICoreDNS(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.dns.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return DNS Docker logs."""
return self.sys_plugins.dns.logs()
@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart CoreDNS plugin."""
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import logging
from typing import Any
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor hardware RESTful API."""
import logging
from typing import Any
@@ -17,7 +16,7 @@ from ..const import (
ATTR_SYSTEM,
)
from ..coresys import CoreSysAttributes
from ..dbus.udisks2 import UDisks2Manager
from ..dbus.udisks2 import UDisks2
from ..dbus.udisks2.block import UDisks2Block
from ..dbus.udisks2.drive import UDisks2Drive
from ..hardware.data import Device
@@ -73,7 +72,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
}
def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]:
"""Return a dict with information of a disk to be used in the API."""
return {
ATTR_VENDOR: drive.vendor,
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -35,10 +34,10 @@ from ..const import (
ATTR_WATCHDOG,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError
from ..exceptions import APIError
from ..validate import docker_image, network_port, version_tag
from .const import ATTR_FORCE, ATTR_SAFE_MODE
from .utils import api_process, api_validate
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -64,34 +63,10 @@ SCHEMA_UPDATE = vol.Schema(
}
)
SCHEMA_RESTART = vol.Schema(
{
vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
SCHEMA_STOP = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
class APIHomeAssistant(CoreSysAttributes):
"""Handle RESTful API for Home Assistant functions."""
async def _check_offline_migration(self, force: bool = False) -> None:
"""Check and raise if there's an offline DB migration in progress."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Offline database migration in progress, try again after it has completed"
)
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return host information."""
@@ -119,9 +94,6 @@ class APIHomeAssistant(CoreSysAttributes):
if ATTR_IMAGE in body:
self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.override_image = (
self.sys_homeassistant.image != self.sys_homeassistant.default_image
)
if ATTR_BOOT in body:
self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -173,7 +145,6 @@ class APIHomeAssistant(CoreSysAttributes):
async def update(self, request: web.Request) -> None:
"""Update Home Assistant."""
body = await api_validate(SCHEMA_UPDATE, request)
await self._check_offline_migration()
await asyncio.shield(
self.sys_homeassistant.core.update(
@@ -183,12 +154,9 @@ class APIHomeAssistant(CoreSysAttributes):
)
@api_process
async def stop(self, request: web.Request) -> Awaitable[None]:
def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop Home Assistant."""
body = await api_validate(SCHEMA_STOP, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_homeassistant.core.stop())
return asyncio.shield(self.sys_homeassistant.core.stop())
@api_process
def start(self, request: web.Request) -> Awaitable[None]:
@@ -196,24 +164,19 @@ class APIHomeAssistant(CoreSysAttributes):
return asyncio.shield(self.sys_homeassistant.core.start())
@api_process
async def restart(self, request: web.Request) -> None:
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
await asyncio.shield(
self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
)
return asyncio.shield(self.sys_homeassistant.core.restart())
@api_process
async def rebuild(self, request: web.Request) -> None:
def rebuild(self, request: web.Request) -> Awaitable[None]:
"""Rebuild Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
return asyncio.shield(self.sys_homeassistant.core.rebuild())
await asyncio.shield(
self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])
)
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Home Assistant Docker logs."""
return self.sys_homeassistant.core.logs()
@api_process
async def check(self, request: web.Request) -> None:
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor host RESTful API."""
import asyncio
from contextlib import suppress
import logging
@@ -28,15 +27,8 @@ from ..const import (
ATTR_TIMEZONE,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError
from ..host.const import (
PARAM_BOOT_ID,
PARAM_FOLLOW,
PARAM_SYSLOG_IDENTIFIER,
LogFormat,
LogFormatter,
)
from ..utils.systemd_journal import journal_logs_reader
from ..exceptions import APIError, HostLogError
from ..host.const import PARAM_BOOT_ID, PARAM_FOLLOW, PARAM_SYSLOG_IDENTIFIER
from .const import (
ATTR_AGENT_VERSION,
ATTR_APPARMOR_VERSION,
@@ -46,48 +38,26 @@ from .const import (
ATTR_BROADCAST_MDNS,
ATTR_DT_SYNCHRONIZED,
ATTR_DT_UTC,
ATTR_FORCE,
ATTR_IDENTIFIERS,
ATTR_LLMNR_HOSTNAME,
ATTR_STARTUP_TIME,
ATTR_USE_NTP,
ATTR_VIRTUALIZATION,
CONTENT_TYPE_TEXT,
CONTENT_TYPE_X_LOG,
)
from .utils import api_process, api_process_raw, api_validate
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
IDENTIFIER = "identifier"
BOOTID = "bootid"
DEFAULT_LINES = 100
DEFAULT_RANGE = 100
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})
# pylint: disable=no-value-for-parameter
SCHEMA_SHUTDOWN = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
# pylint: enable=no-value-for-parameter
class APIHost(CoreSysAttributes):
"""Handle RESTful API for host functions."""
async def _check_ha_offline_migration(self, force: bool) -> None:
"""Check if HA has an offline migration in progress and raise if not forced."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Home Assistant offline database migration in progress, please wait until complete before shutting down host"
)
@api_process
async def info(self, request):
"""Return host information."""
@@ -95,7 +65,6 @@ class APIHost(CoreSysAttributes):
ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
ATTR_CHASSIS: self.sys_host.info.chassis,
ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
ATTR_CPE: self.sys_host.info.cpe,
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_DISK_FREE: self.sys_host.info.free_space,
@@ -129,20 +98,14 @@ class APIHost(CoreSysAttributes):
)
@api_process
async def reboot(self, request):
def reboot(self, request):
"""Reboot host."""
body = await api_validate(SCHEMA_SHUTDOWN, request)
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.reboot())
return asyncio.shield(self.sys_host.control.reboot())
@api_process
async def shutdown(self, request):
def shutdown(self, request):
"""Poweroff host."""
body = await api_validate(SCHEMA_SHUTDOWN, request)
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.shutdown())
return asyncio.shield(self.sys_host.control.shutdown())
@api_process
def reload(self, request):
@@ -190,11 +153,11 @@ class APIHost(CoreSysAttributes):
raise APIError() from err
return possible_offset
async def advanced_logs_handler(
@api_process
async def advanced_logs(
self, request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.StreamResponse:
"""Return systemd-journald logs."""
log_formatter = LogFormatter.PLAIN
params = {}
if identifier:
params[PARAM_SYSLOG_IDENTIFIER] = identifier
@@ -202,8 +165,6 @@ class APIHost(CoreSysAttributes):
params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
else:
params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
# host logs should be always verbose, no matter what Accept header is used
log_formatter = LogFormatter.VERBOSE
if BOOTID in request.match_info:
params[PARAM_BOOT_ID] = await self._get_boot_id(
@@ -214,62 +175,28 @@ class APIHost(CoreSysAttributes):
if ACCEPT in request.headers and request.headers[ACCEPT] not in [
CONTENT_TYPE_TEXT,
CONTENT_TYPE_X_LOG,
"*/*",
]:
raise APIError(
"Invalid content type requested. Only text/plain and text/x-log "
"supported for now."
"Invalid content type requested. Only text/plain supported for now."
)
if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
log_formatter = LogFormatter.VERBOSE
if "lines" in request.query:
lines = request.query.get("lines", DEFAULT_LINES)
try:
lines = int(lines)
except ValueError:
# If the user passed a non-integer value, just use the default instead of error.
lines = DEFAULT_LINES
finally:
# We can't use the entries= Range header syntax to refer to the last 1 line,
# and passing 1 to the calculation below would return the 1st line of the logs
# instead. Since this is really an edge case that doesn't matter much, we'll just
# return 2 lines at minimum.
lines = max(2, lines)
# entries=cursor[[:num_skip]:num_entries]
range_header = f"entries=:-{lines-1}:{'' if follow else lines}"
elif RANGE in request.headers:
if RANGE in request.headers:
range_header = request.headers.get(RANGE)
else:
range_header = (
f"entries=:-{DEFAULT_LINES-1}:{'' if follow else DEFAULT_LINES}"
)
range_header = f"entries=:-{DEFAULT_RANGE}:"
async with self.sys_host.logs.journald_logs(
params=params, range_header=range_header, accept=LogFormat.JOURNAL
params=params, range_header=range_header
) as resp:
try:
response = web.StreamResponse()
response.content_type = CONTENT_TYPE_TEXT
headers_returned = False
async for cursor, line in journal_logs_reader(resp, log_formatter):
if not headers_returned:
if cursor:
response.headers["X-First-Cursor"] = cursor
await response.prepare(request)
headers_returned = True
await response.write(line.encode("utf-8") + b"\n")
await response.prepare(request)
async for data in resp.content:
await response.write(data)
except ConnectionResetError as ex:
raise APIError(
"Connection reset when trying to fetch data from systemd-journald."
) from ex
return response
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def advanced_logs(
self, request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.StreamResponse:
"""Return systemd-journald logs. Wrapped as standard API handler."""
return await self.advanced_logs_handler(request, identifier, follow)
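To make the Range header arithmetic above concrete (journald syntax: entries=cursor[[:num_skip]:num_entries]), a short worked example:
lines = 100
follow = False
range_header = f"entries=:-{lines - 1}:{'' if follow else lines}"
# "entries=:-99:100": seek 99 entries back from the end of the journal, then
# return 100 entries, i.e. exactly the last 100 lines. With follow=True the
# num_entries field is left empty so the stream stays open.
assert range_header == "entries=:-99:100"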
View File
@@ -1,5 +1,4 @@
"""Supervisor Add-on ingress service."""
import asyncio
from ipaddress import ip_address
import logging
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Jobs RESTful API."""
import logging
from typing import Any
@@ -7,7 +6,6 @@ from aiohttp import web
import voluptuous as vol
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..jobs import SupervisorJob
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .const import ATTR_JOBS
@@ -23,7 +21,7 @@ SCHEMA_OPTIONS = vol.Schema(
class APIJobs(CoreSysAttributes):
"""Handle RESTful API for OS functions."""
def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
def _list_jobs(self) -> list[dict[str, Any]]:
"""Return current job tree."""
jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
for job in self.sys_jobs.jobs:
@@ -36,11 +34,9 @@ class APIJobs(CoreSysAttributes):
jobs_by_parent[job.parent_id].append(job)
job_list: list[dict[str, Any]] = []
queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
[(job_list, start)]
if start
else [(job_list, job) for job in jobs_by_parent.get(None, [])]
)
queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = [
(job_list, job) for job in jobs_by_parent.get(None, [])
]
while queue:
(current_list, current_job) = queue.pop(0)
@@ -82,19 +78,3 @@ class APIJobs(CoreSysAttributes):
async def reset(self, request: web.Request) -> None:
"""Reset options for JobManager."""
self.sys_jobs.reset_data()
@api_process
async def job_info(self, request: web.Request) -> dict[str, Any]:
"""Get details of a job by ID."""
job = self.sys_jobs.get_job(request.match_info.get("uuid"))
return self._list_jobs(job)[0]
@api_process
async def remove_job(self, request: web.Request) -> None:
"""Remove a completed job."""
job = self.sys_jobs.get_job(request.match_info.get("uuid"))
if not job.done:
raise APIError(f"Job {job.uuid} is not done!")
self.sys_jobs.remove_job(job)
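The traversal in _list_jobs above is a plain breadth-first walk over a parent-to-children mapping. A self-contained sketch with job objects reduced to strings (toy data, not the SupervisorJob model):
from collections import deque
jobs_by_parent = {None: ["root"], "root": ["child-a", "child-b"]}
def list_jobs(start=None):
    job_list = []
    queue = deque(
        [(job_list, start)]
        if start
        else [(job_list, job) for job in jobs_by_parent.get(None, [])]
    )
    while queue:
        current_list, job = queue.popleft()
        entry = {"name": job, "child_jobs": []}
        current_list.append(entry)
        queue.extend(
            (entry["child_jobs"], child) for child in jobs_by_parent.get(job, [])
        )
    return job_list
assert list_jobs()[0]["child_jobs"][0]["name"] == "child-a"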
View File
@@ -1,5 +1,4 @@
"""Handle security part of this API."""
import logging
import re
from typing import Final
@@ -9,8 +8,6 @@ from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion
from supervisor.homeassistant.const import LANDINGPAGE
from ...addons.const import RE_SLUG
from ...const import (
REQUEST_FROM,
@@ -80,13 +77,6 @@ ADDONS_API_BYPASS: Final = re.compile(
r")$"
)
# Home Assistant only
CORE_ONLY_PATHS: Final = re.compile(
r"^(?:"
r"/addons/" + RE_SLUG + "/sys_options"
r")$"
)
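Assuming RE_SLUG matches ordinary add-on slugs, the matcher above admits the sys_options path and nothing else, e.g.:
assert CORE_ONLY_PATHS.match("/addons/a0d7b954_example/sys_options")
assert not CORE_ONLY_PATHS.match("/addons/a0d7b954_example/options")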
# Policy role add-on API access
ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
ROLE_DEFAULT: re.compile(
@@ -113,8 +103,6 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
r"|/audio/.+"
r"|/auth/cache"
r"|/available_updates"
r"|/backups.*"
r"|/cli/.+"
r"|/core/.+"
r"|/dns/.+"
@@ -124,17 +112,16 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
r"|/hassos/.+"
r"|/homeassistant/.+"
r"|/host/.+"
r"|/mounts.*"
r"|/multicast/.+"
r"|/network/.+"
r"|/observer/.+"
r"|/os/(?!datadisk/wipe).+"
r"|/refresh_updates"
r"|/os/.+"
r"|/resolution/.+"
r"|/security/.+"
r"|/backups.*"
r"|/snapshots.*"
r"|/store.*"
r"|/supervisor/.+"
r"|/security/.+"
r")$"
),
ROLE_ADMIN: re.compile(
@@ -242,9 +229,6 @@ class SecurityMiddleware(CoreSysAttributes):
if supervisor_token == self.sys_homeassistant.supervisor_token:
_LOGGER.debug("%s access from Home Assistant", request.path)
request_from = self.sys_homeassistant
elif CORE_ONLY_PATHS.match(request.path):
_LOGGER.warning("Attempted access to %s from client besides Home Assistant")
raise HTTPForbidden()
# Host
if supervisor_token == self.sys_plugins.cli.supervisor_token:
@@ -290,10 +274,8 @@ class SecurityMiddleware(CoreSysAttributes):
@middleware
async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy."""
if (
request[REQUEST_FROM] != self.sys_homeassistant
or self.sys_homeassistant.version == LANDINGPAGE
or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION)
if request[REQUEST_FROM] != self.sys_homeassistant or version_is_new_enough(
self.sys_homeassistant.version, _CORE_VERSION
):
return await handler(request)
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Multicast RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -24,7 +23,8 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import version_tag
from .utils import api_process, api_validate
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -69,6 +69,11 @@ class APIMulticast(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.multicast.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Multicast Docker logs."""
return self.sys_plugins.multicast.logs()
@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Multicast plugin."""
View File
@@ -1,8 +1,8 @@
"""REST API for network."""
import asyncio
from collections.abc import Awaitable
from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface
from dataclasses import replace
from ipaddress import ip_address, ip_interface
from typing import Any
from aiohttp import web
@@ -48,28 +48,18 @@ from ..host.configuration import (
Interface,
InterfaceMethod,
IpConfig,
IpSetting,
VlanConfig,
WifiConfig,
)
from ..host.const import AuthMethod, InterfaceType, WifiMode
from .utils import api_process, api_validate
_SCHEMA_IPV4_CONFIG = vol.Schema(
_SCHEMA_IP_CONFIG = vol.Schema(
{
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)],
vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)],
}
)
_SCHEMA_IPV6_CONFIG = vol.Schema(
{
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
}
)
@@ -86,18 +76,18 @@ _SCHEMA_WIFI_CONFIG = vol.Schema(
# pylint: disable=no-value-for-parameter
SCHEMA_UPDATE = vol.Schema(
{
vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG,
vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG,
vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
vol.Optional(ATTR_ENABLED): vol.Boolean(),
}
)
def ipconfig_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
"""Return a dict with information about ip configuration."""
return {
ATTR_METHOD: setting.method,
ATTR_METHOD: config.method,
ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
@@ -132,8 +122,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary,
ATTR_MAC: interface.mac,
ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
}
@@ -207,26 +197,24 @@ class APINetwork(CoreSysAttributes):
# Apply config
for key, config in body.items():
if key == ATTR_IPV4:
interface.ipv4setting = IpSetting(
config.get(ATTR_METHOD, InterfaceMethod.STATIC),
config.get(ATTR_ADDRESS, []),
config.get(ATTR_GATEWAY),
config.get(ATTR_NAMESERVERS, []),
interface.ipv4 = replace(
interface.ipv4
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
**config,
)
elif key == ATTR_IPV6:
interface.ipv6setting = IpSetting(
config.get(ATTR_METHOD, InterfaceMethod.STATIC),
config.get(ATTR_ADDRESS, []),
config.get(ATTR_GATEWAY),
config.get(ATTR_NAMESERVERS, []),
interface.ipv6 = replace(
interface.ipv6
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
**config,
)
elif key == ATTR_WIFI:
interface.wifi = WifiConfig(
config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE),
config.get(ATTR_SSID, ""),
config.get(ATTR_AUTH, AuthMethod.OPEN),
config.get(ATTR_PSK, None),
None,
interface.wifi = replace(
interface.wifi
or WifiConfig(
WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
),
**config,
)
elif key == ATTR_ENABLED:
interface.enabled = config
@@ -268,22 +256,24 @@ class APINetwork(CoreSysAttributes):
vlan_config = VlanConfig(vlan, interface.name)
ipv4_setting = None
ipv4_config = None
if ATTR_IPV4 in body:
ipv4_setting = IpSetting(
ipv4_config = IpConfig(
body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
body[ATTR_IPV4].get(ATTR_ADDRESS, []),
body[ATTR_IPV4].get(ATTR_GATEWAY, None),
body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
None,
)
ipv6_setting = None
ipv6_config = None
if ATTR_IPV6 in body:
ipv6_setting = IpSetting(
ipv6_config = IpConfig(
body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
body[ATTR_IPV6].get(ATTR_ADDRESS, []),
body[ATTR_IPV6].get(ATTR_GATEWAY, None),
body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
None,
)
vlan_interface = Interface(
@@ -294,10 +284,8 @@ class APINetwork(CoreSysAttributes):
True,
False,
InterfaceType.VLAN,
None,
ipv4_setting,
None,
ipv6_setting,
ipv4_config,
ipv6_config,
None,
vlan_config,
)
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Observer RESTful API."""
import asyncio
import logging
from typing import Any
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor HassOS RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -20,7 +19,6 @@ from ..const import (
ATTR_POWER_LED,
ATTR_SERIAL,
ATTR_SIZE,
ATTR_STATE,
ATTR_UPDATE_AVAILABLE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
@@ -30,17 +28,13 @@ from ..exceptions import BoardInvalidError
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..validate import version_tag
from .const import (
ATTR_BOOT_SLOT,
ATTR_BOOT_SLOTS,
ATTR_DATA_DISK,
ATTR_DEV_PATH,
ATTR_DEVICE,
ATTR_DISKS,
ATTR_MODEL,
ATTR_STATUS,
ATTR_SYSTEM_HEALTH_LED,
ATTR_VENDOR,
BootSlot,
)
from .utils import api_process, api_validate
@@ -48,7 +42,6 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})
SCHEMA_YELLOW_OPTIONS = vol.Schema(
@@ -81,15 +74,6 @@ class APIOS(CoreSysAttributes):
ATTR_BOARD: self.sys_os.board,
ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
ATTR_BOOT_SLOTS: {
slot.bootname: {
ATTR_STATE: slot.state,
ATTR_STATUS: slot.boot_status,
ATTR_VERSION: slot.bundle_version,
}
for slot in self.sys_os.slots
if slot.bootname
},
}
@api_process
@@ -112,17 +96,6 @@ class APIOS(CoreSysAttributes):
await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))
@api_process
def wipe_data(self, request: web.Request) -> Awaitable[None]:
"""Trigger data disk wipe on Host."""
return asyncio.shield(self.sys_os.datadisk.wipe_disk())
@api_process
async def set_boot_slot(self, request: web.Request) -> None:
"""Change the active boot slot and reboot into it."""
body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
@api_process
async def list_data(self, request: web.Request) -> dict[str, Any]:
"""Return possible data targets."""
@@ -157,17 +130,13 @@ class APIOS(CoreSysAttributes):
body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
if ATTR_ACTIVITY_LED in body:
await self.sys_dbus.agent.board.green.set_activity_led(
body[ATTR_ACTIVITY_LED]
)
self.sys_dbus.agent.board.green.activity_led = body[ATTR_ACTIVITY_LED]
if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])
self.sys_dbus.agent.board.green.power_led = body[ATTR_POWER_LED]
if ATTR_SYSTEM_HEALTH_LED in body:
await self.sys_dbus.agent.board.green.set_user_led(
body[ATTR_SYSTEM_HEALTH_LED]
)
self.sys_dbus.agent.board.green.user_led = body[ATTR_SYSTEM_HEALTH_LED]
self.sys_dbus.agent.board.green.save_data()
@@ -186,15 +155,13 @@ class APIOS(CoreSysAttributes):
body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)
if ATTR_DISK_LED in body:
await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])
self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED]
if ATTR_HEARTBEAT_LED in body:
await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
body[ATTR_HEARTBEAT_LED]
)
self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED]
if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])
self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]
self.sys_dbus.agent.board.yellow.save_data()
self.sys_resolution.create_issue(

View File

@@ -1,5 +1,4 @@
"""Utils for Home Assistant Proxy."""
import asyncio
from contextlib import asynccontextmanager
import logging
@@ -15,7 +14,6 @@ from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, HomeAssistantAPIError, HomeAssistantAuthError
from ..utils.json import json_dumps
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -147,8 +145,7 @@ class APIProxy(CoreSysAttributes):
{
"type": "auth",
"access_token": self.sys_homeassistant.api.access_token,
},
dumps=json_dumps,
}
)
data = await client.receive_json()
@@ -187,9 +184,6 @@ class APIProxy(CoreSysAttributes):
return await target.send_str(msg.data)
if msg.type == WSMsgType.BINARY:
return await target.send_bytes(msg.data)
if msg.type == WSMsgType.CLOSE:
_LOGGER.debug("Received close message from WebSocket.")
return await target.close()
raise TypeError(
f"Cannot proxy websocket message of unsupported type: {msg.type}"
@@ -204,13 +198,11 @@ class APIProxy(CoreSysAttributes):
# init server
server = web.WebSocketResponse(heartbeat=30)
await server.prepare(request)
addon_name = None
# handle authentication
try:
await server.send_json(
{"type": "auth_required", "ha_version": self.sys_homeassistant.version},
dumps=json_dumps,
{"type": "auth_required", "ha_version": self.sys_homeassistant.version}
)
# Check API access
@@ -223,17 +215,14 @@ class APIProxy(CoreSysAttributes):
if not addon or not addon.access_homeassistant_api:
_LOGGER.warning("Unauthorized WebSocket access!")
await server.send_json(
{"type": "auth_invalid", "message": "Invalid access"},
dumps=json_dumps,
{"type": "auth_invalid", "message": "Invalid access"}
)
return server
addon_name = addon.slug
_LOGGER.info("WebSocket access from %s", addon_name)
_LOGGER.info("WebSocket access from %s", addon.slug)
await server.send_json(
{"type": "auth_ok", "ha_version": self.sys_homeassistant.version},
dumps=json_dumps,
{"type": "auth_ok", "ha_version": self.sys_homeassistant.version}
)
except (RuntimeError, ValueError) as err:
_LOGGER.error("Can't initialize handshake: %s", err)
@@ -288,5 +277,5 @@ class APIProxy(CoreSysAttributes):
if not server.closed:
await server.close()
_LOGGER.info("Home Assistant WebSocket API for %s closed", addon_name)
_LOGGER.info("Home Assistant WebSocket API connection is closed")
return server
View File
@@ -1,5 +1,4 @@
"""Handle REST API for resoulution."""
import asyncio
from collections.abc import Awaitable
from typing import Any
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Root RESTful API."""
import asyncio
import logging
from typing import Any
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Security RESTful API."""
import asyncio
import logging
from typing import Any
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
from typing import Any
@@ -250,14 +249,9 @@ class APIStore(CoreSysAttributes):
@api_process_raw(CONTENT_TYPE_TEXT)
async def addons_addon_changelog(self, request: web.Request) -> str:
"""Return changelog from add-on."""
# Frontend can't handle error response here, need to return 200 and error as text for now
try:
addon = self._extract_addon(request)
except APIError as err:
return str(err)
addon = self._extract_addon(request)
if not addon.with_changelog:
return f"No changelog found for add-on {addon.slug}!"
raise APIError(f"No changelog found for add-on {addon.slug}!")
with addon.path_changelog.open("r") as changelog:
return changelog.read()
@@ -265,14 +259,9 @@ class APIStore(CoreSysAttributes):
@api_process_raw(CONTENT_TYPE_TEXT)
async def addons_addon_documentation(self, request: web.Request) -> str:
"""Return documentation from add-on."""
# Frontend can't handle error response here, need to return 200 and error as text for now
try:
addon = self._extract_addon(request)
except APIError as err:
return str(err)
addon = self._extract_addon(request)
if not addon.with_documentation:
return f"No documentation found for add-on {addon.slug}!"
raise APIError(f"No documentation found for add-on {addon.slug}!")
with addon.path_documentation.open("r") as documentation:
return documentation.read()
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor Supervisor RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -50,7 +49,7 @@ from ..store.validate import repositories
from ..utils.sentry import close_sentry, init_sentry
from ..utils.validate import validate_timezone
from ..validate import version_tag, wait_boot
from .const import CONTENT_TYPE_TEXT
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -141,7 +140,7 @@ class APISupervisor(CoreSysAttributes):
if ATTR_DIAGNOSTICS in body:
self.sys_config.diagnostics = body[ATTR_DIAGNOSTICS]
await self.sys_dbus.agent.set_diagnostics(body[ATTR_DIAGNOSTICS])
self.sys_dbus.agent.diagnostics = body[ATTR_DIAGNOSTICS]
if body[ATTR_DIAGNOSTICS]:
init_sentry(self.coresys)
@@ -230,7 +229,7 @@ class APISupervisor(CoreSysAttributes):
"""Soft restart Supervisor."""
return asyncio.shield(self.sys_supervisor.restart())
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return supervisor Docker logs."""
return self.sys_supervisor.logs()
View File
@@ -1,5 +1,4 @@
"""Init file for Supervisor util for RESTful API."""
import json
from typing import Any
@@ -14,7 +13,6 @@ from ..const import (
HEADER_TOKEN,
HEADER_TOKEN_OLD,
JSON_DATA,
JSON_JOB_ID,
JSON_MESSAGE,
JSON_RESULT,
REQUEST_FROM,
@@ -26,7 +24,7 @@ from ..exceptions import APIError, APIForbidden, DockerAPIError, HassioError
from ..utils import check_exception_chain, get_message_from_exception_chain
from ..utils.json import json_dumps, json_loads as json_loads_util
from ..utils.log_format import format_message
from . import const
from .const import CONTENT_TYPE_BINARY
def excract_supervisor_token(request: web.Request) -> str | None:
@@ -92,7 +90,7 @@ def require_home_assistant(method):
return wrap_api
def api_process_raw(content, *, error_type=None):
def api_process_raw(content):
"""Wrap content_type into function."""
def wrap_method(method):
@@ -102,15 +100,15 @@ def api_process_raw(content, *, error_type=None):
"""Return api information."""
try:
msg_data = await method(api, *args, **kwargs)
except HassioError as err:
return api_return_error(
err, error_type=error_type or const.CONTENT_TYPE_BINARY
)
msg_type = content
except (APIError, APIForbidden) as err:
msg_data = str(err).encode()
msg_type = CONTENT_TYPE_BINARY
except HassioError:
msg_data = b""
msg_type = CONTENT_TYPE_BINARY
if isinstance(msg_data, (web.Response, web.StreamResponse)):
return msg_data
return web.Response(body=msg_data, content_type=content)
return web.Response(body=msg_data, content_type=msg_type)
return wrap_api
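The hunk above changes api_process_raw from a fixed error content type to an optional error_type parameter. As a rough, self-contained sketch of how such a decorator factory works (simplified, not the exact Supervisor implementation):

from functools import wraps

from aiohttp import web

def process_raw(content: str, *, error_type: str | None = None):
    """Decorator factory: wrap an async handler and fix its content type."""
    def wrap_method(method):
        @wraps(method)
        async def wrap_api(api, *args, **kwargs):
            try:
                msg_data = await method(api, *args, **kwargs)
            except Exception as err:  # the real code catches HassioError
                return web.Response(
                    text=str(err),
                    status=400,
                    content_type=error_type or content,
                )
            if isinstance(msg_data, (web.Response, web.StreamResponse)):
                return msg_data  # handler already built a response
            body = msg_data if isinstance(msg_data, bytes) else str(msg_data).encode()
            return web.Response(body=body, content_type=content)
        return wrap_api
    return wrap_method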
@@ -118,40 +116,20 @@ def api_process_raw(content, *, error_type=None):
def api_return_error(
error: Exception | None = None,
message: str | None = None,
error_type: str | None = None,
error: Exception | None = None, message: str | None = None
) -> web.Response:
"""Return an API error message."""
if error and not message:
message = get_message_from_exception_chain(error)
if check_exception_chain(error, DockerAPIError):
message = format_message(message)
if not message:
message = "Unknown error, see supervisor"
status = 400
if is_api_error := isinstance(error, APIError):
status = error.status
match error_type:
case const.CONTENT_TYPE_TEXT:
return web.Response(body=message, content_type=error_type, status=status)
case const.CONTENT_TYPE_BINARY:
return web.Response(
body=message.encode(), content_type=error_type, status=status
)
case _:
result = {
JSON_RESULT: RESULT_ERROR,
JSON_MESSAGE: message,
}
if is_api_error and error.job_id:
result[JSON_JOB_ID] = error.job_id
return web.json_response(
result,
status=status,
{
JSON_RESULT: RESULT_ERROR,
JSON_MESSAGE: message or "Unknown error, see supervisor",
},
status=400,
dumps=json_dumps,
)
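The richer error path above also attaches the failing job's ID so clients can inspect it later. A hedged sketch of that payload shape (field names taken from the constants shown in this diff):

from aiohttp import web

def json_error(message: str, status: int = 400, job_id: str | None = None) -> web.Response:
    result = {"result": "error", "message": message or "Unknown error, see supervisor"}
    if job_id:
        result["job_id"] = job_id  # lets a client look up the failed job
    return web.json_response(result, status=status)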

View File

@@ -1,5 +1,4 @@
"""Handle Arch for underlay maschine/platforms."""
import logging
from pathlib import Path
import platform

View File

@@ -1,20 +1,12 @@
"""Manage SSO for Add-ons with Home Assistant user."""
import asyncio
import hashlib
import logging
from typing import Any
from .addons.addon import Addon
from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_TYPE, ATTR_USERNAME, FILE_HASSIO_AUTH
from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_USERNAME, FILE_HASSIO_AUTH
from .coresys import CoreSys, CoreSysAttributes
from .exceptions import (
AuthError,
AuthListUsersError,
AuthPasswordResetError,
HomeAssistantAPIError,
HomeAssistantWSError,
)
from .exceptions import AuthError, AuthPasswordResetError, HomeAssistantAPIError
from .utils.common import FileConfiguration
from .validate import SCHEMA_AUTH_CONFIG
@@ -140,17 +132,6 @@ class Auth(FileConfiguration, CoreSysAttributes):
raise AuthPasswordResetError()
async def list_users(self) -> list[dict[str, Any]]:
"""List users on the Home Assistant instance."""
try:
return await self.sys_homeassistant.websocket.async_send_command(
{ATTR_TYPE: "config/auth/list"}
)
except HomeAssistantWSError:
_LOGGER.error("Can't request listing users on Home Assistant!")
raise AuthListUsersError()
@staticmethod
def _rehash(value: str, salt2: str = "") -> str:
"""Rehash a value."""

View File

@@ -1,19 +1,14 @@
"""Representation of a backup file."""
import asyncio
from base64 import b64decode, b64encode
from collections import defaultdict
from collections.abc import Awaitable
from copy import deepcopy
from datetime import timedelta
from functools import cached_property
import io
import json
import logging
from pathlib import Path
import tarfile
from tempfile import TemporaryDirectory
import time
from typing import Any
from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
@@ -47,14 +42,11 @@ from ..const import (
ATTR_VERSION,
CRYPTO_AES128,
)
from ..coresys import CoreSys
from ..exceptions import AddonsError, BackupError, BackupInvalidError
from ..jobs.const import JOB_GROUP_BACKUP
from ..jobs.decorator import Job
from ..jobs.job_group import JobGroup
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import AddonsError, BackupError
from ..utils import remove_folder
from ..utils.dt import parse_datetime, utcnow
from ..utils.json import json_bytes
from ..utils.json import write_json_file
from .const import BUF_SIZE, BackupType
from .utils import key_to_iv, password_to_key
from .validate import SCHEMA_BACKUP
@@ -62,25 +54,15 @@ from .validate import SCHEMA_BACKUP
_LOGGER: logging.Logger = logging.getLogger(__name__)
class Backup(JobGroup):
class Backup(CoreSysAttributes):
"""A single Supervisor backup."""
def __init__(
self,
coresys: CoreSys,
tar_file: Path,
slug: str,
data: dict[str, Any] | None = None,
):
def __init__(self, coresys: CoreSys, tar_file: Path):
"""Initialize a backup."""
super().__init__(
coresys, JOB_GROUP_BACKUP.format_map(defaultdict(str, slug=slug)), slug
)
self.coresys: CoreSys = coresys
self._tarfile: Path = tar_file
self._data: dict[str, Any] = data or {ATTR_SLUG: slug}
self._data: dict[str, Any] = {}
self._tmp = None
self._outer_secure_tarfile: SecureTarFile | None = None
self._outer_secure_tarfile_tarfile: tarfile.TarFile | None = None
self._key: bytes | None = None
self._aes: Cipher | None = None
@@ -105,7 +87,7 @@ class Backup(JobGroup):
return self._data[ATTR_NAME]
@property
def date(self) -> str:
def date(self):
"""Return backup date."""
return self._data[ATTR_DATE]
@@ -120,32 +102,32 @@ class Backup(JobGroup):
return self._data[ATTR_COMPRESSED]
@property
def addons(self) -> list[dict[str, Any]]:
def addons(self):
"""Return backup date."""
return self._data[ATTR_ADDONS]
@property
def addon_list(self) -> list[str]:
def addon_list(self):
"""Return a list of add-ons slugs."""
return [addon_data[ATTR_SLUG] for addon_data in self.addons]
@property
def folders(self) -> list[str]:
def folders(self):
"""Return list of saved folders."""
return self._data[ATTR_FOLDERS]
@property
def repositories(self) -> list[str]:
def repositories(self):
"""Return backup date."""
return self._data[ATTR_REPOSITORIES]
@repositories.setter
def repositories(self, value: list[str]) -> None:
def repositories(self, value):
"""Set backup date."""
self._data[ATTR_REPOSITORIES] = value
@property
def homeassistant_version(self) -> AwesomeVersion:
def homeassistant_version(self):
"""Return backup Home Assistant version."""
if self.homeassistant is None:
return None
@@ -159,7 +141,7 @@ class Backup(JobGroup):
return self.homeassistant[ATTR_EXCLUDE_DATABASE]
@property
def homeassistant(self) -> dict[str, Any]:
def homeassistant(self):
"""Return backup Home Assistant data."""
return self._data[ATTR_HOMEASSISTANT]
@@ -169,12 +151,12 @@ class Backup(JobGroup):
return self._data[ATTR_SUPERVISOR_VERSION]
@property
def docker(self) -> dict[str, Any]:
def docker(self):
"""Return backup Docker config data."""
return self._data.get(ATTR_DOCKER, {})
@docker.setter
def docker(self, value: dict[str, Any]) -> None:
def docker(self, value):
"""Set the Docker config data."""
self._data[ATTR_DOCKER] = value
@@ -187,36 +169,32 @@ class Backup(JobGroup):
return None
@property
def size(self) -> float:
def size(self):
"""Return backup size."""
if not self.tarfile.is_file():
return 0
return round(self.tarfile.stat().st_size / 1048576, 2) # calc mbyte
@property
def is_new(self) -> bool:
def is_new(self):
"""Return True if there is new."""
return not self.tarfile.exists()
@property
def tarfile(self) -> Path:
def tarfile(self):
"""Return path to backup tarfile."""
return self._tarfile
@property
def is_current(self) -> bool:
def is_current(self):
"""Return true if backup is current, false if stale."""
return parse_datetime(self.date) >= utcnow() - timedelta(
days=self.sys_backups.days_until_stale
)
@property
def data(self) -> dict[str, Any]:
"""Returns a copy of the data."""
return deepcopy(self._data)
def new(
self,
slug: str,
name: str,
date: str,
sys_type: BackupType,
@@ -226,6 +204,7 @@ class Backup(JobGroup):
"""Initialize a new backup."""
# Init metadata
self._data[ATTR_VERSION] = 2
self._data[ATTR_SLUG] = slug
self._data[ATTR_NAME] = name
self._data[ATTR_DATE] = date
self._data[ATTR_TYPE] = sys_type
@@ -326,21 +305,13 @@ class Backup(JobGroup):
async def __aenter__(self):
"""Async context to open a backup."""
self._tmp = TemporaryDirectory(dir=str(self.tarfile.parent))
# create a backup
if not self.tarfile.is_file():
self._outer_secure_tarfile = SecureTarFile(
self.tarfile,
"w",
gzip=False,
bufsize=BUF_SIZE,
)
self._outer_secure_tarfile_tarfile = self._outer_secure_tarfile.__enter__()
return
return self
# extract an existing backup
self._tmp = TemporaryDirectory(dir=str(self.tarfile.parent))
def _extract_backup():
"""Extract a backup."""
with tarfile.open(self.tarfile, "r:") as tar:
@@ -355,26 +326,8 @@ class Backup(JobGroup):
async def __aexit__(self, exception_type, exception_value, traceback):
"""Async context to close a backup."""
# existing backup, or an exception raised during build
try:
await self._aexit(exception_type, exception_value, traceback)
finally:
if self._tmp:
self._tmp.cleanup()
if self._outer_secure_tarfile:
self._outer_secure_tarfile.__exit__(
exception_type, exception_value, traceback
)
self._outer_secure_tarfile = None
self._outer_secure_tarfile_tarfile = None
async def _aexit(self, exception_type, exception_value, traceback):
"""Cleanup after backup creation.
This is a separate method to allow it to be called from __aexit__ to ensure
that cleanup is always performed, even if an exception is raised.
"""
# If we're not creating a new backup, or if an exception was raised, we're done
if not self._outer_secure_tarfile or exception_type is not None:
if self.tarfile.is_file() or exception_type is not None:
self._tmp.cleanup()
return
# validate data
@@ -387,249 +340,161 @@ class Backup(JobGroup):
raise ValueError("Invalid config") from None
# new backup, build it
def _add_backup_json():
def _create_backup():
"""Create a new backup."""
raw_bytes = json_bytes(self._data)
fileobj = io.BytesIO(raw_bytes)
tar_info = tarfile.TarInfo(name="./backup.json")
tar_info.size = len(raw_bytes)
tar_info.mtime = int(time.time())
self._outer_secure_tarfile_tarfile.addfile(tar_info, fileobj=fileobj)
with tarfile.open(self.tarfile, "w:") as tar:
tar.add(self._tmp.name, arcname=".")
try:
await self.sys_run_in_executor(_add_backup_json)
write_json_file(Path(self._tmp.name, "backup.json"), self._data)
await self.sys_run_in_executor(_create_backup)
except (OSError, json.JSONDecodeError) as err:
self.sys_jobs.current.capture_error(BackupError("Can't write backup"))
_LOGGER.error("Can't write backup: %s", err)
finally:
self._tmp.cleanup()
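The newer _add_backup_json above streams the metadata straight into the already-open outer tar instead of staging backup.json in a temporary directory. The standard-library mechanics look like this (standalone sketch):

import io
import json
import tarfile
import time

def add_json_member(tar: tarfile.TarFile, name: str, data: dict) -> None:
    raw = json.dumps(data, separators=(",", ":")).encode()
    info = tarfile.TarInfo(name=name)
    info.size = len(raw)             # size must match the payload exactly
    info.mtime = int(time.time())
    tar.addfile(info, io.BytesIO(raw))

with tarfile.open("backup.tar", "w:") as tar:
    add_json_member(tar, "./backup.json", {"slug": "example", "version": 2})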
@Job(name="backup_addon_save", cleanup=False)
async def _addon_save(self, addon: Addon) -> asyncio.Task | None:
"""Store an add-on into backup."""
self.sys_jobs.current.reference = addon.slug
tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}"
addon_file = self._outer_secure_tarfile.create_inner_tar(
f"./{tar_name}",
gzip=self.compressed,
key=self._key,
)
# Take backup
try:
start_task = await addon.backup(addon_file)
except AddonsError as err:
raise BackupError(
f"Can't create backup for {addon.slug}", _LOGGER.error
) from err
# Store to config
self._data[ATTR_ADDONS].append(
{
ATTR_SLUG: addon.slug,
ATTR_NAME: addon.name,
ATTR_VERSION: addon.version,
ATTR_SIZE: addon_file.size,
}
)
return start_task
@Job(name="backup_store_addons", cleanup=False)
async def store_addons(self, addon_list: list[str]) -> list[asyncio.Task]:
"""Add a list of add-ons into backup.
For each addon that needs to be started after backup, returns a Task which
completes when that addon has state 'started' (see addon.start).
"""
# Save Add-ons sequential avoid issue on slow IO
async def _addon_save(addon: Addon) -> asyncio.Task | None:
"""Task to store an add-on into backup."""
tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}"
addon_file = SecureTarFile(
Path(self._tmp.name, tar_name),
"w",
key=self._key,
gzip=self.compressed,
bufsize=BUF_SIZE,
)
# Take backup
try:
start_task = await addon.backup(addon_file)
except AddonsError:
_LOGGER.error("Can't create backup for %s", addon.slug)
return
# Store to config
self._data[ATTR_ADDONS].append(
{
ATTR_SLUG: addon.slug,
ATTR_NAME: addon.name,
ATTR_VERSION: addon.version,
ATTR_SIZE: addon_file.size,
}
)
return start_task
# Save Add-ons sequential
# avoid issue on slow IO
start_tasks: list[asyncio.Task] = []
for addon in addon_list:
try:
if start_task := await self._addon_save(addon):
if start_task := await _addon_save(addon):
start_tasks.append(start_task)
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't save Add-on %s: %s", addon.slug, err)
return start_tasks
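Both variants of store_addons above keep the saves strictly sequential (a gather here would thrash slow disks) while collecting the optional post-backup start tasks. The control flow, reduced to its skeleton:

import asyncio
import logging

_LOGGER = logging.getLogger(__name__)

async def store_all(addons, save_one) -> list[asyncio.Task]:
    # save_one(addon) -> asyncio.Task | None, as _addon_save above
    start_tasks: list[asyncio.Task] = []
    for addon in addons:                      # sequential on purpose
        try:
            if start_task := await save_one(addon):
                start_tasks.append(start_task)
        except Exception as err:  # pylint: disable=broad-except
            _LOGGER.warning("Can't save Add-on %s: %s", addon, err)
    return start_tasks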
@Job(name="backup_addon_restore", cleanup=False)
async def _addon_restore(self, addon_slug: str) -> asyncio.Task | None:
"""Restore an add-on from backup."""
self.sys_jobs.current.reference = addon_slug
tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}"
addon_file = SecureTarFile(
Path(self._tmp.name, tar_name),
"r",
key=self._key,
gzip=self.compressed,
bufsize=BUF_SIZE,
)
# If exists inside backup
if not addon_file.path.exists():
raise BackupError(f"Can't find backup {addon_slug}", _LOGGER.error)
# Perform a restore
try:
return await self.sys_addons.restore(addon_slug, addon_file)
except AddonsError as err:
raise BackupError(
f"Can't restore backup {addon_slug}", _LOGGER.error
) from err
@Job(name="backup_restore_addons", cleanup=False)
async def restore_addons(
self, addon_list: list[str]
) -> tuple[bool, list[asyncio.Task]]:
"""Restore a list add-on from backup."""
# Save Add-ons sequential avoid issue on slow IO
async def _addon_restore(addon_slug: str) -> tuple[bool, asyncio.Task | None]:
"""Task to restore an add-on into backup."""
tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}"
addon_file = SecureTarFile(
Path(self._tmp.name, tar_name),
"r",
key=self._key,
gzip=self.compressed,
bufsize=BUF_SIZE,
)
# If exists inside backup
if not addon_file.path.exists():
_LOGGER.error("Can't find backup %s", addon_slug)
return (False, None)
# Perform a restore
try:
return (True, await self.sys_addons.restore(addon_slug, addon_file))
except AddonsError:
_LOGGER.error("Can't restore backup %s", addon_slug)
return (False, None)
# Save Add-ons sequential
# avoid issue on slow IO
start_tasks: list[asyncio.Task] = []
success = True
for slug in addon_list:
try:
start_task = await self._addon_restore(slug)
addon_success, start_task = await _addon_restore(slug)
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't restore Add-on %s: %s", slug, err)
success = False
else:
success = success and addon_success
if start_task:
start_tasks.append(start_task)
return (success, start_tasks)
@Job(name="backup_remove_delta_addons", cleanup=False)
async def remove_delta_addons(self) -> bool:
"""Remove addons which are not in this backup."""
success = True
for addon in self.sys_addons.installed:
if addon.slug in self.addon_list:
continue
# Remove Add-on because it's not a part of the new env
# Do it sequentially to avoid issues on slow IO
try:
await self.sys_addons.uninstall(addon.slug)
except AddonsError as err:
self.sys_jobs.current.capture_error(err)
_LOGGER.warning("Can't uninstall Add-on %s: %s", addon.slug, err)
success = False
return success
@Job(name="backup_folder_save", cleanup=False)
async def _folder_save(self, name: str):
"""Take backup of a folder."""
self.sys_jobs.current.reference = name
slug_name = name.replace("/", "_")
tar_name = f"{slug_name}.tar{'.gz' if self.compressed else ''}"
origin_dir = Path(self.sys_config.path_supervisor, name)
# Check if exists
if not origin_dir.is_dir():
_LOGGER.warning("Can't find backup folder %s", name)
return
def _save() -> None:
# Take backup
_LOGGER.info("Backing up folder %s", name)
with self._outer_secure_tarfile.create_inner_tar(
f"./{tar_name}",
gzip=self.compressed,
key=self._key,
) as tar_file:
atomic_contents_add(
tar_file,
origin_dir,
excludes=[
bound.bind_mount.local_where.as_posix()
for bound in self.sys_mounts.bound_mounts
if bound.bind_mount.local_where
],
arcname=".",
)
_LOGGER.info("Backup folder %s done", name)
try:
await self.sys_run_in_executor(_save)
except (tarfile.TarError, OSError) as err:
raise BackupError(
f"Can't backup folder {name}: {str(err)}", _LOGGER.error
) from err
self._data[ATTR_FOLDERS].append(name)
@Job(name="backup_store_folders", cleanup=False)
async def store_folders(self, folder_list: list[str]):
"""Backup Supervisor data into backup."""
# Save folder sequential avoid issue on slow IO
for folder in folder_list:
await self._folder_save(folder)
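atomic_contents_add above archives a folder while skipping any bind-mount targets inside it. With only the standard library, the same exclusion can be expressed through tarfile's filter callback (a sketch; atomic_contents_add itself is a Supervisor utility, not shown here):

import tarfile
from pathlib import Path

def backup_folder(tar_path: Path, origin: Path, excludes: list[str]) -> None:
    def _skip_mounts(info: tarfile.TarInfo) -> tarfile.TarInfo | None:
        # Returning None drops the member; names here are archive-relative.
        if any(info.name.startswith(exclude) for exclude in excludes):
            return None
        return info

    with tarfile.open(tar_path, "w:gz") as tar:
        tar.add(origin, arcname=".", filter=_skip_mounts)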
@Job(name="backup_folder_restore", cleanup=False)
async def _folder_restore(self, name: str) -> None:
"""Restore a folder."""
self.sys_jobs.current.reference = name
slug_name = name.replace("/", "_")
tar_name = Path(
self._tmp.name, f"{slug_name}.tar{'.gz' if self.compressed else ''}"
)
origin_dir = Path(self.sys_config.path_supervisor, name)
# Check if exists inside backup
if not tar_name.exists():
raise BackupInvalidError(
f"Can't find restore folder {name}", _LOGGER.warning
async def _folder_save(name: str):
"""Take backup of a folder."""
slug_name = name.replace("/", "_")
tar_name = Path(
self._tmp.name, f"{slug_name}.tar{'.gz' if self.compressed else ''}"
)
origin_dir = Path(self.sys_config.path_supervisor, name)
# Unmount any mounts within folder
bind_mounts = [
bound.bind_mount
for bound in self.sys_mounts.bound_mounts
if bound.bind_mount.local_where
and bound.bind_mount.local_where.is_relative_to(origin_dir)
]
if bind_mounts:
await asyncio.gather(*[bind_mount.unmount() for bind_mount in bind_mounts])
# Check if exists
if not origin_dir.is_dir():
_LOGGER.warning("Can't find backup folder %s", name)
return
# Clean old stuff
if origin_dir.is_dir():
await remove_folder(origin_dir, content_only=True)
# Perform a restore
def _restore() -> bool:
try:
_LOGGER.info("Restore folder %s", name)
def _save() -> None:
# Take backup
_LOGGER.info("Backing up folder %s", name)
with SecureTarFile(
tar_name,
"r",
key=self._key,
gzip=self.compressed,
bufsize=BUF_SIZE,
tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE
) as tar_file:
tar_file.extractall(
path=origin_dir, members=tar_file, filter="fully_trusted"
atomic_contents_add(
tar_file,
origin_dir,
excludes=[
bound.bind_mount.local_where.as_posix()
for bound in self.sys_mounts.bound_mounts
if bound.bind_mount.local_where
],
arcname=".",
)
_LOGGER.info("Restore folder %s done", name)
_LOGGER.info("Backup folder %s done", name)
await self.sys_run_in_executor(_save)
self._data[ATTR_FOLDERS].append(name)
# Save folder sequential
# avoid issue on slow IO
for folder in folder_list:
try:
await _folder_save(folder)
except (tarfile.TarError, OSError) as err:
raise BackupError(
f"Can't restore folder {name}: {err}", _LOGGER.warning
f"Can't backup folder {folder}: {str(err)}", _LOGGER.error
) from err
return True
try:
return await self.sys_run_in_executor(_restore)
finally:
if bind_mounts:
await asyncio.gather(
*[bind_mount.mount() for bind_mount in bind_mounts]
)
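The restore path above has to take any bind mounts inside the folder offline before wiping it, and must bring them back even when the restore fails, hence the try/finally. Reduced to its essential shape:

import asyncio

async def restore_with_mounts(bind_mounts, do_restore) -> None:
    # bind_mounts: objects with async mount()/unmount(), as above
    if bind_mounts:
        await asyncio.gather(*[m.unmount() for m in bind_mounts])
    try:
        await do_restore()
    finally:
        if bind_mounts:
            await asyncio.gather(*[m.mount() for m in bind_mounts])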
@Job(name="backup_restore_folders", cleanup=False)
async def restore_folders(self, folder_list: list[str]) -> bool:
"""Backup Supervisor data into backup."""
success = True
@@ -691,16 +556,16 @@ class Backup(JobGroup):
*[bind_mount.mount() for bind_mount in bind_mounts]
)
# Restore folder sequential avoid issue on slow IO
# Restore folder sequential
# avoid issue on slow IO
for folder in folder_list:
try:
await self._folder_restore(folder)
success = success and await _folder_restore(folder)
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't restore folder %s: %s", folder, err)
success = False
return success
@Job(name="backup_store_homeassistant", cleanup=False)
async def store_homeassistant(self, exclude_database: bool = False):
"""Backup Home Assistant Core configuration folder."""
self._data[ATTR_HOMEASSISTANT] = {
@@ -708,12 +573,12 @@ class Backup(JobGroup):
ATTR_EXCLUDE_DATABASE: exclude_database,
}
tar_name = f"homeassistant.tar{'.gz' if self.compressed else ''}"
# Backup Home Assistant Core config directory
homeassistant_file = self._outer_secure_tarfile.create_inner_tar(
f"./{tar_name}",
gzip=self.compressed,
key=self._key,
tar_name = Path(
self._tmp.name, f"homeassistant.tar{'.gz' if self.compressed else ''}"
)
homeassistant_file = SecureTarFile(
tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE
)
await self.sys_homeassistant.backup(homeassistant_file, exclude_database)
@@ -721,7 +586,6 @@ class Backup(JobGroup):
# Store size
self.homeassistant[ATTR_SIZE] = homeassistant_file.size
@Job(name="backup_restore_homeassistant", cleanup=False)
async def restore_homeassistant(self) -> Awaitable[None]:
"""Restore Home Assistant Core configuration folder."""
await self.sys_homeassistant.core.stop()
@@ -755,7 +619,7 @@ class Backup(JobGroup):
return self.sys_create_task(_core_update())
def store_repositories(self) -> None:
def store_repositories(self):
"""Store repository list into backup."""
self.repositories = self.sys_store.repository_urls

View File

@@ -1,5 +1,4 @@
"""Backup consts."""
from enum import StrEnum
BUF_SIZE = 2**20 * 4 # 4MB

View File

@@ -1,5 +1,4 @@
"""Backup manager."""
from __future__ import annotations
import asyncio
@@ -10,23 +9,13 @@ from pathlib import Path
from ..addons.addon import Addon
from ..const import (
ATTR_DATA,
ATTR_DAYS_UNTIL_STALE,
ATTR_SLUG,
ATTR_TYPE,
FILE_HASSIO_BACKUPS,
FOLDER_HOMEASSISTANT,
CoreState,
)
from ..dbus.const import UnitActiveState
from ..exceptions import (
BackupError,
BackupInvalidError,
BackupJobError,
BackupMountDownError,
HomeAssistantWSError,
)
from ..homeassistant.const import WSType
from ..exceptions import AddonsError, BackupError, BackupInvalidError, BackupJobError
from ..jobs.const import JOB_GROUP_BACKUP_MANAGER, JobCondition, JobExecutionLimit
from ..jobs.decorator import Job
from ..jobs.job_group import JobGroup
@@ -85,16 +74,12 @@ class BackupManager(FileConfiguration, JobGroup):
def _get_base_path(self, location: Mount | type[DEFAULT] | None = DEFAULT) -> Path:
"""Get base path for backup using location or default location."""
if location == DEFAULT and self.sys_mounts.default_backup_mount:
location = self.sys_mounts.default_backup_mount
if location:
if not location.local_where.is_mount():
raise BackupMountDownError(
f"{location.name} is down, cannot back-up to it", _LOGGER.error
)
return location.local_where
if location == DEFAULT and self.sys_mounts.default_backup_mount:
return self.sys_mounts.default_backup_mount.local_where
return self.sys_config.path_backup
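The newer _get_base_path above refuses to write a backup onto a mount whose backing storage is down, instead of silently writing into the empty mount point. The decision logic as a plain function (the DEFAULT sentinel simplified to None, BackupMountDownError swapped for a generic exception):

from pathlib import Path

def backup_base_path(location, default_mount, fallback: Path) -> Path:
    if location is None and default_mount is not None:
        location = default_mount            # fall back to the default mount
    if location is not None:
        if not location.local_where.is_mount():
            raise RuntimeError(f"{location.name} is down, cannot back-up to it")
        return location.local_where
    return fallback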
def _change_stage(
@@ -154,8 +139,8 @@ class BackupManager(FileConfiguration, JobGroup):
tar_file = Path(self._get_base_path(location), f"{slug}.tar")
# init object
backup = Backup(self.coresys, tar_file, slug)
backup.new(name, date_str, sys_type, password, compressed)
backup = Backup(self.coresys, tar_file)
backup.new(slug, name, date_str, sys_type, password, compressed)
# Add backup ID to job
self.sys_jobs.current.reference = backup.slug
@@ -180,11 +165,9 @@ class BackupManager(FileConfiguration, JobGroup):
async def _load_backup(tar_file):
"""Load the backup."""
backup = Backup(self.coresys, tar_file, "temp")
backup = Backup(self.coresys, tar_file)
if await backup.load():
self._backups[backup.slug] = Backup(
self.coresys, tar_file, backup.slug, backup.data
)
self._backups[backup.slug] = backup
tasks = [
self.sys_create_task(_load_backup(tar_file))
@@ -216,7 +199,7 @@ class BackupManager(FileConfiguration, JobGroup):
async def import_backup(self, tar_file: Path) -> Backup | None:
"""Check backup tarfile and import it."""
backup = Backup(self.coresys, tar_file, "temp")
backup = Backup(self.coresys, tar_file)
# Read meta data
if not await backup.load():
@@ -239,7 +222,7 @@ class BackupManager(FileConfiguration, JobGroup):
return None
# Load new backup
backup = Backup(self.coresys, tar_origin, backup.slug, backup.data)
backup = Backup(self.coresys, tar_origin)
if not await backup.load():
return None
_LOGGER.info("Successfully imported %s", backup.slug)
@@ -265,6 +248,11 @@ class BackupManager(FileConfiguration, JobGroup):
self.sys_core.state = CoreState.FREEZE
async with backup:
# Backup add-ons
if addon_list:
self._change_stage(BackupJobStage.ADDONS, backup)
addon_start_tasks = await backup.store_addons(addon_list)
# HomeAssistant Folder is for v1
if homeassistant:
self._change_stage(BackupJobStage.HOME_ASSISTANT, backup)
@@ -274,11 +262,6 @@ class BackupManager(FileConfiguration, JobGroup):
else homeassistant_exclude_database
)
# Backup add-ons
if addon_list:
self._change_stage(BackupJobStage.ADDONS, backup)
addon_start_tasks = await backup.store_addons(addon_list)
# Backup folders
if folder_list:
self._change_stage(BackupJobStage.FOLDERS, backup)
@@ -286,15 +269,9 @@ class BackupManager(FileConfiguration, JobGroup):
self._change_stage(BackupJobStage.FINISHING_FILE, backup)
except BackupError as err:
self.sys_jobs.current.capture_error(err)
return None
except Exception as err: # pylint: disable=broad-except
_LOGGER.exception("Backup %s error", backup.slug)
capture_exception(err)
self.sys_jobs.current.capture_error(
BackupError(f"Backup {backup.slug} error, see supervisor logs")
)
return None
else:
self._backups[backup.slug] = backup
@@ -304,18 +281,6 @@ class BackupManager(FileConfiguration, JobGroup):
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*addon_start_tasks, return_exceptions=True)
try:
await self.sys_homeassistant.websocket.async_send_command(
{
ATTR_TYPE: WSType.BACKUP_SYNC,
ATTR_DATA: {
ATTR_SLUG: backup.slug,
},
},
)
except HomeAssistantWSError as err:
_LOGGER.error("Can't send backup sync to Home Assistant: %s", err)
return backup
finally:
self.sys_core.state = CoreState.RUNNING
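The block removed above notifies Home Assistant Core over the WebSocket API once a backup lands, so Core can refresh its backup list. Schematically (the concrete WSType.BACKUP_SYNC wire value lives elsewhere in the tree, so it is a parameter here; "ws" is an assumed connected client):

import logging

_LOGGER = logging.getLogger(__name__)

async def notify_backup_synced(ws, backup_sync_type: str, slug: str) -> None:
    try:
        await ws.async_send_command({"type": backup_sync_type, "data": {"slug": slug}})
    except ConnectionError as err:  # stand-in for HomeAssistantWSError
        _LOGGER.error("Can't send backup sync to Home Assistant: %s", err)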
@@ -325,7 +290,6 @@ class BackupManager(FileConfiguration, JobGroup):
conditions=[JobCondition.RUNNING],
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=BackupJobError,
cleanup=False,
)
async def do_backup_full(
self,
@@ -362,7 +326,6 @@ class BackupManager(FileConfiguration, JobGroup):
conditions=[JobCondition.RUNNING],
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=BackupJobError,
cleanup=False,
)
async def do_backup_partial(
self,
@@ -447,7 +410,17 @@ class BackupManager(FileConfiguration, JobGroup):
# Delete delta add-ons
if replace:
self._change_stage(RestoreJobStage.REMOVE_DELTA_ADDONS, backup)
success = success and await backup.remove_delta_addons()
for addon in self.sys_addons.installed:
if addon.slug in backup.addon_list:
continue
# Remove Add-on because it's not a part of the new env
# Do it sequentially to avoid issues on slow IO
try:
await self.sys_addons.uninstall(addon.slug)
except AddonsError:
_LOGGER.warning("Can't uninstall Add-on %s", addon.slug)
success = False
if addon_list:
self._change_stage(RestoreJobStage.ADDON_REPOSITORIES, backup)
@@ -471,7 +444,7 @@ class BackupManager(FileConfiguration, JobGroup):
_LOGGER.exception("Restore %s error", backup.slug)
capture_exception(err)
raise BackupError(
f"Restore {backup.slug} error, see supervisor logs"
f"Restore {backup.slug} error, check logs for details"
) from err
else:
if addon_start_tasks:
@@ -490,16 +463,12 @@ class BackupManager(FileConfiguration, JobGroup):
# Do we need to start Home Assistant Core?
if not await self.sys_homeassistant.core.is_running():
await self.sys_homeassistant.core.start(
_job_override__cleanup=False
)
await self.sys_homeassistant.core.start()
# Check If we can access to API / otherwise restart
if not await self.sys_homeassistant.api.check_api_state():
_LOGGER.warning("Need restart HomeAssistant for API")
await self.sys_homeassistant.core.restart(
_job_override__cleanup=False
)
await self.sys_homeassistant.core.restart()
@Job(
name="backup_manager_full_restore",
@@ -512,7 +481,6 @@ class BackupManager(FileConfiguration, JobGroup):
],
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=BackupJobError,
cleanup=False,
)
async def do_restore_full(
self, backup: Backup, password: str | None = None
@@ -566,7 +534,6 @@ class BackupManager(FileConfiguration, JobGroup):
],
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=BackupJobError,
cleanup=False,
)
async def do_restore_partial(
self,

View File

@@ -1,5 +1,4 @@
"""Util add-on functions."""
import hashlib
import re

View File

@@ -1,5 +1,4 @@
"""Validate some things around restore."""
from __future__ import annotations
from typing import Any
@@ -54,7 +53,7 @@ def unique_addons(addons_list):
def v1_homeassistant(
homeassistant_data: dict[str, Any] | None,
homeassistant_data: dict[str, Any] | None
) -> dict[str, Any] | None:
"""Cleanup homeassistant artefacts from v1."""
if not homeassistant_data:

View File

@@ -1,6 +1,4 @@
"""Bootstrap Supervisor."""
# ruff: noqa: T100
import logging
import os
from pathlib import Path
@@ -117,7 +115,7 @@ async def initialize_coresys() -> CoreSys:
_LOGGER.warning(
"Missing SUPERVISOR_MACHINE environment variable. Fallback to deprecated extraction!"
)
_LOGGER.info("Setting up coresys for machine: %s", coresys.machine)
_LOGGER.info("Seting up coresys for machine: %s", coresys.machine)
return coresys
@@ -258,11 +256,9 @@ def migrate_system_env(coresys: CoreSys) -> None:
def initialize_logging() -> None:
"""Initialize the logging."""
logging.basicConfig(level=logging.INFO)
fmt = (
"%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
)
fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
colorfmt = f"%(log_color)s{fmt}%(reset)s"
datefmt = "%Y-%m-%d %H:%M:%S"
datefmt = "%y-%m-%d %H:%M:%S"
# suppress overly verbose logs from libraries that aren't helpful
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

View File

@@ -1,5 +1,4 @@
"""Bus event system."""
from __future__ import annotations
from collections.abc import Awaitable, Callable

View File

@@ -1,5 +1,4 @@
"""Bootstrap Supervisor."""
from datetime import UTC, datetime
import logging
import os

View File

@@ -1,5 +1,4 @@
"""Constants file for Supervisor."""
from dataclasses import dataclass
from enum import StrEnum
from ipaddress import ip_network
@@ -69,7 +68,6 @@ META_SUPERVISOR = "supervisor"
JSON_DATA = "data"
JSON_MESSAGE = "message"
JSON_RESULT = "result"
JSON_JOB_ID = "job_id"
RESULT_ERROR = "error"
RESULT_OK = "ok"
@@ -310,8 +308,6 @@ ATTR_SUPERVISOR_VERSION = "supervisor_version"
ATTR_SUPPORTED = "supported"
ATTR_SUPPORTED_ARCH = "supported_arch"
ATTR_SYSTEM = "system"
ATTR_SYSTEM_MANAGED = "system_managed"
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY = "system_managed_config_entry"
ATTR_TIMEOUT = "timeout"
ATTR_TIMEZONE = "timezone"
ATTR_TITLE = "title"
@@ -335,7 +331,6 @@ ATTR_UUID = "uuid"
ATTR_VALID = "valid"
ATTR_VALUE = "value"
ATTR_VERSION = "version"
ATTR_VERSION_TIMESTAMP = "version_timestamp"
ATTR_VERSION_LATEST = "version_latest"
ATTR_VIDEO = "video"
ATTR_VLAN = "vlan"
@@ -382,27 +377,12 @@ ROLE_ADMIN = "admin"
ROLE_ALL = [ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_BACKUP, ROLE_MANAGER, ROLE_ADMIN]
class AddonBootConfig(StrEnum):
"""Boot mode config for the add-on."""
AUTO = "auto"
MANUAL = "manual"
MANUAL_ONLY = "manual_only"
class AddonBoot(StrEnum):
"""Boot mode for the add-on."""
AUTO = "auto"
MANUAL = "manual"
@classmethod
def _missing_(cls, value: str) -> Self | None:
"""Convert 'forced' config values to their counterpart."""
if value == AddonBootConfig.MANUAL_ONLY:
return AddonBoot.MANUAL
return None
class AddonStartup(StrEnum):
"""Startup types of Add-on."""
@@ -478,11 +458,9 @@ class HostFeature(StrEnum):
class BusEvent(StrEnum):
"""Bus event type."""
DOCKER_CONTAINER_STATE_CHANGE = "docker_container_state_change"
HARDWARE_NEW_DEVICE = "hardware_new_device"
HARDWARE_REMOVE_DEVICE = "hardware_remove_device"
SUPERVISOR_JOB_END = "supervisor_job_end"
SUPERVISOR_JOB_START = "supervisor_job_start"
DOCKER_CONTAINER_STATE_CHANGE = "docker_container_state_change"
SUPERVISOR_STATE_CHANGE = "supervisor_state_change"

View File

@@ -1,11 +1,12 @@
"""Main file for Supervisor."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
from datetime import timedelta
import logging
import async_timeout
from .const import (
ATTR_STARTUP,
RUN_SUPERVISOR_STATE,
@@ -178,15 +179,7 @@ class Core(CoreSysAttributes):
and not self.sys_dev
and self.supported
):
try:
await self.sys_dbus.agent.set_diagnostics(self.sys_config.diagnostics)
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning(
"Could not set diagnostics to %s due to %s",
self.sys_config.diagnostics,
err,
)
capture_exception(err)
self.sys_dbus.agent.diagnostics = self.sys_config.diagnostics
# Evaluate the system
await self.sys_resolution.evaluate.evaluate_system()
@@ -305,7 +298,7 @@ class Core(CoreSysAttributes):
# Stage 1
try:
async with asyncio.timeout(10):
async with async_timeout.timeout(10):
await asyncio.wait(
[
self.sys_create_task(coro)
@@ -321,7 +314,7 @@ class Core(CoreSysAttributes):
# Stage 2
try:
async with asyncio.timeout(10):
async with async_timeout.timeout(10):
await asyncio.wait(
[
self.sys_create_task(coro)
@@ -346,6 +339,9 @@ class Core(CoreSysAttributes):
if self.state == CoreState.RUNNING:
self.state = CoreState.SHUTDOWN
# Stop docker monitoring
await self.sys_docker.unload()
# Shutdown Application Add-ons, using Home Assistant API
await self.sys_addons.shutdown(AddonStartup.APPLICATION)
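The stage timeouts above move from the third-party async_timeout package to asyncio.timeout(), part of the standard library since Python 3.11. Usage is otherwise identical:

import asyncio

async def work(seconds: int) -> None:
    await asyncio.sleep(seconds)

async def run_stage() -> None:
    tasks = [asyncio.create_task(work(i)) for i in (1, 2, 30)]
    try:
        async with asyncio.timeout(10):  # stdlib replacement for async_timeout
            await asyncio.wait(tasks)
    except TimeoutError:
        for task in tasks:
            task.cancel()                # don't leak the stragglers
        print("Stage timed out, continuing shutdown")

asyncio.run(run_stage())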

View File

@@ -1,5 +1,4 @@
"""Handle core shared data."""
from __future__ import annotations
import asyncio
@@ -63,7 +62,7 @@ class CoreSys:
# External objects
self._loop: asyncio.BaseEventLoop = asyncio.get_running_loop()
self._websession = None
self._websession: aiohttp.ClientSession = aiohttp.ClientSession()
# Global objects
self._config: CoreConfig = CoreConfig()
@@ -96,8 +95,10 @@ class CoreSys:
self._bus: Bus | None = None
self._mounts: MountManager | None = None
# Setup aiohttp session
self.create_websession()
# Set default header for aiohttp
self._websession._default_headers = MappingProxyType(
{aiohttp.hdrs.USER_AGENT: SERVER_SOFTWARE}
)
# Task factory attributes
self._set_task_context: list[Callable[[Context], Context]] = []
@@ -112,11 +113,8 @@ class CoreSys:
"""Return system timezone."""
if self.config.timezone:
return self.config.timezone
# pylint bug with python 3.12.4 (https://github.com/pylint-dev/pylint/issues/9811)
# pylint: disable=no-member
if self.host.info.timezone:
return self.host.info.timezone
# pylint: enable=no-member
return "UTC"
@property
@@ -546,54 +544,13 @@ class CoreSys:
return self.loop.run_in_executor(None, funct, *args)
def create_websession(self) -> None:
"""Create a new aiohttp session."""
if self._websession:
self.create_task(self._websession.close())
# Create session and set default header for aiohttp
self._websession: aiohttp.ClientSession = aiohttp.ClientSession(
headers=MappingProxyType({aiohttp.hdrs.USER_AGENT: SERVER_SOFTWARE})
)
def _create_context(self) -> Context:
"""Create a new context for a task."""
def create_task(self, coroutine: Coroutine) -> asyncio.Task:
"""Create an async task."""
context = copy_context()
for callback in self._set_task_context:
context = callback(context)
return context
def create_task(self, coroutine: Coroutine) -> asyncio.Task:
"""Create an async task."""
return self.loop.create_task(coroutine, context=self._create_context())
def call_later(
self,
delay: float,
funct: Callable[..., Coroutine[Any, Any, T]],
*args: tuple[Any],
**kwargs: dict[str, Any],
) -> asyncio.TimerHandle:
"""Start a task after a delay."""
if kwargs:
funct = partial(funct, **kwargs)
return self.loop.call_later(delay, funct, *args, context=self._create_context())
def call_at(
self,
when: datetime,
funct: Callable[..., Coroutine[Any, Any, T]],
*args: tuple[Any],
**kwargs: dict[str, Any],
) -> asyncio.TimerHandle:
"""Start a task at the specified datetime."""
if kwargs:
funct = partial(funct, **kwargs)
return self.loop.call_at(
when.timestamp(), funct, *args, context=self._create_context()
)
return self.loop.create_task(coroutine, context=context)
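The removed helpers above funnel every task, call_later, and call_at through one _create_context so context variables set by registered callbacks follow the scheduled work. The core mechanism, independent of Supervisor (the context kwarg of create_task needs Python 3.11+):

import asyncio
from contextvars import ContextVar, copy_context

request_id: ContextVar[str] = ContextVar("request_id", default="-")

def make_context(callbacks):
    context = copy_context()        # snapshot the caller's context
    for callback in callbacks:
        context = callback(context) # callbacks may adjust or replace it
    return context

async def show() -> None:
    print(request_id.get())         # -> abc123

async def main() -> None:
    def tag(ctx):
        ctx.run(request_id.set, "abc123")
        return ctx

    task = asyncio.get_running_loop().create_task(
        show(), context=make_context([tag])
    )
    await task

asyncio.run(main())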
class CoreSysAttributes:
@@ -774,23 +731,3 @@ class CoreSysAttributes:
def sys_create_task(self, coroutine: Coroutine) -> asyncio.Task:
"""Create an async task."""
return self.coresys.create_task(coroutine)
def sys_call_later(
self,
delay: float,
funct: Callable[..., Coroutine[Any, Any, T]],
*args: tuple[Any],
**kwargs: dict[str, Any],
) -> asyncio.TimerHandle:
"""Start a task after a delay."""
return self.coresys.call_later(delay, funct, *args, **kwargs)
def sys_call_at(
self,
when: datetime,
funct: Callable[..., Coroutine[Any, Any, T]],
*args: tuple[Any],
**kwargs: dict[str, Any],
) -> asyncio.TimerHandle:
"""Start a task at the specified datetime."""
return self.coresys.call_at(when, funct, *args, **kwargs)

View File

@@ -1,14 +1,12 @@
"""OS-Agent implementation for DBUS."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from awesomeversion import AwesomeVersion
from dbus_fast.aio.message_bus import MessageBus
from ...exceptions import DBusInterfaceError, DBusServiceUnkownError
from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
from ..const import (
DBUS_ATTR_DIAGNOSTICS,
DBUS_ATTR_VERSION,
@@ -82,9 +80,11 @@ class OSAgent(DBusInterfaceProxy):
"""Return if diagnostics is enabled on OS-Agent."""
return self.properties[DBUS_ATTR_DIAGNOSTICS]
def set_diagnostics(self, value: bool) -> Awaitable[None]:
@diagnostics.setter
@dbus_property
def diagnostics(self, value: bool) -> None:
"""Enable or disable OS-Agent diagnostics."""
return self.dbus.set_diagnostics(value)
asyncio.create_task(self.dbus.set_diagnostics(value))
@property
def all(self) -> list[DBusInterface]:
@@ -96,25 +96,13 @@ class OSAgent(DBusInterfaceProxy):
_LOGGER.info("Load dbus interface %s", self.name)
try:
await super().connect(bus)
await asyncio.gather(*[dbus.connect(bus) for dbus in self.all])
except DBusError:
_LOGGER.warning("Can't connect to OS-Agent")
except (DBusServiceUnkownError, DBusInterfaceError):
_LOGGER.error(
_LOGGER.warning(
"No OS-Agent support on the host. Some Host functions have been disabled."
)
return
errors = await asyncio.gather(
*[dbus.connect(bus) for dbus in self.all], return_exceptions=True
)
for err in errors:
if err:
dbus = self.all[errors.index(err)]
_LOGGER.error(
"Can't load OS Agent dbus interface %s %s: %s",
dbus.bus_name,
dbus.object_path,
err,
)
@dbus_connected
async def update(self, changed: dict[str, Any] | None = None) -> None:
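The newer connect above gathers the sub-interfaces with return_exceptions=True, so one unknown interface does not abort the others, then reports each failure individually. The pattern in isolation; pairing with zip also sidesteps the list.index lookup used above, which can point at the wrong interface when two failures compare equal:

import asyncio

async def connect_all(interfaces, bus) -> None:
    results = await asyncio.gather(
        *[iface.connect(bus) for iface in interfaces], return_exceptions=True
    )
    for iface, result in zip(interfaces, results):
        if isinstance(result, Exception):
            print(f"Can't load dbus interface {iface}: {result}")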

View File

@@ -1,5 +1,4 @@
"""AppArmor object for OS-Agent."""
from pathlib import Path
from awesomeversion import AwesomeVersion

View File

@@ -1,10 +1,9 @@
"""Board management for OS Agent."""
import logging
from dbus_fast.aio.message_bus import MessageBus
from ....exceptions import BoardInvalidError, DBusInterfaceError, DBusServiceUnkownError
from ....exceptions import BoardInvalidError
from ...const import (
DBUS_ATTR_BOARD,
DBUS_IFACE_HAOS_BOARDS,
@@ -75,10 +74,6 @@ class BoardManager(DBusInterfaceProxy):
self._board_proxy = Green()
elif self.board == BOARD_NAME_SUPERVISED:
self._board_proxy = Supervised()
else:
return
try:
if self._board_proxy:
await self._board_proxy.connect(bus)
except (DBusServiceUnkownError, DBusInterfaceError) as ex:
_LOGGER.warning("OS-Agent board support initialization failed: %s", ex)

View File

@@ -1,7 +1,6 @@
"""Green board management."""
import asyncio
from collections.abc import Awaitable
from dbus_fast.aio.message_bus import MessageBus
@@ -26,10 +25,11 @@ class Green(BoardProxy):
"""Get activity LED enabled."""
return self.properties[DBUS_ATTR_ACTIVITY_LED]
def set_activity_led(self, enabled: bool) -> Awaitable[None]:
@activity_led.setter
def activity_led(self, enabled: bool) -> None:
"""Enable/disable activity LED."""
self._data[ATTR_ACTIVITY_LED] = enabled
return self.dbus.Boards.Green.set_activity_led(enabled)
asyncio.create_task(self.dbus.Boards.Green.set_activity_led(enabled))
@property
@dbus_property
@@ -37,10 +37,11 @@ class Green(BoardProxy):
"""Get power LED enabled."""
return self.properties[DBUS_ATTR_POWER_LED]
def set_power_led(self, enabled: bool) -> Awaitable[None]:
@power_led.setter
def power_led(self, enabled: bool) -> None:
"""Enable/disable power LED."""
self._data[ATTR_POWER_LED] = enabled
return self.dbus.Boards.Green.set_power_led(enabled)
asyncio.create_task(self.dbus.Boards.Green.set_power_led(enabled))
@property
@dbus_property
@@ -48,18 +49,17 @@ class Green(BoardProxy):
"""Get user LED enabled."""
return self.properties[DBUS_ATTR_USER_LED]
def set_user_led(self, enabled: bool) -> Awaitable[None]:
@user_led.setter
def user_led(self, enabled: bool) -> None:
"""Enable/disable disk LED."""
self._data[ATTR_USER_LED] = enabled
return self.dbus.Boards.Green.set_user_led(enabled)
asyncio.create_task(self.dbus.Boards.Green.set_user_led(enabled))
async def connect(self, bus: MessageBus) -> None:
"""Connect to D-Bus."""
await super().connect(bus)
# Set LEDs based on settings on connect
await asyncio.gather(
self.set_activity_led(self._data[ATTR_ACTIVITY_LED]),
self.set_power_led(self._data[ATTR_POWER_LED]),
self.set_user_led(self._data[ATTR_USER_LED]),
)
self.activity_led = self._data[ATTR_ACTIVITY_LED]
self.power_led = self._data[ATTR_POWER_LED]
self.user_led = self._data[ATTR_USER_LED]
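The trade-off in this file: a set_* method returns the D-Bus call so callers can await (and gather) its completion, while a property setter can only fire-and-forget via asyncio.create_task. A toy illustration of why the method form composes:

import asyncio

class Board:
    async def _dbus_set(self, name: str, enabled: bool) -> None:
        await asyncio.sleep(0.01)   # stand-in for the D-Bus round-trip
        print(f"{name} -> {enabled}")

    def set_power_led(self, enabled: bool):
        # Awaitable: the caller decides when to wait, and sees errors.
        return self._dbus_set("PowerLED", enabled)

async def main() -> None:
    board = Board()
    await asyncio.gather(
        board.set_power_led(True),
        board.set_power_led(False),
    )

asyncio.run(main())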

View File

@@ -1,9 +1,5 @@
"""Supervised board management."""
from typing import Any
from supervisor.dbus.utils import dbus_connected
from .const import BOARD_NAME_SUPERVISED
from .interface import BoardProxy
@@ -15,11 +11,3 @@ class Supervised(BoardProxy):
"""Initialize properties."""
super().__init__(BOARD_NAME_SUPERVISED)
self.sync_properties: bool = False
@dbus_connected
async def update(self, changed: dict[str, Any] | None = None) -> None:
"""Do nothing as there are no properties.
Currently unused, avoid using the Properties interface to avoid a bug in
Go D-Bus, see: https://github.com/home-assistant/os-agent/issues/206
"""

View File

@@ -1,7 +1,6 @@
"""Yellow board management."""
import asyncio
from collections.abc import Awaitable
from dbus_fast.aio.message_bus import MessageBus
@@ -26,10 +25,11 @@ class Yellow(BoardProxy):
"""Get heartbeat LED enabled."""
return self.properties[DBUS_ATTR_HEARTBEAT_LED]
def set_heartbeat_led(self, enabled: bool) -> Awaitable[None]:
@heartbeat_led.setter
def heartbeat_led(self, enabled: bool) -> None:
"""Enable/disable heartbeat LED."""
self._data[ATTR_HEARTBEAT_LED] = enabled
return self.dbus.Boards.Yellow.set_heartbeat_led(enabled)
asyncio.create_task(self.dbus.Boards.Yellow.set_heartbeat_led(enabled))
@property
@dbus_property
@@ -37,10 +37,11 @@ class Yellow(BoardProxy):
"""Get power LED enabled."""
return self.properties[DBUS_ATTR_POWER_LED]
def set_power_led(self, enabled: bool) -> Awaitable[None]:
@power_led.setter
def power_led(self, enabled: bool) -> None:
"""Enable/disable power LED."""
self._data[ATTR_POWER_LED] = enabled
return self.dbus.Boards.Yellow.set_power_led(enabled)
asyncio.create_task(self.dbus.Boards.Yellow.set_power_led(enabled))
@property
@dbus_property
@@ -48,18 +49,17 @@ class Yellow(BoardProxy):
"""Get disk LED enabled."""
return self.properties[DBUS_ATTR_DISK_LED]
def set_disk_led(self, enabled: bool) -> Awaitable[None]:
@disk_led.setter
def disk_led(self, enabled: bool) -> None:
"""Enable/disable disk LED."""
self._data[ATTR_DISK_LED] = enabled
return self.dbus.Boards.Yellow.set_disk_led(enabled)
asyncio.create_task(self.dbus.Boards.Yellow.set_disk_led(enabled))
async def connect(self, bus: MessageBus) -> None:
"""Connect to D-Bus."""
await super().connect(bus)
# Set LEDs based on settings on connect
await asyncio.gather(
self.set_disk_led(self._data[ATTR_DISK_LED]),
self.set_heartbeat_led(self._data[ATTR_HEARTBEAT_LED]),
self.set_power_led(self._data[ATTR_POWER_LED]),
)
self.disk_led = self._data[ATTR_DISK_LED]
self.heartbeat_led = self._data[ATTR_HEARTBEAT_LED]
self.power_led = self._data[ATTR_POWER_LED]

View File

@@ -1,5 +1,4 @@
"""DataDisk object for OS-Agent."""
from pathlib import Path
from ..const import (

View File

@@ -12,6 +12,6 @@ class System(DBusInterface):
object_path: str = DBUS_OBJECT_HAOS_SYSTEM
@dbus_connected
async def schedule_wipe_device(self) -> bool:
async def schedule_wipe_device(self) -> None:
"""Schedule a factory reset on next system boot."""
return await self.dbus.System.call_schedule_wipe_device()
await self.dbus.System.call_schedule_wipe_device()

View File

@@ -1,5 +1,4 @@
"""Constants for DBUS."""
from enum import IntEnum, StrEnum
from socket import AF_INET, AF_INET6
@@ -37,14 +36,12 @@ DBUS_IFACE_RAUC_INSTALLER = "de.pengutronix.rauc.Installer"
DBUS_IFACE_RESOLVED_MANAGER = "org.freedesktop.resolve1.Manager"
DBUS_IFACE_SETTINGS_CONNECTION = "org.freedesktop.NetworkManager.Settings.Connection"
DBUS_IFACE_SYSTEMD_MANAGER = "org.freedesktop.systemd1.Manager"
DBUS_IFACE_SYSTEMD_UNIT = "org.freedesktop.systemd1.Unit"
DBUS_IFACE_TIMEDATE = "org.freedesktop.timedate1"
DBUS_IFACE_UDISKS2_MANAGER = "org.freedesktop.UDisks2.Manager"
DBUS_SIGNAL_NM_CONNECTION_ACTIVE_CHANGED = (
"org.freedesktop.NetworkManager.Connection.Active.StateChanged"
)
DBUS_SIGNAL_PROPERTIES_CHANGED = "org.freedesktop.DBus.Properties.PropertiesChanged"
DBUS_SIGNAL_RAUC_INSTALLER_COMPLETED = "de.pengutronix.rauc.Installer.Completed"
DBUS_OBJECT_BASE = "/"
@@ -62,13 +59,11 @@ DBUS_OBJECT_RESOLVED = "/org/freedesktop/resolve1"
DBUS_OBJECT_SETTINGS = "/org/freedesktop/NetworkManager/Settings"
DBUS_OBJECT_SYSTEMD = "/org/freedesktop/systemd1"
DBUS_OBJECT_TIMEDATE = "/org/freedesktop/timedate1"
DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2"
DBUS_OBJECT_UDISKS2_MANAGER = "/org/freedesktop/UDisks2/Manager"
DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2/Manager"
DBUS_ATTR_ACTIVE_ACCESSPOINT = "ActiveAccessPoint"
DBUS_ATTR_ACTIVE_CONNECTION = "ActiveConnection"
DBUS_ATTR_ACTIVE_CONNECTIONS = "ActiveConnections"
DBUS_ATTR_ACTIVE_STATE = "ActiveState"
DBUS_ATTR_ACTIVITY_LED = "ActivityLED"
DBUS_ATTR_ADDRESS_DATA = "AddressData"
DBUS_ATTR_BITRATE = "Bitrate"
@@ -182,7 +177,6 @@ DBUS_ATTR_UUID = "Uuid"
DBUS_ATTR_VARIANT = "Variant"
DBUS_ATTR_VENDOR = "Vendor"
DBUS_ATTR_VERSION = "Version"
DBUS_ATTR_VIRTUALIZATION = "Virtualization"
DBUS_ATTR_WHAT = "What"
DBUS_ATTR_WWN = "WWN"

View File

@@ -1,5 +1,4 @@
"""D-Bus interface for hostname."""
import logging
from dbus_fast.aio.message_bus import MessageBus

View File

@@ -1,5 +1,4 @@
"""Interface class for D-Bus wrappers."""
from abc import ABC
from collections.abc import Callable
from functools import wraps

View File

@@ -1,5 +1,4 @@
"""Interface to Logind over D-Bus."""
import logging
from dbus_fast.aio.message_bus import MessageBus

View File

@@ -1,5 +1,4 @@
"""D-Bus interface objects."""
import asyncio
import logging
@@ -18,7 +17,7 @@ from .rauc import Rauc
from .resolved import Resolved
from .systemd import Systemd
from .timedate import TimeDate
from .udisks2 import UDisks2Manager
from .udisks2 import UDisks2
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -38,7 +37,7 @@ class DBusManager(CoreSysAttributes):
self._agent: OSAgent = OSAgent()
self._timedate: TimeDate = TimeDate()
self._resolved: Resolved = Resolved()
self._udisks2: UDisks2Manager = UDisks2Manager()
self._udisks2: UDisks2 = UDisks2()
self._bus: MessageBus | None = None
@property
@@ -82,7 +81,7 @@ class DBusManager(CoreSysAttributes):
return self._resolved
@property
def udisks2(self) -> UDisks2Manager:
def udisks2(self) -> UDisks2:
"""Return the udisks2 interface."""
return self._udisks2
@@ -129,11 +128,9 @@ class DBusManager(CoreSysAttributes):
for err in errors:
if err:
dbus = self.all[errors.index(err)]
_LOGGER.warning(
"Can't load dbus interface %s %s: %s",
dbus.name,
dbus.object_path,
"Can't load dbus interface %s: %s",
self.all[errors.index(err)].name,
err,
)

View File

@@ -1,5 +1,4 @@
"""Network Manager implementation for DBUS."""
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""NetworkConnection objects for Network Manager."""
from dataclasses import dataclass
from ipaddress import IPv4Address, IPv6Address
@@ -59,22 +58,11 @@ class VlanProperties:
parent: str | None
@dataclass(slots=True)
class IpAddress:
"""IP address object for Network Manager."""
address: str
prefix: int
@dataclass(slots=True)
class IpProperties:
"""IP properties object for Network Manager."""
method: str | None
address_data: list[IpAddress] | None
gateway: str | None
dns: list[bytes | int] | None
@dataclass(slots=True)

View File

@@ -1,5 +1,4 @@
"""Network Manager DNS Manager object."""
from ipaddress import ip_address
import logging
from typing import Any

View File

@@ -1,18 +1,17 @@
"""Connection object for Network Manager."""
import logging
from typing import Any
from dbus_fast import Variant
from dbus_fast.aio.message_bus import MessageBus
from ....const import ATTR_METHOD, ATTR_MODE, ATTR_PSK, ATTR_SSID
from ...const import DBUS_NAME_NM
from ...interface import DBusInterface
from ...utils import dbus_connected
from ..configuration import (
ConnectionProperties,
EthernetProperties,
IpAddress,
IpProperties,
MatchProperties,
VlanProperties,
@@ -21,52 +20,30 @@ from ..configuration import (
)
CONF_ATTR_CONNECTION = "connection"
CONF_ATTR_MATCH = "match"
CONF_ATTR_802_ETHERNET = "802-3-ethernet"
CONF_ATTR_802_WIRELESS = "802-11-wireless"
CONF_ATTR_802_WIRELESS_SECURITY = "802-11-wireless-security"
CONF_ATTR_VLAN = "vlan"
CONF_ATTR_IPV4 = "ipv4"
CONF_ATTR_IPV6 = "ipv6"
CONF_ATTR_MATCH = "match"
CONF_ATTR_PATH = "path"
CONF_ATTR_CONNECTION_ID = "id"
CONF_ATTR_CONNECTION_UUID = "uuid"
CONF_ATTR_CONNECTION_TYPE = "type"
CONF_ATTR_CONNECTION_LLMNR = "llmnr"
CONF_ATTR_CONNECTION_MDNS = "mdns"
CONF_ATTR_CONNECTION_AUTOCONNECT = "autoconnect"
CONF_ATTR_CONNECTION_INTERFACE_NAME = "interface-name"
CONF_ATTR_MATCH_PATH = "path"
CONF_ATTR_VLAN_ID = "id"
CONF_ATTR_VLAN_PARENT = "parent"
CONF_ATTR_802_ETHERNET_ASSIGNED_MAC = "assigned-mac-address"
CONF_ATTR_802_WIRELESS_MODE = "mode"
CONF_ATTR_802_WIRELESS_ASSIGNED_MAC = "assigned-mac-address"
CONF_ATTR_802_WIRELESS_SSID = "ssid"
CONF_ATTR_802_WIRELESS_POWERSAVE = "powersave"
CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG = "auth-alg"
CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT = "key-mgmt"
CONF_ATTR_802_WIRELESS_SECURITY_PSK = "psk"
CONF_ATTR_IPV4_METHOD = "method"
CONF_ATTR_IPV4_ADDRESS_DATA = "address-data"
CONF_ATTR_IPV4_GATEWAY = "gateway"
CONF_ATTR_IPV4_DNS = "dns"
CONF_ATTR_IPV6_METHOD = "method"
CONF_ATTR_IPV6_ADDRESS_DATA = "address-data"
CONF_ATTR_IPV6_GATEWAY = "gateway"
CONF_ATTR_IPV6_DNS = "dns"
ATTR_ID = "id"
ATTR_UUID = "uuid"
ATTR_TYPE = "type"
ATTR_PARENT = "parent"
ATTR_ASSIGNED_MAC = "assigned-mac-address"
ATTR_POWERSAVE = "powersave"
ATTR_AUTH_ALG = "auth-alg"
ATTR_KEY_MGMT = "key-mgmt"
ATTR_INTERFACE_NAME = "interface-name"
ATTR_PATH = "path"
IPV4_6_IGNORE_FIELDS = [
"addresses",
"address-data",
"dns",
"dns-data",
"gateway",
"method",
]
@@ -96,7 +73,7 @@ def _merge_settings_attribute(
class NetworkSetting(DBusInterface):
"""Network connection setting object for Network Manager.
https://networkmanager.dev/docs/api/1.48.0/gdbus-org.freedesktop.NetworkManager.Settings.Connection.html
https://developer.gnome.org/NetworkManager/stable/gdbus-org.freedesktop.NetworkManager.Settings.Connection.html
"""
bus_name: str = DBUS_NAME_NM
@@ -170,7 +147,7 @@ class NetworkSetting(DBusInterface):
new_settings,
settings,
CONF_ATTR_CONNECTION,
ignore_current_value=[CONF_ATTR_CONNECTION_INTERFACE_NAME],
ignore_current_value=[ATTR_INTERFACE_NAME],
)
_merge_settings_attribute(new_settings, settings, CONF_ATTR_802_ETHERNET)
_merge_settings_attribute(new_settings, settings, CONF_ATTR_802_WIRELESS)
@@ -215,69 +192,47 @@ class NetworkSetting(DBusInterface):
# See: https://developer-old.gnome.org/NetworkManager/stable/ch01.html
if CONF_ATTR_CONNECTION in data:
self._connection = ConnectionProperties(
data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_ID),
data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_UUID),
data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_TYPE),
data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_INTERFACE_NAME),
data[CONF_ATTR_CONNECTION].get(ATTR_ID),
data[CONF_ATTR_CONNECTION].get(ATTR_UUID),
data[CONF_ATTR_CONNECTION].get(ATTR_TYPE),
data[CONF_ATTR_CONNECTION].get(ATTR_INTERFACE_NAME),
)
if CONF_ATTR_802_ETHERNET in data:
self._ethernet = EthernetProperties(
data[CONF_ATTR_802_ETHERNET].get(CONF_ATTR_802_ETHERNET_ASSIGNED_MAC),
data[CONF_ATTR_802_ETHERNET].get(ATTR_ASSIGNED_MAC),
)
if CONF_ATTR_802_WIRELESS in data:
self._wireless = WirelessProperties(
bytes(
data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_SSID, [])
).decode(),
data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_ASSIGNED_MAC),
data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_MODE),
data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_POWERSAVE),
bytes(data[CONF_ATTR_802_WIRELESS].get(ATTR_SSID, [])).decode(),
data[CONF_ATTR_802_WIRELESS].get(ATTR_ASSIGNED_MAC),
data[CONF_ATTR_802_WIRELESS].get(ATTR_MODE),
data[CONF_ATTR_802_WIRELESS].get(ATTR_POWERSAVE),
)
if CONF_ATTR_802_WIRELESS_SECURITY in data:
self._wireless_security = WirelessSecurityProperties(
data[CONF_ATTR_802_WIRELESS_SECURITY].get(
CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG
),
data[CONF_ATTR_802_WIRELESS_SECURITY].get(
CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT
),
data[CONF_ATTR_802_WIRELESS_SECURITY].get(
CONF_ATTR_802_WIRELESS_SECURITY_PSK
),
data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_AUTH_ALG),
data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_KEY_MGMT),
data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_PSK),
)
if CONF_ATTR_VLAN in data:
self._vlan = VlanProperties(
data[CONF_ATTR_VLAN].get(CONF_ATTR_VLAN_ID),
data[CONF_ATTR_VLAN].get(CONF_ATTR_VLAN_PARENT),
data[CONF_ATTR_VLAN].get(ATTR_ID),
data[CONF_ATTR_VLAN].get(ATTR_PARENT),
)
if CONF_ATTR_IPV4 in data:
address_data = None
if ips := data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_ADDRESS_DATA):
address_data = [IpAddress(ip["address"], ip["prefix"]) for ip in ips]
self._ipv4 = IpProperties(
data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_METHOD),
address_data,
data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_GATEWAY),
data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_DNS),
data[CONF_ATTR_IPV4].get(ATTR_METHOD),
)
if CONF_ATTR_IPV6 in data:
address_data = None
if ips := data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_ADDRESS_DATA):
address_data = [IpAddress(ip["address"], ip["prefix"]) for ip in ips]
self._ipv6 = IpProperties(
data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_METHOD),
address_data,
data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_GATEWAY),
data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_DNS),
data[CONF_ATTR_IPV6].get(ATTR_METHOD),
)
if CONF_ATTR_MATCH in data:
self._match = MatchProperties(
data[CONF_ATTR_MATCH].get(CONF_ATTR_MATCH_PATH)
)
self._match = MatchProperties(data[CONF_ATTR_MATCH].get(ATTR_PATH))
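The richer parsing above turns NetworkManager's "address-data" list into small dataclasses. Stripped of the surrounding class, the shape of that conversion is:

from dataclasses import dataclass

@dataclass(slots=True)
class IpAddress:
    address: str
    prefix: int

def parse_address_data(section: dict) -> list[IpAddress] | None:
    # NM serializes addresses as [{"address": "192.168.1.2", "prefix": 24}, ...]
    if ips := section.get("address-data"):
        return [IpAddress(ip["address"], ip["prefix"]) for ip in ips]
    return None

print(parse_address_data({"address-data": [{"address": "192.168.1.2", "prefix": 24}]}))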

View File

@@ -1,5 +1,4 @@
"""Payload generators for DBUS communication."""
from __future__ import annotations
import socket
@@ -8,131 +7,25 @@ from uuid import uuid4
from dbus_fast import Variant
from ....host.const import InterfaceMethod, InterfaceType
from .. import NetworkManager
from . import (
ATTR_ASSIGNED_MAC,
CONF_ATTR_802_ETHERNET,
CONF_ATTR_802_ETHERNET_ASSIGNED_MAC,
CONF_ATTR_802_WIRELESS,
CONF_ATTR_802_WIRELESS_ASSIGNED_MAC,
CONF_ATTR_802_WIRELESS_MODE,
CONF_ATTR_802_WIRELESS_POWERSAVE,
CONF_ATTR_802_WIRELESS_SECURITY,
CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG,
CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT,
CONF_ATTR_802_WIRELESS_SECURITY_PSK,
CONF_ATTR_802_WIRELESS_SSID,
CONF_ATTR_CONNECTION,
CONF_ATTR_CONNECTION_AUTOCONNECT,
CONF_ATTR_CONNECTION_ID,
CONF_ATTR_CONNECTION_LLMNR,
CONF_ATTR_CONNECTION_MDNS,
CONF_ATTR_CONNECTION_TYPE,
CONF_ATTR_CONNECTION_UUID,
CONF_ATTR_IPV4,
CONF_ATTR_IPV4_ADDRESS_DATA,
CONF_ATTR_IPV4_DNS,
CONF_ATTR_IPV4_GATEWAY,
CONF_ATTR_IPV4_METHOD,
CONF_ATTR_IPV6,
CONF_ATTR_IPV6_ADDRESS_DATA,
CONF_ATTR_IPV6_DNS,
CONF_ATTR_IPV6_GATEWAY,
CONF_ATTR_IPV6_METHOD,
CONF_ATTR_MATCH,
CONF_ATTR_MATCH_PATH,
CONF_ATTR_PATH,
CONF_ATTR_VLAN,
CONF_ATTR_VLAN_ID,
CONF_ATTR_VLAN_PARENT,
)
from .. import NetworkManager
from ....host.const import InterfaceMethod, InterfaceType
if TYPE_CHECKING:
from ....host.configuration import Interface
def _get_ipv4_connection_settings(ipv4setting) -> dict:
ipv4 = {}
if not ipv4setting or ipv4setting.method == InterfaceMethod.AUTO:
ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "auto")
elif ipv4setting.method == InterfaceMethod.DISABLED:
ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "disabled")
elif ipv4setting.method == InterfaceMethod.STATIC:
ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "manual")
address_data = []
for address in ipv4setting.address:
address_data.append(
{
"address": Variant("s", str(address.ip)),
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
}
)
ipv4[CONF_ATTR_IPV4_ADDRESS_DATA] = Variant("aa{sv}", address_data)
if ipv4setting.gateway:
ipv4[CONF_ATTR_IPV4_GATEWAY] = Variant("s", str(ipv4setting.gateway))
else:
raise RuntimeError("Invalid IPv4 InterfaceMethod")
if (
ipv4setting
and ipv4setting.nameservers
and ipv4setting.method
in (
InterfaceMethod.AUTO,
InterfaceMethod.STATIC,
)
):
nameservers = ipv4setting.nameservers if ipv4setting else []
ipv4[CONF_ATTR_IPV4_DNS] = Variant(
"au",
[socket.htonl(int(ip_address)) for ip_address in nameservers],
)
return ipv4
def _get_ipv6_connection_settings(ipv6setting) -> dict:
ipv6 = {}
if not ipv6setting or ipv6setting.method == InterfaceMethod.AUTO:
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "auto")
elif ipv6setting.method == InterfaceMethod.DISABLED:
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "link-local")
elif ipv6setting.method == InterfaceMethod.STATIC:
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "manual")
address_data = []
for address in ipv6setting.address:
address_data.append(
{
"address": Variant("s", str(address.ip)),
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
}
)
ipv6[CONF_ATTR_IPV6_ADDRESS_DATA] = Variant("aa{sv}", address_data)
if ipv6setting.gateway:
ipv6[CONF_ATTR_IPV6_GATEWAY] = Variant("s", str(ipv6setting.gateway))
else:
raise RuntimeError("Invalid IPv6 InterfaceMethod")
if (
ipv6setting
and ipv6setting.nameservers
and ipv6setting.method
in (
InterfaceMethod.AUTO,
InterfaceMethod.STATIC,
)
):
nameservers = ipv6setting.nameservers if ipv6setting else []
ipv6[CONF_ATTR_IPV6_DNS] = Variant(
"aay",
[ip_address.packed for ip_address in nameservers],
)
return ipv6
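The two helpers encode nameservers in the wire formats NetworkManager expects: IPv4 DNS entries are network-byte-order uint32 values (D-Bus signature "au"), IPv6 entries raw 16-byte addresses (signature "aay"). A standalone sketch of just those conversions:

import socket
from ipaddress import IPv4Address, IPv6Address

# IPv4: integer form of the address, swapped from host to network byte order.
ipv4_dns = [socket.htonl(int(IPv4Address("8.8.4.4")))]

# IPv6: the packed 16-byte representation.
ipv6_dns = [IPv6Address("2001:4860:4860::8888").packed]

print(ipv4_dns, ipv6_dns)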
def get_connection_from_interface(
interface: Interface,
network_manager: NetworkManager,
@@ -144,8 +37,8 @@ def get_connection_from_interface(
# Generate/Update ID/name
if not name or not name.startswith("Supervisor"):
name = f"Supervisor {interface.name}"
if interface.type == InterfaceType.VLAN:
name = f"{name}.{interface.vlan.id}"
if interface.type == InterfaceType.ETHERNET:
iftype = "802-3-ethernet"
@@ -160,31 +53,77 @@ def get_connection_from_interface(
conn: dict[str, dict[str, Variant]] = {
CONF_ATTR_CONNECTION: {
CONF_ATTR_CONNECTION_ID: Variant("s", name),
CONF_ATTR_CONNECTION_UUID: Variant("s", uuid),
CONF_ATTR_CONNECTION_TYPE: Variant("s", iftype),
CONF_ATTR_CONNECTION_LLMNR: Variant("i", 2),
CONF_ATTR_CONNECTION_MDNS: Variant("i", 2),
CONF_ATTR_CONNECTION_AUTOCONNECT: Variant("b", True),
"id": Variant("s", name),
"type": Variant("s", iftype),
"uuid": Variant("s", uuid),
"llmnr": Variant("i", 2),
"mdns": Variant("i", 2),
"autoconnect": Variant("b", True),
},
}
if interface.type != InterfaceType.VLAN:
if interface.path:
conn[CONF_ATTR_MATCH] = {
CONF_ATTR_MATCH_PATH: Variant("as", [interface.path])
}
conn[CONF_ATTR_MATCH] = {CONF_ATTR_PATH: Variant("as", [interface.path])}
else:
conn[CONF_ATTR_CONNECTION]["interface-name"] = Variant("s", interface.name)
conn[CONF_ATTR_IPV4] = _get_ipv4_connection_settings(interface.ipv4setting)
ipv4 = {}
if not interface.ipv4 or interface.ipv4.method == InterfaceMethod.AUTO:
ipv4["method"] = Variant("s", "auto")
elif interface.ipv4.method == InterfaceMethod.DISABLED:
ipv4["method"] = Variant("s", "disabled")
else:
ipv4["method"] = Variant("s", "manual")
ipv4["dns"] = Variant(
"au",
[
socket.htonl(int(ip_address))
for ip_address in interface.ipv4.nameservers
],
)
conn[CONF_ATTR_IPV6] = _get_ipv6_connection_settings(interface.ipv6setting)
address_data = []
for address in interface.ipv4.address:
address_data.append(
{
"address": Variant("s", str(address.ip)),
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
}
)
ipv4["address-data"] = Variant("aa{sv}", address_data)
ipv4["gateway"] = Variant("s", str(interface.ipv4.gateway))
conn[CONF_ATTR_IPV4] = ipv4
ipv6 = {}
if not interface.ipv6 or interface.ipv6.method == InterfaceMethod.AUTO:
ipv6["method"] = Variant("s", "auto")
elif interface.ipv6.method == InterfaceMethod.DISABLED:
ipv6["method"] = Variant("s", "link-local")
else:
ipv6["method"] = Variant("s", "manual")
ipv6["dns"] = Variant(
"aay", [ip_address.packed for ip_address in interface.ipv6.nameservers]
)
address_data = []
for address in interface.ipv6.address:
address_data.append(
{
"address": Variant("s", str(address.ip)),
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
}
)
ipv6["address-data"] = Variant("aa{sv}", address_data)
ipv6["gateway"] = Variant("s", str(interface.ipv6.gateway))
conn[CONF_ATTR_IPV6] = ipv6
if interface.type == InterfaceType.ETHERNET:
conn[CONF_ATTR_802_ETHERNET] = {
CONF_ATTR_802_ETHERNET_ASSIGNED_MAC: Variant("s", "preserve")
}
conn[CONF_ATTR_802_ETHERNET] = {ATTR_ASSIGNED_MAC: Variant("s", "preserve")}
elif interface.type == InterfaceType.VLAN:
parent = interface.vlan.interface
if parent in network_manager and (
@@ -193,44 +132,30 @@ def get_connection_from_interface(
parent = parent_connection.uuid
conn[CONF_ATTR_VLAN] = {
CONF_ATTR_VLAN_ID: Variant("u", interface.vlan.id),
CONF_ATTR_VLAN_PARENT: Variant("s", parent),
"id": Variant("u", interface.vlan.id),
"parent": Variant("s", parent),
}
elif interface.type == InterfaceType.WIRELESS:
wireless = {
CONF_ATTR_802_WIRELESS_ASSIGNED_MAC: Variant("s", "preserve"),
CONF_ATTR_802_WIRELESS_MODE: Variant("s", "infrastructure"),
CONF_ATTR_802_WIRELESS_POWERSAVE: Variant("i", 1),
ATTR_ASSIGNED_MAC: Variant("s", "preserve"),
"ssid": Variant("ay", interface.wifi.ssid.encode("UTF-8")),
"mode": Variant("s", "infrastructure"),
"powersave": Variant("i", 1),
}
if interface.wifi and interface.wifi.ssid:
wireless[CONF_ATTR_802_WIRELESS_SSID] = Variant(
"ay", interface.wifi.ssid.encode("UTF-8")
)
conn[CONF_ATTR_802_WIRELESS] = wireless
if interface.wifi and interface.wifi.auth != "open":
if interface.wifi.auth != "open":
wireless["security"] = Variant("s", CONF_ATTR_802_WIRELESS_SECURITY)
wireless_security = {}
if interface.wifi.auth == "wep":
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG] = Variant(
"s", "open"
)
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT] = Variant(
"s", "none"
)
wireless_security["auth-alg"] = Variant("s", "open")
wireless_security["key-mgmt"] = Variant("s", "none")
elif interface.wifi.auth == "wpa-psk":
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG] = Variant(
"s", "open"
)
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT] = Variant(
"s", "wpa-psk"
)
wireless_security["auth-alg"] = Variant("s", "open")
wireless_security["key-mgmt"] = Variant("s", "wpa-psk")
if interface.wifi.psk:
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_PSK] = Variant(
"s", interface.wifi.psk
)
wireless_security["psk"] = Variant("s", interface.wifi.psk)
conn[CONF_ATTR_802_WIRELESS_SECURITY] = wireless_security
return conn
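For orientation, a hypothetical payload of the shape this function returns for a plain DHCP ethernet interface. The literal keys are assumed values of the CONF_ATTR_* constants (NetworkManager's standard setting names), and the match path is illustrative:

from uuid import uuid4

from dbus_fast import Variant

conn = {
    "connection": {
        "id": Variant("s", "Supervisor eth0"),
        "type": Variant("s", "802-3-ethernet"),
        "uuid": Variant("s", str(uuid4())),
        "llmnr": Variant("i", 2),
        "mdns": Variant("i", 2),
        "autoconnect": Variant("b", True),
    },
    "match": {"path": Variant("as", ["pci-0000:02:00.0"])},  # illustrative device path
    "ipv4": {"method": Variant("s", "auto")},
    "ipv6": {"method": Variant("s", "auto")},
    "802-3-ethernet": {"assigned-mac-address": Variant("s", "preserve")},
}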

View File

@@ -1,5 +1,4 @@
"""Network Manager implementation for DBUS."""
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Wireless object for Network Manager."""
import asyncio
import logging
from typing import Any

View File

@@ -1,8 +1,6 @@
"""D-Bus interface for rauc."""
from ctypes import c_uint32, c_uint64
import logging
from typing import Any, NotRequired, TypedDict
from typing import Any
from dbus_fast.aio.message_bus import MessageBus
@@ -25,28 +23,6 @@ from .utils import dbus_connected
_LOGGER: logging.Logger = logging.getLogger(__name__)
SlotStatusDataType = TypedDict(
"SlotStatusDataType",
{
"class": str,
"type": str,
"state": str,
"device": str,
"bundle.compatible": NotRequired[str],
"sha256": NotRequired[str],
"size": NotRequired[c_uint64],
"installed.count": NotRequired[c_uint32],
"bundle.version": NotRequired[str],
"installed.timestamp": NotRequired[str],
"status": NotRequired[str],
"activated.count": NotRequired[c_uint32],
"activated.timestamp": NotRequired[str],
"boot-status": NotRequired[str],
"bootname": NotRequired[str],
"parent": NotRequired[str],
},
)
class Rauc(DBusInterfaceProxy):
"""Handle D-Bus interface for rauc."""
@@ -107,7 +83,7 @@ class Rauc(DBusInterfaceProxy):
await self.dbus.Installer.call_install(str(raucb_file))
@dbus_connected
async def get_slot_status(self) -> list[tuple[str, SlotStatusDataType]]:
async def get_slot_status(self) -> list[tuple[str, dict[str, Any]]]:
"""Get slot status."""
return await self.dbus.Installer.call_get_slot_status()
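SlotStatusDataType uses TypedDict's functional form because keys such as "bundle.compatible" are not valid Python identifiers and "class" is a keyword, so the class-based syntax cannot declare them. A compact illustration of the same pattern:

from ctypes import c_uint64
from typing import NotRequired, TypedDict

# Functional syntax is the only way to declare non-identifier keys.
Slot = TypedDict(
    "Slot",
    {
        "class": str,  # keyword: impossible as a class attribute
        "device": str,
        "bundle.version": NotRequired[str],  # dotted key: also impossible
        "size": NotRequired[c_uint64],
    },
)

slot: Slot = {"class": "system", "device": "/dev/mmcblk0p2"}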

View File

@@ -1,5 +1,4 @@
"""D-Bus interface for systemd-resolved."""
from __future__ import annotations
import logging

View File

@@ -13,19 +13,16 @@ from ..exceptions import (
DBusServiceUnkownError,
DBusSystemdNoSuchUnit,
)
from ..utils.dbus import DBusSignalWrapper
from .const import (
DBUS_ATTR_FINISH_TIMESTAMP,
DBUS_ATTR_FIRMWARE_TIMESTAMP_MONOTONIC,
DBUS_ATTR_KERNEL_TIMESTAMP_MONOTONIC,
DBUS_ATTR_LOADER_TIMESTAMP_MONOTONIC,
DBUS_ATTR_USERSPACE_TIMESTAMP_MONOTONIC,
DBUS_ATTR_VIRTUALIZATION,
DBUS_ERR_SYSTEMD_NO_SUCH_UNIT,
DBUS_IFACE_SYSTEMD_MANAGER,
DBUS_NAME_SYSTEMD,
DBUS_OBJECT_SYSTEMD,
DBUS_SIGNAL_PROPERTIES_CHANGED,
StartUnitMode,
StopUnitMode,
UnitActiveState,
@@ -45,7 +42,9 @@ def systemd_errors(func):
return await func(*args, **kwds)
except DBusFatalError as err:
if err.type == DBUS_ERR_SYSTEMD_NO_SUCH_UNIT:
raise DBusSystemdNoSuchUnit(str(err)) from None
# pylint: disable=raise-missing-from
raise DBusSystemdNoSuchUnit(str(err))
# pylint: enable=raise-missing-from
raise err
return wrapper
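A self-contained sketch of the decorator pattern above: translate one transport-level error into a domain exception, suppressing the implicit chain with "from None". The error-name string and both exception classes are stand-ins for the ones imported by the real module:

from functools import wraps

DBUS_ERR_SYSTEMD_NO_SUCH_UNIT = "org.freedesktop.systemd1.NoSuchUnit"  # assumed value


class DBusFatalError(Exception):
    """Stand-in: the real error exposes the D-Bus error name as .type."""

    def __init__(self, type_: str) -> None:
        super().__init__(type_)
        self.type = type_


class DBusSystemdNoSuchUnit(Exception):
    """Stand-in for the Supervisor exception."""


def systemd_errors(func):
    """Wrap an async D-Bus call and translate known fatal errors."""

    @wraps(func)
    async def wrapper(*args, **kwds):
        try:
            return await func(*args, **kwds)
        except DBusFatalError as err:
            if err.type == DBUS_ERR_SYSTEMD_NO_SUCH_UNIT:
                raise DBusSystemdNoSuchUnit(str(err)) from None
            raise

    return wrapper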
@@ -67,11 +66,6 @@ class SystemdUnit(DBusInterface):
"""Get active state of the unit."""
return await self.dbus.Unit.get_active_state()
@dbus_connected
def properties_changed(self) -> DBusSignalWrapper:
"""Return signal wrapper for properties changed."""
return self.dbus.signal(DBUS_SIGNAL_PROPERTIES_CHANGED)
class Systemd(DBusInterfaceProxy):
"""Systemd function handler.
@@ -115,12 +109,6 @@ class Systemd(DBusInterfaceProxy):
"""Return the boot timestamp."""
return self.properties[DBUS_ATTR_FINISH_TIMESTAMP]
@property
@dbus_property
def virtualization(self) -> str:
"""Return virtualization hypervisor being used."""
return self.properties[DBUS_ATTR_VIRTUALIZATION]
@dbus_connected
async def reboot(self) -> None:
"""Reboot host computer."""

View File

@@ -1,5 +1,4 @@
"""Interface to systemd-timedate over D-Bus."""
from datetime import datetime
import logging

View File

@@ -1,5 +1,4 @@
"""Interface to UDisks2 over D-Bus."""
import asyncio
import logging
from typing import Any
@@ -16,15 +15,12 @@ from ...exceptions import (
from ..const import (
DBUS_ATTR_SUPPORTED_FILESYSTEMS,
DBUS_ATTR_VERSION,
DBUS_IFACE_BLOCK,
DBUS_IFACE_DRIVE,
DBUS_IFACE_UDISKS2_MANAGER,
DBUS_NAME_UDISKS2,
DBUS_OBJECT_BASE,
DBUS_OBJECT_UDISKS2,
DBUS_OBJECT_UDISKS2_MANAGER,
)
from ..interface import DBusInterface, DBusInterfaceProxy, dbus_property
from ..interface import DBusInterfaceProxy, dbus_property
from ..utils import dbus_connected
from .block import UDisks2Block
from .const import UDISKS2_DEFAULT_OPTIONS
@@ -34,15 +30,7 @@ from .drive import UDisks2Drive
_LOGGER: logging.Logger = logging.getLogger(__name__)
class UDisks2(DBusInterface):
"""Handle D-Bus interface for UDisks2 root object."""
name: str = DBUS_NAME_UDISKS2
bus_name: str = DBUS_NAME_UDISKS2
object_path: str = DBUS_OBJECT_UDISKS2
class UDisks2Manager(DBusInterfaceProxy):
class UDisks2(DBusInterfaceProxy):
"""Handle D-Bus interface for UDisks2.
http://storaged.org/doc/udisks2-api/latest/
@@ -50,36 +38,22 @@ class UDisks2Manager(DBusInterfaceProxy):
name: str = DBUS_NAME_UDISKS2
bus_name: str = DBUS_NAME_UDISKS2
object_path: str = DBUS_OBJECT_UDISKS2_MANAGER
object_path: str = DBUS_OBJECT_UDISKS2
properties_interface: str = DBUS_IFACE_UDISKS2_MANAGER
_block_devices: dict[str, UDisks2Block] = {}
_drives: dict[str, UDisks2Drive] = {}
def __init__(self):
"""Initialize object."""
super().__init__()
self.udisks2_object_manager = UDisks2()
async def connect(self, bus: MessageBus):
"""Connect to D-Bus."""
try:
await super().connect(bus)
await self.udisks2_object_manager.connect(bus)
except DBusError:
_LOGGER.warning("Can't connect to udisks2")
except (DBusServiceUnkownError, DBusInterfaceError):
_LOGGER.warning(
"No udisks2 support on the host. Host control has been disabled."
)
else:
# Register for signals on devices added/removed
self.udisks2_object_manager.dbus.object_manager.on_interfaces_added(
self._interfaces_added
)
self.udisks2_object_manager.dbus.object_manager.on_interfaces_removed(
self._interfaces_removed
)
@dbus_connected
async def update(self, changed: dict[str, Any] | None = None) -> None:
@@ -187,47 +161,11 @@ class UDisks2Manager(DBusInterfaceProxy):
]
)
async def _interfaces_added(
self, object_path: str, properties: dict[str, dict[str, Any]]
) -> None:
"""Interfaces added to a UDisks2 object."""
if object_path in self._block_devices:
await self._block_devices[object_path].update()
return
if object_path in self._drives:
await self._drives[object_path].update()
return
if DBUS_IFACE_BLOCK in properties:
self._block_devices[object_path] = await UDisks2Block.new(
object_path, self.dbus.bus
)
return
if DBUS_IFACE_DRIVE in properties:
self._drives[object_path] = await UDisks2Drive.new(
object_path, self.dbus.bus
)
async def _interfaces_removed(
self, object_path: str, interfaces: list[str]
) -> None:
"""Interfaces removed from a UDisks2 object."""
if object_path in self._block_devices and DBUS_IFACE_BLOCK in interfaces:
self._block_devices[object_path].shutdown()
del self._block_devices[object_path]
return
if object_path in self._drives and DBUS_IFACE_DRIVE in interfaces:
self._drives[object_path].shutdown()
del self._drives[object_path]
def shutdown(self) -> None:
"""Shutdown the object and disconnect from D-Bus.
This method is irreversible.
"""
self.udisks2_object_manager.shutdown()
for block_device in self.block_devices:
block_device.shutdown()
for drive in self.drives:

View File

@@ -1,5 +1,4 @@
"""Interface to UDisks2 Block Device over D-Bus."""
import asyncio
from collections.abc import Callable
from pathlib import Path

View File

@@ -1,6 +1,6 @@
"""Interface to UDisks2 Drive over D-Bus."""
from datetime import UTC, datetime
from datetime import datetime, timezone
from dbus_fast.aio import MessageBus
@@ -95,7 +95,7 @@ class UDisks2Drive(DBusInterfaceProxy):
"""Return time drive first detected."""
return datetime.fromtimestamp(
self.properties[DBUS_ATTR_TIME_DETECTED] * 10**-6
).astimezone(UTC)
).astimezone(timezone.utc)
@property
@dbus_property
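The two spellings in this hunk are equivalent: datetime.UTC, added in Python 3.11, is an alias of timezone.utc, so either converts the microsecond timestamp into the same aware datetime:

from datetime import UTC, datetime, timezone

usec = 1_700_000_000_000_000  # microseconds, the unit UDisks2 uses for TimeDetected

new_style = datetime.fromtimestamp(usec * 10**-6).astimezone(UTC)
old_style = datetime.fromtimestamp(usec * 10**-6).astimezone(timezone.utc)
assert UTC is timezone.utc
assert new_style == old_style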

View File

@@ -1,5 +1,4 @@
"""Handle discover message for Home Assistant."""
from __future__ import annotations
from contextlib import suppress
@@ -8,12 +7,14 @@ from typing import TYPE_CHECKING, Any
from uuid import UUID, uuid4
import attr
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..const import ATTR_CONFIG, ATTR_DISCOVERY, FILE_HASSIO_DISCOVERY
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import HomeAssistantAPIError
from ..exceptions import DiscoveryError, HomeAssistantAPIError
from ..utils.common import FileConfiguration
from .validate import SCHEMA_DISCOVERY_CONFIG
from .validate import SCHEMA_DISCOVERY_CONFIG, valid_discovery_config
if TYPE_CHECKING:
from ..addons.addon import Addon
@@ -74,6 +75,12 @@ class Discovery(CoreSysAttributes, FileConfiguration):
def send(self, addon: Addon, service: str, config: dict[str, Any]) -> Message:
"""Send a discovery message to Home Assistant."""
try:
config = valid_discovery_config(service, config)
except vol.Invalid as err:
_LOGGER.error("Invalid discovery %s config", humanize_error(config, err))
raise DiscoveryError() from err
# Create message
message = Message(addon.slug, service, config)
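A minimal sketch of the validate-or-raise pattern in send(), with a stand-in schema and exception class; the real valid_discovery_config and DiscoveryError live in the surrounding package:

import voluptuous as vol
from voluptuous.humanize import humanize_error


class DiscoveryError(Exception):
    """Stand-in for the Supervisor exception."""


SCHEMA = vol.Schema({vol.Required("host"): str, vol.Required("port"): int})


def validate_discovery(config: dict) -> dict:
    """Validate a discovery payload before building the message."""
    try:
        return SCHEMA(config)
    except vol.Invalid as err:
        raise DiscoveryError(humanize_error(config, err)) from err


print(validate_discovery({"host": "core-mosquitto", "port": 1883}))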

View File

@@ -0,0 +1 @@
"""Discovery service modules."""

View File

@@ -0,0 +1,9 @@
"""Discovery service for AdGuard."""
import voluptuous as vol
from ...validate import network_port
from ..const import ATTR_HOST, ATTR_PORT
SCHEMA = vol.Schema(
{vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
)

View File

@@ -0,0 +1,9 @@
"""Discovery service for Almond."""
import voluptuous as vol
from ...validate import network_port
from ..const import ATTR_HOST, ATTR_PORT
SCHEMA = vol.Schema(
{vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
)

View File

@@ -0,0 +1,14 @@
"""Discovery service for MQTT."""
import voluptuous as vol
from ...validate import network_port
from ..const import ATTR_API_KEY, ATTR_HOST, ATTR_PORT, ATTR_SERIAL
SCHEMA = vol.Schema(
{
vol.Required(ATTR_HOST): str,
vol.Required(ATTR_PORT): network_port,
vol.Required(ATTR_SERIAL): str,
vol.Required(ATTR_API_KEY): str,
}
)

View File

@@ -0,0 +1,9 @@
"""Discovery service for the ESPHome Dashboard."""
import voluptuous as vol
from ...validate import network_port
from ..const import ATTR_HOST, ATTR_PORT
SCHEMA = vol.Schema(
{vol.Required(ATTR_HOST): str, vol.Required(ATTR_PORT): network_port}
)
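Each of these service modules exposes a module-level SCHEMA, and applying one is plain voluptuous. A sketch against the MQTT schema above, with a stand-in for the project's network_port validator and assumed constant values ("host", "port", "serial", "api_key"):

import voluptuous as vol


def network_port(value):
    """Stand-in for the project's network_port validator (assumed: range check)."""
    port = int(value)
    if not 1 <= port <= 65535:
        raise vol.Invalid(f"invalid network port: {port}")
    return port


SCHEMA = vol.Schema(
    {
        vol.Required("host"): str,
        vol.Required("port"): network_port,
        vol.Required("serial"): str,
        vol.Required("api_key"): str,
    }
)

print(SCHEMA({"host": "core-mosquitto", "port": 1883, "serial": "A1B2", "api_key": "secret"}))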

Some files were not shown because too many files have changed in this diff.