Compare commits


2 Commits

Author    SHA1        Message                                             Date
ludeeus   de7ef86f52  Disallow '                                          2023-06-16 12:33:04 +00:00
ludeeus   6f614c91d7  Quote CIFS password to remove strict requirements   2023-06-15 16:57:53 +00:00
1622 changed files with 5221 additions and 17034 deletions
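
The two commits adjust how the Supervisor quotes a user-supplied CIFS password when building mount options, then reject the one character the quoting cannot protect. An illustrative Python sketch of that idea (a hypothetical helper, not the repository's actual mount code):

def cifs_mount_options(username: str, password: str) -> str:
    """Build a CIFS options string with the password single-quoted.

    Sketch only: quoting lifts the strict character requirements on the
    password, but a single quote would terminate the quoted value, so it
    is the one character that must be disallowed up front.
    """
    if "'" in password:
        raise ValueError("CIFS password must not contain a single quote (')")
    return f"username={username},password='{password}'"
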

View File

@@ -7,32 +7,34 @@
"appPort": ["9123:8123", "7357:4357"],
"postCreateCommand": "bash devcontainer_bootstrap",
"runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
"customizations": {
"vscode": {
"extensions": [
"ms-python.python",
"ms-python.pylint",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"esbenp.prettier-vscode"
],
"settings": {
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
},
"terminal.integrated.defaultProfile.linux": "zsh",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"python.pythonPath": "/usr/local/bin/python3",
"python.formatting.provider": "black",
"python.formatting.blackArgs": ["--target-version", "py312"],
"python.formatting.blackPath": "/usr/local/bin/black"
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"esbenp.prettier-vscode"
],
"mounts": ["type=volume,target=/var/lib/docker"],
"settings": {
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
}
},
"mounts": ["type=volume,target=/var/lib/docker"]
},
"terminal.integrated.defaultProfile.linux": "zsh",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"python.pythonPath": "/usr/local/bin/python3",
"python.linting.pylintEnabled": true,
"python.linting.enabled": true,
"python.formatting.provider": "black",
"python.formatting.blackArgs": ["--target-version", "py310"],
"python.formatting.blackPath": "/usr/local/bin/black",
"python.linting.banditPath": "/usr/local/bin/bandit",
"python.linting.flake8Path": "/usr/local/bin/flake8",
"python.linting.mypyPath": "/usr/local/bin/mypy",
"python.linting.pylintPath": "/usr/local/bin/pylint",
"python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
}
}

View File

@@ -33,7 +33,7 @@ on:
- setup.py
env:
DEFAULT_PYTHON: "3.12"
DEFAULT_PYTHON: "3.11"
BUILD_NAME: supervisor
BUILD_TYPE: supervisor
@@ -53,7 +53,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
with:
fetch-depth: 0
@@ -70,29 +70,25 @@ jobs:
- name: Get changed files
id: changed_files
if: steps.version.outputs.publish == 'false'
uses: masesgroup/retrieve-changed-files@v3.0.0
uses: jitterbit/get-changed-files@v1
- name: Check if requirements files changed
id: requirements
run: |
if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT"
if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then
echo "::set-output name=changed::true"
fi
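
The paired lines above show both generations of GitHub Actions step outputs: the deprecated `::set-output name=changed::true` workflow command, and its replacement, which appends name=value lines to the file referenced by $GITHUB_OUTPUT. The same mechanism expressed from Python, as a sketch (hypothetical helper, not part of this repository):

import os

def set_step_output(name: str, value: str) -> None:
    """Append a name=value pair to the $GITHUB_OUTPUT file.

    Equivalent to `echo "name=value" >> "$GITHUB_OUTPUT"` in a run step;
    replaces the deprecated ::set-output workflow command.
    """
    with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as fh:
        fh.write(f"{name}={value}\n")
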
build:
name: Build ${{ matrix.arch }} supervisor
needs: init
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
packages: write
strategy:
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
with:
fetch-depth: 0
@@ -106,13 +102,13 @@ jobs:
- name: Build wheels
if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2024.01.0
uses: home-assistant/wheels@2023.04.0
with:
abi: cp312
abi: cp311
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
apk: "libffi-dev;openssl-dev;yaml-dev"
apk: "libffi-dev;openssl-dev"
skip-binary: aiohttp
env-file: true
requirements: "requirements.txt"
@@ -123,33 +119,16 @@ jobs:
with:
type: ${{ env.BUILD_TYPE }}
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
- name: Login to DockerHub
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.0.0
uses: docker/login-action@v2.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Install Cosign
if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.3.0
with:
cosign-release: "v2.0.2"
- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
run: |
pip3 install setuptools dirhash
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
echo "${dir_hash}" > rootfs/supervisor.sha256
- name: Sign supervisor SHA256
if: needs.init.outputs.publish == 'true'
run: |
cosign sign-blob --yes rootfs/supervisor.sha256 --bundle rootfs/supervisor.sha256.sig
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v3.0.0
uses: docker/login-action@v2.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -160,17 +139,55 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor
uses: home-assistant/builder@2024.01.0
uses: home-assistant/builder@2023.06.0
with:
args: |
$BUILD_ARGS \
--${{ matrix.arch }} \
--target /data \
--cosign \
--generic ${{ needs.init.outputs.version }}
env:
CAS_API_KEY: ${{ secrets.CAS_TOKEN }}
codenotary:
name: CAS signature
needs: init
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v3.5.3
with:
fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v4.6.1
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Set version
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/version@master
with:
type: ${{ env.BUILD_TYPE }}
- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
id: dirhash
run: |
pip3 install dirhash
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
echo "::set-output name=dirhash::${dir_hash}"
- name: Signing Source
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/codenotary@master
with:
source: hash://${{ steps.dirhash.outputs.dirhash }}
asset: supervisor-${{ needs.init.outputs.version }}
token: ${{ secrets.CAS_TOKEN }}
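
Both signing paths (the Cosign flow on one side, the CAS/Codenotary job on the other) start from the same input: a SHA-256 directory hash over the Python sources in supervisor/. The dirhash CLI call from the workflow, expressed through the package's documented Python API:

from dirhash import dirhash

# Mirrors the CI invocation:
#   dirhash "$GITHUB_WORKSPACE/supervisor" -a sha256 --match "*.py"
digest = dirhash("supervisor", "sha256", match=["*.py"])
print(digest)  # one hex digest covering names and contents of all .py files
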
version:
name: Update version
needs: ["init", "run_supervisor"]
@@ -178,7 +195,7 @@ jobs:
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Initialize git
if: needs.init.outputs.publish == 'true'
@@ -199,15 +216,15 @@ jobs:
run_supervisor:
runs-on: ubuntu-latest
name: Run the Supervisor
needs: ["build", "init"]
needs: ["build", "codenotary", "init"]
timeout-minutes: 60
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2024.01.0
uses: home-assistant/builder@2023.06.0
with:
args: |
--test \
@@ -219,7 +236,7 @@ jobs:
if: needs.init.outputs.publish == 'true'
run: |
docker pull ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }}
docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} ghcr.io/home-assistant/amd64-hassio-supervisor:runner
docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} homeassistant/amd64-hassio-supervisor:runner
- name: Create the Supervisor
run: |
@@ -236,7 +253,7 @@ jobs:
-e SUPERVISOR_NAME=hassio_supervisor \
-e SUPERVISOR_DEV=1 \
-e SUPERVISOR_MACHINE="qemux86-64" \
ghcr.io/home-assistant/amd64-hassio-supervisor:runner
homeassistant/amd64-hassio-supervisor:runner
- name: Start the Supervisor
run: docker start hassio_supervisor
@@ -324,7 +341,7 @@ jobs:
if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
exit 1
fi
echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"
echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"
- name: Uninstall SSH add-on
run: |

View File

@@ -8,8 +8,9 @@ on:
pull_request: ~
env:
DEFAULT_PYTHON: "3.12"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
DEFAULT_PYTHON: "3.11"
PRE_COMMIT_HOME: ~/.cache/pre-commit
DEFAULT_CAS: v1.0.2
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
@@ -25,15 +26,15 @@ jobs:
name: Prepare Python dependencies
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Set up Python
id: python
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v4.6.1
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
@@ -47,10 +48,9 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
restore-keys: |
@@ -67,15 +67,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
@@ -88,7 +88,7 @@ jobs:
- name: Run black
run: |
. venv/bin/activate
black --target-version py312 --check supervisor tests setup.py
black --target-version py38 --check supervisor tests setup.py
lint-dockerfile:
name: Check Dockerfile
@@ -96,7 +96,7 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -111,15 +111,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
@@ -131,9 +131,9 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
@@ -155,15 +155,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
@@ -187,15 +187,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
@@ -207,9 +207,9 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
@@ -228,15 +228,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
@@ -248,9 +248,9 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
@@ -272,15 +272,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
@@ -304,15 +304,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
@@ -324,9 +324,9 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
@@ -345,19 +345,19 @@ jobs:
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign
uses: sigstore/cosign-installer@v3.3.0
- name: Install CAS tools
uses: home-assistant/actions/helpers/cas@master
with:
cosign-release: "v2.0.2"
version: ${{ env.DEFAULT_CAS }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
@@ -392,7 +392,7 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v4.0.0
uses: actions/upload-artifact@v3.1.2
with:
name: coverage-${{ matrix.python-version }}
path: .coverage
@@ -403,15 +403,15 @@ jobs:
needs: ["pytest", "prepare"]
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
@@ -422,7 +422,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.1
uses: actions/download-artifact@v3
- name: Combine coverage results
run: |
. venv/bin/activate

View File

@@ -9,7 +9,7 @@ jobs:
lock:
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v5.0.1
- uses: dessant/lock-threads@v4.0.1
with:
github-token: ${{ github.token }}
issue-inactive-days: "30"

View File

@@ -11,7 +11,7 @@ jobs:
name: Release Drafter
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
with:
fetch-depth: 0
@@ -33,10 +33,10 @@ jobs:
echo Current version: $latest
echo New target version: $datepre.$newpost
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
echo "::set-output name=version::$datepre.$newpost"
- name: Run Release Drafter
uses: release-drafter/release-drafter@v5.25.0
uses: release-drafter/release-drafter@v5.23.0
with:
tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }}

View File

@@ -10,9 +10,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3.5.3
- name: Sentry Release
uses: getsentry/action-release@v1.6.0
uses: getsentry/action-release@v1.4.1
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}

View File

@@ -9,7 +9,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9.0.0
- uses: actions/stale@v8.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30

View File

@@ -3,5 +3,4 @@ ignored:
- DL3006
- DL3013
- DL3018
- DL3042
- SC2155

View File

@@ -1,16 +1,16 @@
repos:
- repo: https://github.com/psf/black
rev: 23.12.1
rev: 23.1.0
hooks:
- id: black
args:
- --safe
- --quiet
- --target-version
- py312
- py310
files: ^((supervisor|tests)/.+)?[^/]+\.py$
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
rev: 6.0.0
hooks:
- id: flake8
additional_dependencies:
@@ -18,17 +18,17 @@ repos:
- pydocstyle==6.3.0
files: ^(supervisor|script|tests)/.+\.py$
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v4.3.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]
- id: check-json
- repo: https://github.com/PyCQA/isort
rev: 5.13.2
rev: 5.12.0
hooks:
- id: isort
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.0
rev: v3.4.0
hooks:
- id: pyupgrade
args: [--py312-plus]
args: [--py310-plus]
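
The --py310-plus / --py312-plus flag tells pyupgrade the oldest Python it must keep supporting, which gates rewrites such as PEP 604 union syntax. For example:

from typing import Optional, Union

def check(value: Optional[int]) -> Union[int, str]: ...

# pyupgrade --py310-plus (and later) rewrites those annotations to:
def check(value: int | None) -> int | str: ...
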

View File

@@ -7,15 +7,13 @@ ENV \
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1
ARG \
COSIGN_VERSION \
BUILD_ARCH
CAS_VERSION
# Install base
WORKDIR /usr/src
RUN \
set -x \
&& apk add --no-cache \
findutils \
eudev \
eudev-libs \
git \
@@ -23,23 +21,33 @@ RUN \
libpulse \
musl \
openssl \
yaml \
&& apk add --no-cache --virtual .build-dependencies \
build-base \
go \
\
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign
&& git clone -b "v${CAS_VERSION}" --depth 1 \
https://github.com/codenotary/cas \
&& cd cas \
&& make cas \
&& mv cas /usr/bin/cas \
\
&& apk del .build-dependencies \
&& rm -rf /root/go /root/.cache \
&& rm -rf /usr/src/cas
# Install requirements
COPY requirements.txt .
RUN \
export MAKEFLAGS="-j$(nproc)" \
&& pip3 install --only-binary=:all: \
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
"https://wheels.home-assistant.io/musllinux/" \
-r ./requirements.txt \
&& rm -f requirements.txt
# Install Home Assistant Supervisor
COPY . supervisor
RUN \
pip3 install -e ./supervisor \
pip3 install --no-cache-dir -e ./supervisor \
&& python3 -m compileall ./supervisor/supervisor

View File

@@ -1,18 +1,16 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
image: homeassistant/{arch}-hassio-supervisor
shadow_repository: ghcr.io/home-assistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.18
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.18
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.18
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.18
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.18
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.11-alpine3.16
armhf: ghcr.io/home-assistant/armhf-base-python:3.11-alpine3.16
armv7: ghcr.io/home-assistant/armv7-base-python:3.11-alpine3.16
amd64: ghcr.io/home-assistant/amd64-base-python:3.11-alpine3.16
i386: ghcr.io/home-assistant/i386-base-python:3.11-alpine3.16
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
cosign:
base_identity: https://github.com/home-assistant/docker-base/.*
identity: https://github.com/home-assistant/supervisor/.*
args:
COSIGN_VERSION: 2.0.2
CAS_VERSION: 1.0.2
labels:
io.hass.type: supervisor
org.opencontainers.image.title: Home Assistant Supervisor

pylintrc (new file, 45 lines)
View File

@@ -0,0 +1,45 @@
[MASTER]
reports=no
jobs=2
good-names=id,i,j,k,ex,Run,_,fp,T,os
extension-pkg-whitelist=
ciso8601
# Reasons disabled:
# format - handled by black
# locally-disabled - it spams too much
# duplicate-code - unavoidable
# cyclic-import - doesn't test if both import on load
# abstract-class-not-used - is flaky, should not show up but does
# unused-argument - generic callbacks and setup methods create a lot of warnings
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# abstract-method - with intro of async there are always methods missing
disable=
format,
abstract-method,
cyclic-import,
duplicate-code,
locally-disabled,
no-else-return,
not-context-manager,
too-few-public-methods,
too-many-arguments,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
unused-argument,
consider-using-with
[EXCEPTIONS]
overgeneral-exceptions=builtins.Exception
[TYPECHECK]
ignored-modules = distutils

View File

@@ -1,112 +0,0 @@
[build-system]
requires = ["setuptools~=68.0.0", "wheel~=0.40.0"]
build-backend = "setuptools.build_meta"
[project]
name = "Supervisor"
dynamic = ["version", "dependencies"]
license = { text = "Apache-2.0" }
description = "Open-source private cloud os for Home-Assistant based on HassOS"
readme = "README.md"
authors = [
{ name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
]
keywords = ["docker", "home-assistant", "api"]
requires-python = ">=3.12.0"
[project.urls]
"Homepage" = "https://www.home-assistant.io/"
"Source Code" = "https://github.com/home-assistant/supervisor"
"Bug Reports" = "https://github.com/home-assistant/supervisor/issues"
"Docs: Dev" = "https://developers.home-assistant.io/"
"Discord" = "https://www.home-assistant.io/join-chat/"
"Forum" = "https://community.home-assistant.io/"
[tool.setuptools]
platforms = ["any"]
zip-safe = false
include-package-data = true
[tool.setuptools.packages.find]
include = ["supervisor*"]
[tool.pylint.MAIN]
py-version = "3.11"
# Use a conservative default here; 2 should speed up most setups and not hurt
# any too bad. Override on command line as appropriate.
jobs = 2
persistent = false
extension-pkg-allow-list = ["ciso8601"]
[tool.pylint.BASIC]
class-const-naming-style = "any"
good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]
[tool.pylint."MESSAGES CONTROL"]
# Reasons disabled:
# format - handled by black
# abstract-method - with intro of async there are always methods missing
# cyclic-import - doesn't test if both import on load
# duplicate-code - unavoidable
# locally-disabled - it spams too much
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# unused-argument - generic callbacks and setup methods create a lot of warnings
disable = [
"format",
"abstract-method",
"cyclic-import",
"duplicate-code",
"locally-disabled",
"no-else-return",
"not-context-manager",
"too-few-public-methods",
"too-many-arguments",
"too-many-branches",
"too-many-instance-attributes",
"too-many-lines",
"too-many-locals",
"too-many-public-methods",
"too-many-return-statements",
"too-many-statements",
"unused-argument",
"consider-using-with",
]
[tool.pylint.REPORTS]
score = false
[tool.pylint.TYPECHECK]
ignored-modules = ["distutils"]
[tool.pylint.FORMAT]
expected-line-ending-format = "LF"
[tool.pylint.EXCEPTIONS]
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
[tool.pytest.ini_options]
testpaths = ["tests"]
norecursedirs = [".git"]
log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
log_date_format = "%Y-%m-%d %H:%M:%S"
asyncio_mode = "auto"
filterwarnings = [
"error",
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash",
"ignore::pytest.PytestUnraisableExceptionWarning",
]
[tool.isort]
multi_line_output = 3
include_trailing_comma = true
force_grid_wrap = 0
line_length = 88
indent = " "
force_sort_within_sections = true
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
default_section = "THIRDPARTY"
forced_separate = "tests"
combine_as_imports = true
use_parentheses = true
known_first_party = ["supervisor", "tests"]

pytest.ini (new file, 2 lines)
View File

@@ -0,0 +1,2 @@
[pytest]
asyncio_mode = auto

View File

@@ -1,30 +1,26 @@
aiodns==3.1.1
aiohttp==3.9.1
aiohttp-fast-url-dispatcher==0.3.0
async_timeout==4.0.3
aiodns==3.0.0
aiohttp==3.8.4
async_timeout==4.0.2
atomicwrites-homeassistant==1.4.1
attrs==23.2.0
awesomeversion==23.11.0
brotli==1.1.0
ciso8601==2.3.1
colorlog==6.8.0
attrs==23.1.0
awesomeversion==23.5.0
brotli==1.0.9
ciso8601==2.3.0
colorlog==6.7.0
cpe==1.2.1
cryptography==41.0.7
debugpy==1.8.0
deepmerge==1.1.1
cryptography==41.0.1
debugpy==1.6.7
deepmerge==1.1.0
dirhash==0.2.1
docker==7.0.0
faust-cchardet==2.1.19
gitpython==3.1.41
jinja2==3.1.3
orjson==3.9.10
docker==6.1.3
faust-cchardet==2.1.18
gitpython==3.1.31
jinja2==3.1.2
pulsectl==23.5.2
pyudev==0.24.1
PyYAML==6.0.1
securetar==2023.12.0
sentry-sdk==1.39.2
setuptools==69.0.3
voluptuous==0.14.1
dbus-fast==2.21.0
typing_extensions==4.9.0
zlib-fast==0.1.0
ruamel.yaml==0.17.21
securetar==2023.3.0
sentry-sdk==1.25.1
voluptuous==0.13.1
dbus-fast==1.86.0
typing_extensions==4.6.3

View File

@@ -1,16 +1,16 @@
black==23.12.1
coverage==7.4.0
black==23.3.0
coverage==7.2.7
flake8-docstrings==1.7.0
flake8==7.0.0
pre-commit==3.6.0
flake8==6.0.0
pre-commit==3.3.3
pydocstyle==6.3.0
pylint==3.0.3
pytest-aiohttp==1.0.5
pytest-asyncio==0.23.3
pylint==2.17.4
pytest-aiohttp==1.0.4
pytest-asyncio==0.18.3
pytest-cov==4.1.0
pytest-timeout==2.2.0
pytest==7.4.4
pyupgrade==3.15.0
time-machine==2.13.0
typing_extensions==4.9.0
urllib3==2.1.0
pytest-timeout==2.1.0
pytest==7.3.2
pyupgrade==3.6.0
time-machine==2.9.0
typing_extensions==4.6.3
urllib3==2.0.3

View File

@@ -15,7 +15,7 @@ do
if [[ "${supervisor_state}" = "running" ]]; then
# Check API
if bashio::supervisor.ping > /dev/null; then
if bashio::supervisor.ping; then
failed_count=0
else
bashio::log.warning "Maybe found an issue on API healthy"

View File

@@ -0,0 +1,4 @@
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE03LvYuz79GTJx4uKp3w6NrSe5JZI
iBtgzzYi0YQYtZO/r+xFpgDJEa0gLHkXtl94fpqrFiN89In83lzaszbZtA==
-----END PUBLIC KEY-----

View File

@@ -0,0 +1,8 @@
{
"currentcontext": {
"LcHost": "cas.codenotary.com",
"LcPort": "443"
},
"schemaversion": 3,
"users": null
}

View File

@@ -1,3 +1,17 @@
[isort]
multi_line_output = 3
include_trailing_comma=True
force_grid_wrap=0
line_length=88
indent = " "
force_sort_within_sections = true
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
default_section = THIRDPARTY
forced_separate = tests
combine_as_imports = true
use_parentheses = true
known_first_party = supervisor,tests
[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
doctests = True

View File

@@ -1,27 +1,60 @@
"""Home Assistant Supervisor setup."""
from pathlib import Path
import re
from setuptools import setup
RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")
SUPERVISOR_DIR = Path(__file__).parent
REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt"
CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py"
REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8")
CONSTANTS = CONST_FILE.read_text(encoding="utf-8")
def _get_supervisor_version():
for line in CONSTANTS.split("\n"):
if match := RE_SUPERVISOR_VERSION.match(line):
return match.group(1)
return "99.9.9dev"
from supervisor.const import SUPERVISOR_VERSION
setup(
version=_get_supervisor_version(),
dependencies=REQUIREMENTS.split("\n"),
name="Supervisor",
version=SUPERVISOR_VERSION,
license="BSD License",
author="The Home Assistant Authors",
author_email="hello@home-assistant.io",
url="https://home-assistant.io/",
description=("Open-source private cloud os for Home-Assistant" " based on HassOS"),
long_description=(
"A maintainless private cloud operator system that"
"setup a Home-Assistant instance. Based on HassOS"
),
classifiers=[
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Home Automation",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Scientific/Engineering :: Atmospheric Science",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 3.8",
],
keywords=["docker", "home-assistant", "api"],
zip_safe=False,
platforms="any",
packages=[
"supervisor.addons",
"supervisor.api",
"supervisor.backups",
"supervisor.dbus.network",
"supervisor.dbus.network.setting",
"supervisor.dbus",
"supervisor.discovery.services",
"supervisor.discovery",
"supervisor.docker",
"supervisor.homeassistant",
"supervisor.host",
"supervisor.jobs",
"supervisor.misc",
"supervisor.plugins",
"supervisor.resolution.checks",
"supervisor.resolution.evaluations",
"supervisor.resolution.fixups",
"supervisor.resolution",
"supervisor.security",
"supervisor.services.modules",
"supervisor.services",
"supervisor.store",
"supervisor.utils",
"supervisor",
],
include_package_data=True,
)
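
The replacement setup.py no longer imports supervisor.const (which would load the package, and therefore its dependencies, at build time); it scans the file's text with RE_SUPERVISOR_VERSION and falls back to "99.9.9dev". A quick check of that regex (version string illustrative):

import re

RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")

match = RE_SUPERVISOR_VERSION.match('SUPERVISOR_VERSION = "2023.06.2"')
assert match is not None
assert match.group(1) == '"2023.06.2"'  # the quotes are part of the capture
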

View File

@@ -5,13 +5,7 @@ import logging
from pathlib import Path
import sys
import zlib_fast
# Enable fast zlib before importing supervisor
zlib_fast.enable()
from supervisor import bootstrap # noqa: E402
from supervisor.utils.logging import activate_log_queue_handler # noqa: E402
from supervisor import bootstrap
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -44,8 +38,6 @@ if __name__ == "__main__":
executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
loop.set_default_executor(executor)
activate_log_queue_handler()
_LOGGER.info("Initializing Supervisor setup")
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
loop.set_debug(coresys.config.debug)

View File

@@ -1 +1,427 @@
"""Init file for Supervisor add-ons."""
import asyncio
from contextlib import suppress
import logging
import tarfile
from typing import Union
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonConfigurationError,
AddonsError,
AddonsJobError,
AddonsNotSupportedError,
CoreDNSError,
DockerAPIError,
DockerError,
DockerNotFound,
HomeAssistantAPIError,
HostAppArmorError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils import check_exception_chain
from ..utils.sentry import capture_exception
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData
_LOGGER: logging.Logger = logging.getLogger(__name__)
AnyAddon = Union[Addon, AddonStore]
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.data: AddonsData = AddonsData(coresys)
self.local: dict[str, Addon] = {}
self.store: dict[str, AddonStore] = {}
@property
def all(self) -> list[AnyAddon]:
"""Return a list of all add-ons."""
addons: dict[str, AnyAddon] = {**self.store, **self.local}
return list(addons.values())
@property
def installed(self) -> list[Addon]:
"""Return a list of all installed add-ons."""
return list(self.local.values())
def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
"""Return an add-on from slug.
Prio:
1 - Local
2 - Store
"""
if addon_slug in self.local:
return self.local[addon_slug]
if not local_only:
return self.store.get(addon_slug)
return None
def from_token(self, token: str) -> Addon | None:
"""Return an add-on from Supervisor token."""
for addon in self.installed:
if token == addon.supervisor_token:
return addon
return None
async def load(self) -> None:
"""Start up add-on management."""
tasks = []
for slug in self.data.system:
addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(self.sys_create_task(addon.load()))
# Run initial tasks
_LOGGER.info("Found %d installed add-ons", len(tasks))
if tasks:
await asyncio.wait(tasks)
# Sync DNS
await self.sync_dns()
async def boot(self, stage: AddonStartup) -> None:
"""Boot add-ons with mode auto."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be started
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
if not tasks:
return
# Start Add-ons sequential
# avoid issue on slow IO
for addon in tasks:
try:
await addon.start()
except AddonsError as err:
# Check if there is an system/user issue
if check_exception_chain(
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
):
addon.boot = AddonBoot.MANUAL
addon.save_persist()
except Exception as err: # pylint: disable=broad-except
capture_exception(err)
else:
continue
_LOGGER.warning("Can't start Add-on %s", addon.slug)
await asyncio.sleep(self.sys_config.wait_boot)
async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.state != AddonState.STARTED or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be stopped
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
if not tasks:
return
# Stop Add-ons sequential
# avoid issue on slow IO
for addon in tasks:
try:
await addon.stop()
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
capture_exception(err)
@Job(
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def install(self, slug: str) -> None:
"""Install an add-on."""
if slug in self.local:
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
store = self.store.get(slug)
if not store:
raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
store.validate_availability()
self.data.install(store)
addon = Addon(self.coresys, slug)
await addon.load()
if not addon.path_data.is_dir():
_LOGGER.info(
"Creating Home Assistant add-on data folder %s", addon.path_data
)
addon.path_data.mkdir()
# Setup/Fix AppArmor profile
await addon.install_apparmor()
try:
await addon.instance.install(store.version, store.image, arch=addon.arch)
except DockerError as err:
self.data.uninstall(addon)
raise AddonsError() from err
self.local[slug] = addon
# Reload ingress tokens
if addon.with_ingress:
await self.sys_ingress.reload()
_LOGGER.info("Add-on '%s' successfully installed", slug)
async def uninstall(self, slug: str) -> None:
"""Remove an add-on."""
if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug)
return
addon = self.local[slug]
try:
await addon.instance.remove()
except DockerError as err:
raise AddonsError() from err
addon.state = AddonState.UNKNOWN
await addon.unload()
# Cleanup audio settings
if addon.path_pulse.exists():
with suppress(OSError):
addon.path_pulse.unlink()
# Cleanup AppArmor profile
with suppress(HostAppArmorError):
await addon.uninstall_apparmor()
# Cleanup Ingress panel from sidebar
if addon.ingress_panel:
addon.ingress_panel = False
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
# Cleanup Ingress dynamic port assignment
if addon.with_ingress:
self.sys_create_task(self.sys_ingress.reload())
self.sys_ingress.del_dynamic_port(slug)
# Cleanup discovery data
for message in self.sys_discovery.list_messages:
if message.addon != addon.slug:
continue
self.sys_discovery.remove(message)
# Cleanup services data
for service in self.sys_services.list_services:
if addon.slug not in service.active:
continue
service.del_service_data(addon)
self.data.uninstall(addon)
self.local.pop(slug)
_LOGGER.info("Add-on '%s' successfully removed", slug)
@Job(
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def update(self, slug: str, backup: bool | None = False) -> None:
"""Update add-on."""
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
if addon.version == store.version:
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
# Check if available, Maybe something have changed
store.validate_availability()
if backup:
await self.sys_backups.do_backup_partial(
name=f"addon_{addon.slug}_{addon.version}",
homeassistant=False,
addons=[addon.slug],
)
# Update instance
last_state: AddonState = addon.state
old_image = addon.image
try:
await addon.instance.update(store.version, store.image)
except DockerError as err:
raise AddonsError() from err
_LOGGER.info("Add-on '%s' successfully updated", slug)
self.data.update(store)
# Cleanup
with suppress(DockerError):
await addon.instance.cleanup(old_image=old_image)
# Setup/Fix AppArmor profile
await addon.install_apparmor()
# restore state
if last_state == AddonState.STARTED:
await addon.start()
@Job(
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def rebuild(self, slug: str) -> None:
"""Perform a rebuild of local build add-on."""
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
# Check if a rebuild is possible now
if addon.version != store.version:
raise AddonsError(
"Version changed, use Update instead Rebuild", _LOGGER.error
)
if not addon.need_build:
raise AddonsNotSupportedError(
"Can't rebuild a image based add-on", _LOGGER.error
)
# remove docker container but not addon config
last_state: AddonState = addon.state
try:
await addon.instance.remove()
await addon.instance.install(addon.version)
except DockerError as err:
raise AddonsError() from err
self.data.update(store)
_LOGGER.info("Add-on '%s' successfully rebuilt", slug)
# restore state
if last_state == AddonState.STARTED:
await addon.start()
@Job(
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
"""Restore state of an add-on."""
if slug not in self.local:
_LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug)
else:
_LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug]
await addon.restore(tar_file)
# Check if new
if slug not in self.local:
_LOGGER.info("Detect new Add-on after restore %s", slug)
self.local[slug] = addon
# Update ingress
if addon.with_ingress:
await self.sys_ingress.reload()
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
@Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST])
async def repair(self) -> None:
"""Repair local add-ons."""
needs_repair: list[Addon] = []
# Evaluate Add-ons to repair
for addon in self.installed:
if await addon.instance.exists():
continue
needs_repair.append(addon)
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
if not needs_repair:
return
for addon in needs_repair:
_LOGGER.info("Repairing for add-on: %s", addon.slug)
with suppress(DockerError, KeyError):
# Need pull a image again
if not addon.need_build:
await addon.instance.install(addon.version, addon.image)
continue
# Need local lookup
if addon.need_build and not addon.is_detached:
store = self.store[addon.slug]
# If this add-on is available for rebuild
if addon.version == store.version:
await addon.instance.install(addon.version, addon.image)
continue
_LOGGER.error("Can't repair %s", addon.slug)
with suppress(AddonsError):
await self.uninstall(addon.slug)
async def sync_dns(self) -> None:
"""Sync add-ons DNS names."""
# Update hosts
for addon in self.installed:
try:
if not await addon.instance.is_running():
continue
except DockerError as err:
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
self.sys_resolution.create_issue(
IssueType.CORRUPT_DOCKER,
ContextType.ADDON,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
capture_exception(err)
else:
self.sys_plugins.dns.add_host(
ipv4=addon.ip_address, names=[addon.hostname], write=False
)
# Write hosts files
with suppress(CoreDNSError):
self.sys_plugins.dns.write_hosts()
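
boot() above relies on check_exception_chain to decide whether a failed start is a configuration/user problem (flip the add-on to manual boot) rather than a transient one. A simplified sketch of such a chain walk (an assumption, not the repository's exact implementation):

def check_exception_chain(
    err: BaseException | None,
    kinds: type[BaseException] | tuple[type[BaseException], ...],
) -> bool:
    """Walk __cause__/__context__ looking for any of the given types."""
    while err is not None:
        if isinstance(err, kinds):
            return True
        err = err.__cause__ or err.__context__
    return False
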

View File

@@ -3,7 +3,6 @@ import asyncio
from collections.abc import Awaitable
from contextlib import suppress
from copy import deepcopy
import errno
from ipaddress import IPv4Address
import logging
from pathlib import Path, PurePath
@@ -65,15 +64,12 @@ from ..exceptions import (
AddonsNotSupportedError,
ConfigurationFileError,
DockerError,
HomeAssistantAPIError,
HostAppArmorError,
)
from ..hardware.data import Device
from ..homeassistant.const import WSEvent, WSType
from ..jobs.const import JobExecutionLimit
from ..jobs.decorator import Job
from ..resolution.const import UnhealthyReason
from ..store.addon import AddonStore
from ..utils import check_port
from ..utils.apparmor import adjust_profile
from ..utils.json import read_json_file, write_json_file
@@ -84,7 +80,6 @@ from .const import (
WATCHDOG_THROTTLE_MAX_CALLS,
WATCHDOG_THROTTLE_PERIOD,
AddonBackupMode,
MappingType,
)
from .model import AddonModel, Data
from .options import AddonOptions
@@ -104,7 +99,6 @@ RE_WATCHDOG = re.compile(
)
WATCHDOG_TIMEOUT = aiohttp.ClientTimeout(total=10)
STARTUP_TIMEOUT = 120
_OPTIONS_MERGER: Final = Merger(
type_strategies=[(dict, ["merge"])],
@@ -112,14 +106,6 @@ _OPTIONS_MERGER: Final = Merger(
type_conflict_strategies=["override"],
)
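
_OPTIONS_MERGER is a deepmerge.Merger: dictionaries merge recursively, while every other type (and any type conflict) resolves by override. A small illustration of that behaviour (values made up):

from deepmerge import Merger

merger = Merger([(dict, ["merge"])], ["override"], ["override"])

base = {"options": {"host": "core"}, "ports": [80]}
user = {"options": {"ssl": True}, "ports": [443]}
assert merger.merge(base, user) == {
    "options": {"host": "core", "ssl": True},  # dicts merged key by key
    "ports": [443],  # lists have no strategy, so the newer value wins
}
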
# Backups just need to know if an addon was running or not
# Map other addon states to those two
_MAP_ADDON_STATE = {
AddonState.STARTUP: AddonState.STARTED,
AddonState.ERROR: AddonState.STOPPED,
AddonState.UNKNOWN: AddonState.STOPPED,
}
class Addon(AddonModel):
"""Hold data for add-on inside Supervisor."""
@@ -133,8 +119,54 @@ class Addon(AddonModel):
self.sys_hardware.helper.last_boot != self.sys_config.last_boot
)
self._listeners: list[EventListener] = []
self._startup_event = asyncio.Event()
self._startup_task: asyncio.Task | None = None
@Job(
name=f"addon_{slug}_restart_after_problem",
limit=JobExecutionLimit.THROTTLE_RATE_LIMIT,
throttle_period=WATCHDOG_THROTTLE_PERIOD,
throttle_max_calls=WATCHDOG_THROTTLE_MAX_CALLS,
on_condition=AddonsJobError,
)
async def restart_after_problem(addon: Addon, state: ContainerState):
"""Restart unhealthy or failed addon."""
attempts = 0
while await addon.instance.current_state() == state:
if not addon.in_progress:
_LOGGER.warning(
"Watchdog found addon %s is %s, restarting...",
addon.name,
state.value,
)
try:
if state == ContainerState.FAILED:
# Ensure failed container is removed before attempting reanimation
if attempts == 0:
with suppress(DockerError):
await addon.instance.stop(remove_container=True)
await addon.start()
else:
await addon.restart()
except AddonsError as err:
attempts = attempts + 1
_LOGGER.error(
"Watchdog restart of addon %s failed!", addon.name
)
capture_exception(err)
else:
break
if attempts >= WATCHDOG_MAX_ATTEMPTS:
_LOGGER.critical(
"Watchdog cannot restart addon %s, failed all %s attempts",
addon.name,
attempts,
)
break
await asyncio.sleep(WATCHDOG_RETRY_SECONDS)
self._restart_after_problem = restart_after_problem
def __repr__(self) -> str:
"""Return internal representation."""
@@ -150,13 +182,7 @@ class Addon(AddonModel):
"""Set the add-on into new state."""
if self._state == new_state:
return
old_state = self._state
self._state = new_state
# Signal listeners about addon state change
if new_state == AddonState.STARTED or old_state == AddonState.STARTUP:
self._startup_event.set()
self.sys_homeassistant.websocket.send_message(
{
ATTR_TYPE: WSType.SUPERVISOR_EVENT,
@@ -186,7 +212,6 @@ class Addon(AddonModel):
)
)
await self._check_ingress_port()
with suppress(DockerError):
await self.instance.attach(version=self.version)
@@ -205,11 +230,6 @@ class Addon(AddonModel):
"""Return add-on data from store."""
return self.sys_store.data.addons.get(self.slug, self.data)
@property
def addon_store(self) -> AddonStore | None:
"""Return store representation of addon."""
return self.sys_addons.store.get(self.slug)
@property
def persist(self) -> Data:
"""Return add-on data/config."""
@@ -398,7 +418,7 @@ class Addon(AddonModel):
port = self.data[ATTR_INGRESS_PORT]
if port == 0:
raise RuntimeError(f"No port set for add-on {self.slug}")
return self.sys_ingress.get_dynamic_port(self.slug)
return port
@property
@@ -464,21 +484,6 @@ class Addon(AddonModel):
"""Return add-on data path external for Docker."""
return PurePath(self.sys_config.path_extern_addons_data, self.slug)
@property
def addon_config_used(self) -> bool:
"""Add-on is using its public config folder."""
return MappingType.ADDON_CONFIG in self.map_volumes
@property
def path_config(self) -> Path:
"""Return add-on config path inside Supervisor."""
return Path(self.sys_config.path_addon_configs, self.slug)
@property
def path_extern_config(self) -> PurePath:
"""Return add-on config path external for Docker."""
return PurePath(self.sys_config.path_extern_addon_configs, self.slug)
@property
def path_options(self) -> Path:
"""Return path to add-on options."""
@@ -542,7 +547,7 @@ class Addon(AddonModel):
# TCP monitoring
if s_prefix == "tcp":
return await check_port(self.ip_address, port)
return await self.sys_run_in_executor(check_port, self.ip_address, port)
# lookup the correct protocol from config
if t_proto:
@@ -558,7 +563,7 @@ class Addon(AddonModel):
) as req:
if req.status < 300:
return True
except (TimeoutError, aiohttp.ClientError):
except (asyncio.TimeoutError, aiohttp.ClientError):
pass
return False
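
The except-clause swap above works because, since Python 3.11, asyncio.TimeoutError is an alias of the builtin TimeoutError, so catching the builtin also catches asyncio timeouts:

import asyncio

# Python 3.11+: both names refer to the same class.
assert asyncio.TimeoutError is TimeoutError
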
@@ -585,201 +590,16 @@ class Addon(AddonModel):
raise AddonConfigurationError()
@Job(
name="addon_unload",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def unload(self) -> None:
"""Unload add-on and remove data."""
if self._startup_task:
# If we were waiting on startup, cancel that and let the task finish before proceeding
self._startup_task.cancel(f"Removing add-on {self.name} from system")
with suppress(asyncio.CancelledError):
await self._startup_task
for listener in self._listeners:
self.sys_bus.remove_listener(listener)
if self.path_data.is_dir():
_LOGGER.info("Removing add-on data folder %s", self.path_data)
await remove_data(self.path_data)
async def _check_ingress_port(self):
"""Assign a ingress port if dynamic port selection is used."""
if not self.with_ingress:
if not self.path_data.is_dir():
return
if self.data[ATTR_INGRESS_PORT] == 0:
self.data[ATTR_INGRESS_PORT] = await self.sys_ingress.get_dynamic_port(
self.slug
)
@Job(
name="addon_install",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def install(self) -> None:
"""Install and setup this addon."""
self.sys_addons.data.install(self.addon_store)
await self.load()
if not self.path_data.is_dir():
_LOGGER.info(
"Creating Home Assistant add-on data folder %s", self.path_data
)
self.path_data.mkdir()
# Setup/Fix AppArmor profile
await self.install_apparmor()
# Install image
try:
await self.instance.install(
self.latest_version, self.addon_store.image, arch=self.arch
)
except DockerError as err:
self.sys_addons.data.uninstall(self)
raise AddonsError() from err
# Add to addon manager
self.sys_addons.local[self.slug] = self
# Reload ingress tokens
if self.with_ingress:
await self.sys_ingress.reload()
@Job(
name="addon_uninstall",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def uninstall(self) -> None:
"""Uninstall and cleanup this addon."""
try:
await self.instance.remove()
except DockerError as err:
raise AddonsError() from err
self.state = AddonState.UNKNOWN
await self.unload()
# Cleanup audio settings
if self.path_pulse.exists():
with suppress(OSError):
self.path_pulse.unlink()
# Cleanup AppArmor profile
with suppress(HostAppArmorError):
await self.uninstall_apparmor()
# Cleanup Ingress panel from sidebar
if self.ingress_panel:
self.ingress_panel = False
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(self)
# Cleanup Ingress dynamic port assignment
if self.with_ingress:
self.sys_create_task(self.sys_ingress.reload())
self.sys_ingress.del_dynamic_port(self.slug)
# Cleanup discovery data
for message in self.sys_discovery.list_messages:
if message.addon != self.slug:
continue
self.sys_discovery.remove(message)
# Cleanup services data
for service in self.sys_services.list_services:
if self.slug not in service.active:
continue
service.del_service_data(self)
# Remove from addon manager
self.sys_addons.data.uninstall(self)
self.sys_addons.local.pop(self.slug)
@Job(
name="addon_update",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def update(self) -> asyncio.Task | None:
"""Update this addon to latest version.
Returns a Task that completes when addon has state 'started' (see start)
if it was running. Else nothing is returned.
"""
old_image = self.image
# Cache data to prevent races with other updates to global
store = self.addon_store.clone()
try:
await self.instance.update(store.version, store.image, arch=self.arch)
except DockerError as err:
raise AddonsError() from err
# Stop the addon if running
if (last_state := self.state) in {AddonState.STARTED, AddonState.STARTUP}:
await self.stop()
try:
_LOGGER.info("Add-on '%s' successfully updated", self.slug)
self.sys_addons.data.update(store)
await self._check_ingress_port()
# Cleanup
with suppress(DockerError):
await self.instance.cleanup(
old_image=old_image, image=store.image, version=store.version
)
# Setup/Fix AppArmor profile
await self.install_apparmor()
finally:
# restore state. Return Task for caller if no exception
out = (
await self.start()
if last_state in {AddonState.STARTED, AddonState.STARTUP}
else None
)
return out
@Job(
name="addon_rebuild",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def rebuild(self) -> asyncio.Task | None:
"""Rebuild this addons container and image.
Returns a Task that completes when addon has state 'started' (see start)
if it was running. Else nothing is returned.
"""
last_state: AddonState = self.state
try:
# remove docker container but not addon config
try:
await self.instance.remove()
await self.instance.install(self.version)
except DockerError as err:
raise AddonsError() from err
self.sys_addons.data.update(self.addon_store)
_LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)
finally:
# restore state
out = (
await self.start()
if last_state in [AddonState.STARTED, AddonState.STARTUP]
else None
)
return out
_LOGGER.info("Removing add-on data folder %s", self.path_data)
await remove_data(self.path_data)
def write_pulse(self) -> None:
"""Write asound config to file and return True on success."""
@@ -795,8 +615,6 @@ class Addon(AddonModel):
try:
self.path_pulse.write_text(pulse_config, encoding="utf-8")
except OSError as err:
if err.errno == errno.EBADMSG:
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error(
"Add-on %s can't write pulse/client.config: %s", self.slug, err
)
@@ -862,37 +680,11 @@ class Addon(AddonModel):
return False
return True
async def _wait_for_startup(self) -> None:
"""Wait for startup event to be set with timeout."""
try:
self._startup_task = self.sys_create_task(self._startup_event.wait())
await asyncio.wait_for(self._startup_task, STARTUP_TIMEOUT)
except TimeoutError:
_LOGGER.warning(
"Timeout while waiting for addon %s to start, took more than %s seconds",
self.name,
STARTUP_TIMEOUT,
)
except asyncio.CancelledError as err:
_LOGGER.info("Wait for addon startup task cancelled due to: %s", err)
finally:
self._startup_task = None
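
_wait_for_startup is the standard asyncio shape for "wait on an event, give up after a deadline, and keep the task so it can be cancelled during unload". Stripped to its core (illustrative, outside the Addon class):

import asyncio

async def wait_for_event(event: asyncio.Event, timeout: float = 120) -> bool:
    """Return True if the event was set before the timeout expired."""
    task = asyncio.create_task(event.wait())
    try:
        await asyncio.wait_for(task, timeout)
        return True
    except asyncio.TimeoutError:  # the builtin TimeoutError on Python 3.11+
        return False
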
@Job(
name="addon_start",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def start(self) -> asyncio.Task:
"""Set options and start add-on.
Returns a Task that completes when addon has state 'started'.
For addons with a healthcheck, that is when they become healthy or unhealthy.
Addons without a healthcheck have state 'started' immediately.
"""
async def start(self) -> None:
"""Set options and start add-on."""
if await self.instance.is_running():
_LOGGER.warning("%s is already running!", self.slug)
return self.sys_create_task(self._wait_for_startup())
return
# Access Token
self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
@@ -905,33 +697,13 @@ class Addon(AddonModel):
if self.with_audio:
self.write_pulse()
def _check_addon_config_dir():
if self.path_config.is_dir():
return
_LOGGER.info(
"Creating Home Assistant add-on config folder %s", self.path_config
)
self.path_config.mkdir()
if self.addon_config_used:
await self.sys_run_in_executor(_check_addon_config_dir)
# Start Add-on
self._startup_event.clear()
try:
await self.instance.run()
except DockerError as err:
self.state = AddonState.ERROR
raise AddonsError() from err
return self.sys_create_task(self._wait_for_startup())
@Job(
name="addon_stop",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def stop(self) -> None:
"""Stop add-on."""
self._manual_stop = True
@@ -941,19 +713,11 @@ class Addon(AddonModel):
self.state = AddonState.ERROR
raise AddonsError() from err
@Job(
name="addon_restart",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def restart(self) -> asyncio.Task:
"""Restart add-on.
Returns a Task that completes when addon has state 'started' (see start).
"""
async def restart(self) -> None:
"""Restart add-on."""
with suppress(AddonsError):
await self.stop()
return await self.start()
await self.start()
def logs(self) -> Awaitable[bytes]:
"""Return add-ons log output.
@@ -976,13 +740,11 @@ class Addon(AddonModel):
except DockerError as err:
raise AddonsError() from err
@Job(
name="addon_write_stdin",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def write_stdin(self, data) -> None:
"""Write data to add-on stdin."""
"""Write data to add-on stdin.
Return a coroutine.
"""
if not self.with_stdin:
raise AddonsNotSupportedError(
f"Add-on {self.slug} does not support writing to stdin!", _LOGGER.error
@@ -1010,59 +772,9 @@ class Addon(AddonModel):
_LOGGER.error,
) from err
@Job(
name="addon_begin_backup",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def begin_backup(self) -> bool:
"""Execute pre commands or stop addon if necessary.
Returns value of `is_running`. Caller should not call `end_backup` if return is false.
"""
if not await self.is_running():
return False
if self.backup_mode == AddonBackupMode.COLD:
_LOGGER.info("Shutdown add-on %s for cold backup", self.slug)
await self.stop()
elif self.backup_pre is not None:
await self._backup_command(self.backup_pre)
return True
@Job(
name="addon_end_backup",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def end_backup(self) -> asyncio.Task | None:
"""Execute post commands or restart addon if necessary.
Returns a Task that completes when addon has state 'started' (see start)
for cold backup. Else nothing is returned.
"""
if self.backup_mode is AddonBackupMode.COLD:
_LOGGER.info("Starting add-on %s again", self.slug)
return await self.start()
if self.backup_post is not None:
await self._backup_command(self.backup_post)
return None
@Job(
name="addon_backup",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def backup(self, tar_file: tarfile.TarFile) -> asyncio.Task | None:
"""Backup state of an add-on.
Returns a Task that completes when addon has state 'started' (see start)
for cold backup. Else nothing is returned.
"""
wait_for_start: Awaitable[None] | None = None
async def backup(self, tar_file: tarfile.TarFile) -> None:
"""Backup state of an add-on."""
is_running = await self.is_running()
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
temp_path = Path(temp)
@@ -1078,7 +790,7 @@ class Addon(AddonModel):
ATTR_USER: self.persist,
ATTR_SYSTEM: self.data,
ATTR_VERSION: self.version,
ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
ATTR_STATE: self.state,
}
# Store local configs/state
@@ -1114,16 +826,16 @@ class Addon(AddonModel):
arcname="data",
)
# Backup config
if self.addon_config_used:
atomic_contents_add(
backup,
self.path_config,
excludes=self.backup_exclude,
arcname="config",
)
if (
is_running
and self.backup_mode == AddonBackupMode.HOT
and self.backup_pre is not None
):
await self._backup_command(self.backup_pre)
elif is_running and self.backup_mode == AddonBackupMode.COLD:
_LOGGER.info("Shutdown add-on %s for cold backup", self.slug)
await self.instance.stop()
is_running = await self.begin_backup()
try:
_LOGGER.info("Building backup for add-on %s", self.slug)
await self.sys_run_in_executor(_write_tarfile)
@@ -1132,34 +844,26 @@ class Addon(AddonModel):
f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
) from err
finally:
if is_running:
wait_for_start = await self.end_backup()
if (
is_running
and self.backup_mode == AddonBackupMode.HOT
and self.backup_post is not None
):
await self._backup_command(self.backup_post)
elif is_running and self.backup_mode is AddonBackupMode.COLD:
_LOGGER.info("Starting add-on %s again", self.slug)
await self.start()
_LOGGER.info("Finish backup for addon %s", self.slug)
return wait_for_start
@Job(
name="addon_restore",
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def restore(self, tar_file: tarfile.TarFile) -> asyncio.Task | None:
"""Restore state of an add-on.
Returns a Task that completes when addon has state 'started' (see start)
if addon is started after restore. Else nothing is returned.
"""
wait_for_start: Awaitable[None] | None = None
async def restore(self, tar_file: tarfile.TarFile) -> None:
"""Restore state of an add-on."""
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
# extract backup
def _extract_tarfile():
"""Extract tar backup."""
with tar_file as backup:
backup.extractall(
path=Path(temp),
members=secure_path(backup),
filter="fully_trusted",
)
backup.extractall(path=Path(temp), members=secure_path(backup))
try:
await self.sys_run_in_executor(_extract_tarfile)
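filter="fully_trusted" opts out of the PEP 706 tarfile extraction filters (available from Python 3.12 and backported to late 3.8-3.11 releases), since the members are already screened by secure_path(). For comparison, a minimal sketch of the stricter default (hypothetical paths):
import tarfile

# "data" rejects absolute paths, parent-directory traversal, device nodes,
# and suspicious link targets; "fully_trusted" keeps the legacy behavior.
with tarfile.open("/tmp/backup.tar") as tar:
    tar.extractall(path="/tmp/restore", filter="data")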
@@ -1197,84 +901,66 @@ class Addon(AddonModel):
self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
)
# Stop it first if its running
if await self.instance.is_running():
await self.stop()
# Check version / restore image
version = data[ATTR_VERSION]
if not await self.instance.exists():
_LOGGER.info("Restore/Install of image for addon %s", self.slug)
try:
# Check version / restore image
version = data[ATTR_VERSION]
if not await self.instance.exists():
_LOGGER.info("Restore/Install of image for addon %s", self.slug)
image_file = Path(temp, "image.tar")
if image_file.is_file():
with suppress(DockerError):
await self.instance.import_image(image_file)
else:
with suppress(DockerError):
await self.instance.install(
version, restore_image, self.arch
)
await self.instance.cleanup()
elif self.instance.version != version or self.legacy:
_LOGGER.info("Restore/Update of image for addon %s", self.slug)
image_file = Path(temp, "image.tar")
if image_file.is_file():
with suppress(DockerError):
await self.instance.update(version, restore_image, self.arch)
self._check_ingress_port()
await self.instance.import_image(image_file)
else:
with suppress(DockerError):
await self.instance.install(version, restore_image)
await self.instance.cleanup()
elif self.instance.version != version or self.legacy:
_LOGGER.info("Restore/Update of image for addon %s", self.slug)
with suppress(DockerError):
await self.instance.update(version, restore_image)
else:
with suppress(DockerError):
await self.instance.stop()
# Restore data and config
def _restore_data():
"""Restore data and config."""
temp_data = Path(temp, "data")
if temp_data.is_dir():
shutil.copytree(temp_data, self.path_data, symlinks=True)
else:
self.path_data.mkdir()
# Restore data
def _restore_data():
"""Restore data."""
temp_data = Path(temp, "data")
if temp_data.is_dir():
shutil.copytree(temp_data, self.path_data, symlinks=True)
else:
self.path_data.mkdir()
temp_config = Path(temp, "config")
if temp_config.is_dir():
shutil.copytree(temp_config, self.path_config, symlinks=True)
elif self.addon_config_used:
self.path_config.mkdir()
_LOGGER.info("Restoring data and config for addon %s", self.slug)
if self.path_data.is_dir():
await remove_data(self.path_data)
if self.path_config.is_dir():
await remove_data(self.path_config)
_LOGGER.info("Restoring data for addon %s", self.slug)
if self.path_data.is_dir():
await remove_data(self.path_data)
try:
await self.sys_run_in_executor(_restore_data)
except shutil.Error as err:
raise AddonsError(
f"Can't restore origin data: {err}", _LOGGER.error
) from err
# Restore AppArmor
profile_file = Path(temp, "apparmor.txt")
if profile_file.exists():
try:
await self.sys_run_in_executor(_restore_data)
except shutil.Error as err:
raise AddonsError(
f"Can't restore origin data: {err}", _LOGGER.error
) from err
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
except HostAppArmorError as err:
_LOGGER.error(
"Can't restore AppArmor profile for add-on %s", self.slug
)
raise AddonsError() from err
# Restore AppArmor
profile_file = Path(temp, "apparmor.txt")
if profile_file.exists():
try:
await self.sys_host.apparmor.load_profile(
self.slug, profile_file
)
except HostAppArmorError as err:
_LOGGER.error(
"Can't restore AppArmor profile for add-on %s", self.slug
)
raise AddonsError() from err
# Is add-on loaded
if not self.loaded:
await self.load()
# Is add-on loaded
if not self.loaded:
await self.load()
finally:
# Run add-on
if data[ATTR_STATE] == AddonState.STARTED:
wait_for_start = await self.start()
# Run add-on
if data[ATTR_STATE] == AddonState.STARTED:
return await self.start()
_LOGGER.info("Finished restore for add-on %s", self.slug)
return wait_for_start
def check_trust(self) -> Awaitable[None]:
"""Calculate Addon docker content trust.
@@ -1283,64 +969,17 @@ class Addon(AddonModel):
"""
return self.instance.check_trust()
@Job(
name="addon_restart_after_problem",
limit=JobExecutionLimit.GROUP_THROTTLE_RATE_LIMIT,
throttle_period=WATCHDOG_THROTTLE_PERIOD,
throttle_max_calls=WATCHDOG_THROTTLE_MAX_CALLS,
on_condition=AddonsJobError,
)
async def _restart_after_problem(self, state: ContainerState):
"""Restart unhealthy or failed addon."""
attempts = 0
while await self.instance.current_state() == state:
if not self.in_progress:
_LOGGER.warning(
"Watchdog found addon %s is %s, restarting...",
self.name,
state,
)
try:
if state == ContainerState.FAILED:
# Ensure failed container is removed before attempting reanimation
if attempts == 0:
with suppress(DockerError):
await self.instance.stop(remove_container=True)
await (await self.start())
else:
await (await self.restart())
except AddonsError as err:
attempts = attempts + 1
_LOGGER.error("Watchdog restart of addon %s failed!", self.name)
capture_exception(err)
else:
break
if attempts >= WATCHDOG_MAX_ATTEMPTS:
_LOGGER.critical(
"Watchdog cannot restart addon %s, failed all %s attempts",
self.name,
attempts,
)
break
await asyncio.sleep(WATCHDOG_RETRY_SECONDS)
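Boiled down, the watchdog above is a bounded retry loop with a fixed sleep between attempts. A self-contained sketch with hypothetical is_failed()/restart() callables (constants mirror const.py):
import asyncio

WATCHDOG_MAX_ATTEMPTS = 5
WATCHDOG_RETRY_SECONDS = 10

async def watchdog(is_failed, restart):
    """Retry restart() until it succeeds or the attempt budget runs out."""
    attempts = 0
    while await is_failed():
        try:
            await restart()
        except Exception:
            attempts += 1
        else:
            break  # restart succeeded
        if attempts >= WATCHDOG_MAX_ATTEMPTS:
            break  # give up; failure is logged by the real implementation
        await asyncio.sleep(WATCHDOG_RETRY_SECONDS)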
async def container_state_changed(self, event: DockerContainerStateEvent) -> None:
"""Set addon state from container state."""
if event.name != self.instance.name:
return
if event.state == ContainerState.RUNNING:
self._manual_stop = False
self.state = (
AddonState.STARTUP if self.instance.healthcheck else AddonState.STARTED
)
elif event.state in [
if event.state in [
ContainerState.RUNNING,
ContainerState.HEALTHY,
ContainerState.UNHEALTHY,
]:
self._manual_stop = False
self.state = AddonState.STARTED
elif event.state == ContainerState.STOPPED:
self.state = AddonState.STOPPED
@@ -1361,4 +1000,4 @@ class Addon(AddonModel):
ContainerState.STOPPED,
ContainerState.UNHEALTHY,
]:
await self._restart_after_problem(event.state)
await self._restart_after_problem(self, event.state)

View File

@@ -1,11 +0,0 @@
"""Confgiuration Objects for Addon Config."""
from dataclasses import dataclass
@dataclass(slots=True)
class FolderMapping:
"""Represent folder mapping configuration."""
path: str | None
read_only: bool

View File

@@ -1,36 +1,19 @@
"""Add-on static data."""
from datetime import timedelta
from enum import StrEnum
from enum import Enum
from ..jobs.const import JobCondition
class AddonBackupMode(StrEnum):
class AddonBackupMode(str, Enum):
"""Backup mode of an Add-on."""
HOT = "hot"
COLD = "cold"
class MappingType(StrEnum):
"""Mapping type of an Add-on Folder."""
DATA = "data"
CONFIG = "config"
SSL = "ssl"
ADDONS = "addons"
BACKUP = "backup"
SHARE = "share"
MEDIA = "media"
HOMEASSISTANT_CONFIG = "homeassistant_config"
ALL_ADDON_CONFIGS = "all_addon_configs"
ADDON_CONFIG = "addon_config"
ATTR_BACKUP = "backup"
ATTR_CODENOTARY = "codenotary"
ATTR_READ_ONLY = "read_only"
ATTR_PATH = "path"
WATCHDOG_RETRY_SECONDS = 10
WATCHDOG_MAX_ATTEMPTS = 5
WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
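The StrEnum-to-(str, Enum) swap changes how members stringify, which is why the startup migration in validate.py assigns `.value` explicitly. A minimal illustration, assuming CPython 3.11+ (where StrEnum exists):
from enum import Enum, StrEnum

class Mixin(str, Enum):
    HOT = "hot"

class Plain(StrEnum):
    HOT = "hot"

print(str(Plain.HOT))   # hot
print(str(Mixin.HOT))   # Mixin.HOT  -> .value is needed to get the raw string
print(Mixin.HOT.value)  # hot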

View File

@@ -1,374 +0,0 @@
"""Supervisor add-on manager."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
import logging
import tarfile
from typing import Union
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonConfigurationError,
AddonsError,
AddonsJobError,
AddonsNotSupportedError,
CoreDNSError,
DockerAPIError,
DockerError,
DockerNotFound,
HassioError,
HomeAssistantAPIError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils import check_exception_chain
from ..utils.sentry import capture_exception
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData
_LOGGER: logging.Logger = logging.getLogger(__name__)
AnyAddon = Union[Addon, AddonStore]
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.data: AddonsData = AddonsData(coresys)
self.local: dict[str, Addon] = {}
self.store: dict[str, AddonStore] = {}
@property
def all(self) -> list[AnyAddon]:
"""Return a list of all add-ons."""
addons: dict[str, AnyAddon] = {**self.store, **self.local}
return list(addons.values())
@property
def installed(self) -> list[Addon]:
"""Return a list of all installed add-ons."""
return list(self.local.values())
def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
"""Return an add-on from slug.
Prio:
1 - Local
2 - Store
"""
if addon_slug in self.local:
return self.local[addon_slug]
if not local_only:
return self.store.get(addon_slug)
return None
def from_token(self, token: str) -> Addon | None:
"""Return an add-on from Supervisor token."""
for addon in self.installed:
if token == addon.supervisor_token:
return addon
return None
async def load(self) -> None:
"""Start up add-on management."""
tasks = []
for slug in self.data.system:
addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(self.sys_create_task(addon.load()))
# Run initial tasks
_LOGGER.info("Found %d installed add-ons", len(tasks))
if tasks:
await asyncio.wait(tasks)
# Sync DNS
await self.sync_dns()
async def boot(self, stage: AddonStartup) -> None:
"""Boot add-ons with mode auto."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be started
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
if not tasks:
return
# Start Add-ons sequential
# avoid issue on slow IO
# Config.wait_boot is deprecated. Until addons update with healthchecks,
# add a sleep task for it to keep the same minimum amount of wait time
wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
for addon in tasks:
try:
if start_task := await addon.start():
wait_boot.append(start_task)
except AddonsError as err:
# Check if there is a system/user issue
if check_exception_chain(
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
):
addon.boot = AddonBoot.MANUAL
addon.save_persist()
except HassioError:
pass # These are already handled
else:
continue
_LOGGER.warning("Can't start Add-on %s", addon.slug)
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*wait_boot, return_exceptions=True)
async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.state != AddonState.STARTED or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be stopped
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
if not tasks:
return
# Stop Add-ons sequential
# avoid issue on slow IO
for addon in tasks:
try:
await addon.stop()
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
capture_exception(err)
@Job(
name="addon_manager_install",
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def install(self, slug: str) -> None:
"""Install an add-on."""
self.sys_jobs.current.reference = slug
if slug in self.local:
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
store = self.store.get(slug)
if not store:
raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
store.validate_availability()
await Addon(self.coresys, slug).install()
_LOGGER.info("Add-on '%s' successfully installed", slug)
async def uninstall(self, slug: str) -> None:
"""Remove an add-on."""
if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug)
return
await self.local[slug].uninstall()
_LOGGER.info("Add-on '%s' successfully removed", slug)
@Job(
name="addon_manager_update",
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def update(
self, slug: str, backup: bool | None = False
) -> asyncio.Task | None:
"""Update add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after update. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
if addon.version == store.version:
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
# Check if available, maybe something has changed
store.validate_availability()
if backup:
await self.sys_backups.do_backup_partial(
name=f"addon_{addon.slug}_{addon.version}",
homeassistant=False,
addons=[addon.slug],
)
return await addon.update()
@Job(
name="addon_manager_rebuild",
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def rebuild(self, slug: str) -> asyncio.Task | None:
"""Perform a rebuild of local build add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after rebuild. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
# Check if a rebuild is possible now
if addon.version != store.version:
raise AddonsError(
"Version changed, use Update instead Rebuild", _LOGGER.error
)
if not addon.need_build:
raise AddonsNotSupportedError(
"Can't rebuild a image based add-on", _LOGGER.error
)
return await addon.rebuild()
@Job(
name="addon_manager_restore",
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def restore(
self, slug: str, tar_file: tarfile.TarFile
) -> asyncio.Task | None:
"""Restore state of an add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after restore. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
_LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug)
had_ingress = False
else:
_LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug]
had_ingress = addon.ingress_panel
wait_for_start = await addon.restore(tar_file)
# Check if new
if slug not in self.local:
_LOGGER.info("Detect new Add-on after restore %s", slug)
self.local[slug] = addon
# Update ingress
if had_ingress != addon.ingress_panel:
await self.sys_ingress.reload()
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
return wait_for_start
@Job(
name="addon_manager_repair",
conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
)
async def repair(self) -> None:
"""Repair local add-ons."""
needs_repair: list[Addon] = []
# Evaluate Add-ons to repair
for addon in self.installed:
if await addon.instance.exists():
continue
needs_repair.append(addon)
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
if not needs_repair:
return
for addon in needs_repair:
_LOGGER.info("Repairing for add-on: %s", addon.slug)
with suppress(DockerError, KeyError):
# Need to pull the image again
if not addon.need_build:
await addon.instance.install(addon.version, addon.image)
continue
# Need local lookup
if addon.need_build and not addon.is_detached:
store = self.store[addon.slug]
# If this add-on is available for rebuild
if addon.version == store.version:
await addon.instance.install(addon.version, addon.image)
continue
_LOGGER.error("Can't repair %s", addon.slug)
with suppress(AddonsError):
await self.uninstall(addon.slug)
async def sync_dns(self) -> None:
"""Sync add-ons DNS names."""
# Update hosts
add_host_coros: list[Awaitable[None]] = []
for addon in self.installed:
try:
if not await addon.instance.is_running():
continue
except DockerError as err:
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
self.sys_resolution.create_issue(
IssueType.CORRUPT_DOCKER,
ContextType.ADDON,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
capture_exception(err)
else:
add_host_coros.append(
self.sys_plugins.dns.add_host(
ipv4=addon.ip_address, names=[addon.hostname], write=False
)
)
await asyncio.gather(*add_host_coros)
# Write hosts files
with suppress(CoreDNSError):
await self.sys_plugins.dns.write_hosts()

View File

@@ -1,7 +1,6 @@
"""Init file for Supervisor add-ons."""
from abc import ABC, abstractmethod
from collections import defaultdict
from collections.abc import Callable
from collections.abc import Awaitable, Callable
from contextlib import suppress
import logging
from pathlib import Path
@@ -65,7 +64,6 @@ from ..const import (
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_TRANSLATIONS,
ATTR_TYPE,
ATTR_UART,
ATTR_UDEV,
ATTR_URL,
@@ -81,37 +79,24 @@ from ..const import (
AddonStage,
AddonStartup,
)
from ..coresys import CoreSys
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.const import Capabilities
from ..exceptions import AddonsNotSupportedError
from ..jobs.const import JOB_GROUP_ADDON
from ..jobs.job_group import JobGroup
from ..utils import version_is_new_enough
from .configuration import FolderMapping
from .const import (
ATTR_BACKUP,
ATTR_CODENOTARY,
ATTR_PATH,
ATTR_READ_ONLY,
AddonBackupMode,
MappingType,
)
from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode
from .options import AddonOptions, UiOptions
from .validate import RE_SERVICE
from .validate import RE_SERVICE, RE_VOLUME
_LOGGER: logging.Logger = logging.getLogger(__name__)
Data = dict[str, Any]
class AddonModel(JobGroup, ABC):
class AddonModel(CoreSysAttributes, ABC):
"""Add-on Data layout."""
def __init__(self, coresys: CoreSys, slug: str):
"""Initialize data holder."""
super().__init__(
coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
)
self.coresys: CoreSys = coresys
self.slug: str = slug
@property
@@ -547,13 +532,14 @@ class AddonModel(JobGroup, ABC):
return ATTR_IMAGE not in self.data
@property
def map_volumes(self) -> dict[MappingType, FolderMapping]:
"""Return a dict of {MappingType: FolderMapping} from add-on."""
def map_volumes(self) -> dict[str, bool]:
"""Return a dict of {volume: read-only} from add-on."""
volumes = {}
for volume in self.data[ATTR_MAP]:
volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping(
volume.get(ATTR_PATH), volume[ATTR_READ_ONLY]
)
result = RE_VOLUME.match(volume)
if not result:
continue
volumes[result.group(1)] = result.group(2) != "rw"
return volumes
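Side by side, the two shapes this property has returned (a sketch; FolderMapping mirrors the dataclass from the configuration module removed earlier, and the new shape is keyed by MappingType members rather than plain strings):
from dataclasses import dataclass

@dataclass(slots=True)
class FolderMapping:
    """Mirror of the removed configuration.FolderMapping."""
    path: str | None
    read_only: bool

new_shape = {"config": FolderMapping(path=None, read_only=True)}  # MappingType -> FolderMapping
old_shape = {"config": True}                                      # volume name -> read-only bool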
@@ -654,9 +640,7 @@ class AddonModel(JobGroup, ABC):
# Home Assistant
version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT)
with suppress(AwesomeVersionException, TypeError):
if version and not version_is_new_enough(
self.sys_homeassistant.version, version
):
if self.sys_homeassistant.version < version:
raise AddonsNotSupportedError(
f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater",
logger,
@@ -680,3 +664,19 @@ class AddonModel(JobGroup, ABC):
# local build
return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"
def install(self) -> Awaitable[None]:
"""Install this add-on."""
return self.sys_addons.install(self.slug)
def uninstall(self) -> Awaitable[None]:
"""Uninstall this add-on."""
return self.sys_addons.uninstall(self.slug)
def update(self, backup: bool | None = False) -> Awaitable[None]:
"""Update this add-on."""
return self.sys_addons.update(self.slug, backup=backup)
def rebuild(self) -> Awaitable[None]:
"""Rebuild this add-on."""
return self.sys_addons.rebuild(self.slug)

View File

@@ -81,7 +81,6 @@ from ..const import (
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_TRANSLATIONS,
ATTR_TYPE,
ATTR_UART,
ATTR_UDEV,
ATTR_URL,
@@ -110,22 +109,12 @@ from ..validate import (
uuid_match,
version_tag,
)
from .const import (
ATTR_BACKUP,
ATTR_CODENOTARY,
ATTR_PATH,
ATTR_READ_ONLY,
RE_SLUG,
AddonBackupMode,
MappingType,
)
from .const import ATTR_BACKUP, ATTR_CODENOTARY, RE_SLUG, AddonBackupMode
from .options import RE_SCHEMA_ELEMENT
_LOGGER: logging.Logger = logging.getLogger(__name__)
RE_VOLUME = re.compile(
r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$"
)
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")
@@ -154,9 +143,6 @@ RE_MACHINE = re.compile(
r"|raspberrypi3"
r"|raspberrypi4-64"
r"|raspberrypi4"
r"|raspberrypi5-64"
r"|yellow"
r"|green"
r"|tinker"
r")$"
)
@@ -189,20 +175,6 @@ def _warn_addon_config(config: dict[str, Any]):
name,
)
invalid_services: list[str] = []
for service in config.get(ATTR_DISCOVERY, []):
try:
valid_discovery_service(service)
except vol.Invalid:
invalid_services.append(service)
if invalid_services:
_LOGGER.warning(
"Add-on lists the following unknown services for discovery: %s. Please report this to the maintainer of %s",
", ".join(invalid_services),
name,
)
return config
@@ -224,9 +196,9 @@ def _migrate_addon_config(protocol=False):
name,
)
if value == "before":
config[ATTR_STARTUP] = AddonStartup.SERVICES
config[ATTR_STARTUP] = AddonStartup.SERVICES.value
elif value == "after":
config[ATTR_STARTUP] = AddonStartup.APPLICATION
config[ATTR_STARTUP] = AddonStartup.APPLICATION.value
# UART 2021-01-20
if "auto_uart" in config:
@@ -272,48 +244,6 @@ def _migrate_addon_config(protocol=False):
name,
)
# 2023-11 "map" entries can also be dict to allow path configuration
volumes = []
for entry in config.get(ATTR_MAP, []):
if isinstance(entry, dict):
volumes.append(entry)
if isinstance(entry, str):
result = RE_VOLUME.match(entry)
if not result:
continue
volumes.append(
{
ATTR_TYPE: result.group(1),
ATTR_READ_ONLY: result.group(2) != "rw",
}
)
if volumes:
config[ATTR_MAP] = volumes
# 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
if any(
volume
and volume[ATTR_TYPE]
in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG}
for volume in volumes
):
_LOGGER.warning(
"Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s",
MappingType.ADDON_CONFIG,
MappingType.HOMEASSISTANT_CONFIG,
MappingType.CONFIG,
name,
)
else:
_LOGGER.debug(
"Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s",
MappingType.CONFIG,
MappingType.HOMEASSISTANT_CONFIG,
name,
)
return config
return _migrate
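A standalone sketch of the removed 2023-11 map migration, showing how string entries normalize into the dict form (RE_VOLUME copied from the validate.py hunk below; sample input is hypothetical):
import re

RE_VOLUME = re.compile(
    r"^(data|config|ssl|addons|backup|share|media|homeassistant_config"
    r"|all_addon_configs|addon_config)(?::(rw|ro))?$"
)

def migrate_map(entries):
    """Normalize legacy "name:mode" strings; dict entries pass through."""
    volumes = []
    for entry in entries:
        if isinstance(entry, dict):
            volumes.append(entry)
        elif isinstance(entry, str) and (match := RE_VOLUME.match(entry)):
            volumes.append(
                {"type": match.group(1), "read_only": match.group(2) != "rw"}
            )
    return volumes

print(migrate_map(["config:rw", "ssl", {"type": "data", "read_only": False}]))
# [{'type': 'config', 'read_only': False}, {'type': 'ssl', 'read_only': True},
#  {'type': 'data', 'read_only': False}]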
@@ -362,15 +292,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_DEVICES): [str],
vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
vol.Optional(ATTR_MAP, default=list): [
vol.Schema(
{
vol.Required(ATTR_TYPE): vol.Coerce(MappingType),
vol.Optional(ATTR_READ_ONLY, default=True): bool,
vol.Optional(ATTR_PATH): str,
}
)
],
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)],
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
@@ -391,7 +313,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
vol.Optional(ATTR_DISCOVERY): [str],
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
vol.Optional(ATTR_BACKUP_EXCLUDE): [str],
vol.Optional(ATTR_BACKUP_PRE): str,
vol.Optional(ATTR_BACKUP_POST): str,

View File

@@ -5,7 +5,6 @@ from pathlib import Path
from typing import Any
from aiohttp import web
from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispatcher
from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes
@@ -65,10 +64,9 @@ class RestAPI(CoreSysAttributes):
"max_field_size": MAX_LINE_SIZE,
},
)
attach_fast_url_dispatcher(self.webapp, FastUrlDispatcher())
# service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
self._runner: web.AppRunner = web.AppRunner(self.webapp)
self._site: web.TCPSite | None = None
async def load(self) -> None:
@@ -188,8 +186,6 @@ class RestAPI(CoreSysAttributes):
# Boards endpoints
self.webapp.add_routes(
[
web.get("/os/boards/green", api_os.boards_green_info),
web.post("/os/boards/green", api_os.boards_green_options),
web.get("/os/boards/yellow", api_os.boards_yellow_info),
web.post("/os/boards/yellow", api_os.boards_yellow_options),
web.get("/os/boards/{board}", api_os.boards_other_info),
@@ -489,8 +485,6 @@ class RestAPI(CoreSysAttributes):
web.get("/backups/info", api_backups.info),
web.post("/backups/options", api_backups.options),
web.post("/backups/reload", api_backups.reload),
web.post("/backups/freeze", api_backups.freeze),
web.post("/backups/thaw", api_backups.thaw),
web.post("/backups/new/full", api_backups.backup_full),
web.post("/backups/new/partial", api_backups.backup_partial),
web.post("/backups/new/upload", api_backups.upload),
@@ -673,7 +667,9 @@ class RestAPI(CoreSysAttributes):
async def start(self) -> None:
"""Run RESTful API webserver."""
await self._runner.setup()
self._site = web.TCPSite(self._runner, host="0.0.0.0", port=80)
self._site = web.TCPSite(
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
)
try:
await self._site.start()

View File

@@ -8,8 +8,8 @@ from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..addons import AnyAddon
from ..addons.addon import Addon
from ..addons.manager import AnyAddon
from ..addons.utils import rating_security
from ..const import (
ATTR_ADDONS,
@@ -388,14 +388,13 @@ class APIAddons(CoreSysAttributes):
def uninstall(self, request: web.Request) -> Awaitable[None]:
"""Uninstall add-on."""
addon = self._extract_addon(request)
return asyncio.shield(self.sys_addons.uninstall(addon.slug))
return asyncio.shield(addon.uninstall())
@api_process
async def start(self, request: web.Request) -> None:
def start(self, request: web.Request) -> Awaitable[None]:
"""Start add-on."""
addon = self._extract_addon(request)
if start_task := await asyncio.shield(addon.start()):
await start_task
return asyncio.shield(addon.start())
@api_process
def stop(self, request: web.Request) -> Awaitable[None]:
@@ -404,18 +403,16 @@ class APIAddons(CoreSysAttributes):
return asyncio.shield(addon.stop())
@api_process
async def restart(self, request: web.Request) -> None:
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart add-on."""
addon: Addon = self._extract_addon(request)
if start_task := await asyncio.shield(addon.restart()):
await start_task
return asyncio.shield(addon.restart())
@api_process
async def rebuild(self, request: web.Request) -> None:
def rebuild(self, request: web.Request) -> Awaitable[None]:
"""Rebuild local build add-on."""
addon = self._extract_addon(request)
if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
await start_task
return asyncio.shield(addon.rebuild())
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:

View File

@@ -1,11 +1,11 @@
"""Init file for Supervisor Audio RESTful API."""
import asyncio
from collections.abc import Awaitable
from dataclasses import asdict
import logging
from typing import Any
from aiohttp import web
import attr
import voluptuous as vol
from ..const import (
@@ -76,11 +76,15 @@ class APIAudio(CoreSysAttributes):
ATTR_UPDATE_AVAILABLE: self.sys_plugins.audio.need_update,
ATTR_HOST: str(self.sys_docker.network.audio),
ATTR_AUDIO: {
ATTR_CARD: [asdict(card) for card in self.sys_host.sound.cards],
ATTR_INPUT: [asdict(stream) for stream in self.sys_host.sound.inputs],
ATTR_OUTPUT: [asdict(stream) for stream in self.sys_host.sound.outputs],
ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards],
ATTR_INPUT: [
attr.asdict(stream) for stream in self.sys_host.sound.inputs
],
ATTR_OUTPUT: [
attr.asdict(stream) for stream in self.sys_host.sound.outputs
],
ATTR_APPLICATION: [
asdict(stream) for stream in self.sys_host.sound.applications
attr.asdict(stream) for stream in self.sys_host.sound.applications
],
},
}

View File

@@ -11,7 +11,6 @@ from ..addons.addon import Addon
from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
from ..utils.json import json_loads
from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
from .utils import api_process, api_validate
@@ -68,7 +67,7 @@ class APIAuth(CoreSysAttributes):
# Json
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
data = await request.json(loads=json_loads)
data = await request.json()
return await self._process_dict(request, addon, data)
# URL encoded

View File

@@ -1,6 +1,5 @@
"""Backups RESTful API."""
import asyncio
import errno
import logging
from pathlib import Path
import re
@@ -21,7 +20,6 @@ from ..const import (
ATTR_DAYS_UNTIL_STALE,
ATTR_FOLDERS,
ATTR_HOMEASSISTANT,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
ATTR_LOCATON,
ATTR_NAME,
ATTR_PASSWORD,
@@ -30,14 +28,12 @@ from ..const import (
ATTR_SIZE,
ATTR_SLUG,
ATTR_SUPERVISOR_VERSION,
ATTR_TIMEOUT,
ATTR_TYPE,
ATTR_VERSION,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..mounts.const import MountUsage
from ..resolution.const import UnhealthyReason
from .const import CONTENT_TYPE_TAR
from .utils import api_process, api_validate
@@ -67,7 +63,6 @@ SCHEMA_BACKUP_FULL = vol.Schema(
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
vol.Optional(ATTR_LOCATON): vol.Maybe(str),
vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
}
)
@@ -85,12 +80,6 @@ SCHEMA_OPTIONS = vol.Schema(
}
)
SCHEMA_FREEZE = vol.Schema(
{
vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
}
)
class APIBackups(CoreSysAttributes):
"""Handle RESTful API for backups functions."""
@@ -153,7 +142,7 @@ class APIBackups(CoreSysAttributes):
self.sys_backups.save_data()
@api_process
async def reload(self, _):
async def reload(self, request):
"""Reload backup list."""
await asyncio.shield(self.sys_backups.reload())
return True
@@ -188,7 +177,6 @@ class APIBackups(CoreSysAttributes):
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: backup.repositories,
ATTR_FOLDERS: backup.folders,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
}
def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
@@ -245,17 +233,6 @@ class APIBackups(CoreSysAttributes):
return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
@api_process
async def freeze(self, request):
"""Initiate manual freeze for external backup."""
body = await api_validate(SCHEMA_FREEZE, request)
await asyncio.shield(self.sys_backups.freeze_all(**body))
@api_process
async def thaw(self, request):
"""Begin thaw after manual freeze."""
await self.sys_backups.thaw_all()
@api_process
async def remove(self, request):
"""Remove a backup."""
@@ -290,8 +267,6 @@ class APIBackups(CoreSysAttributes):
backup.write(chunk)
except OSError as err:
if err.errno == errno.EBADMSG:
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error("Can't write new backup file: %s", err)
return False

View File

@@ -23,6 +23,7 @@ ATTR_CONNECTION_BUS = "connection_bus"
ATTR_DATA_DISK = "data_disk"
ATTR_DEVICE = "device"
ATTR_DEV_PATH = "dev_path"
ATTR_DISK_LED = "disk_led"
ATTR_DISKS = "disks"
ATTR_DRIVES = "drives"
ATTR_DT_SYNCHRONIZED = "dt_synchronized"
@@ -30,8 +31,8 @@ ATTR_DT_UTC = "dt_utc"
ATTR_EJECTABLE = "ejectable"
ATTR_FALLBACK = "fallback"
ATTR_FILESYSTEMS = "filesystems"
ATTR_HEARTBEAT_LED = "heartbeat_led"
ATTR_IDENTIFIERS = "identifiers"
ATTR_JOBS = "jobs"
ATTR_LLMNR = "llmnr"
ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
ATTR_MDNS = "mdns"
@@ -39,6 +40,7 @@ ATTR_MODEL = "model"
ATTR_MOUNTS = "mounts"
ATTR_MOUNT_POINTS = "mount_points"
ATTR_PANEL_PATH = "panel_path"
ATTR_POWER_LED = "power_led"
ATTR_REMOVABLE = "removable"
ATTR_REVISION = "revision"
ATTR_SEAT = "seat"
@@ -46,7 +48,6 @@ ATTR_SIGNED = "signed"
ATTR_STARTUP_TIME = "startup_time"
ATTR_SUBSYSTEM = "subsystem"
ATTR_SYSFS = "sysfs"
ATTR_SYSTEM_HEALTH_LED = "system_health_led"
ATTR_TIME_DETECTED = "time_detected"
ATTR_UPDATE_TYPE = "update_type"
ATTR_USE_NTP = "use_ntp"

View File

@@ -1,9 +1,6 @@
"""Init file for Supervisor network RESTful API."""
import logging
import voluptuous as vol
from ..addons.addon import Addon
from ..const import (
ATTR_ADDON,
ATTR_CONFIG,
@@ -12,18 +9,15 @@ from ..const import (
ATTR_SERVICES,
ATTR_UUID,
REQUEST_FROM,
AddonState,
)
from ..coresys import CoreSysAttributes
from ..discovery.validate import valid_discovery_service
from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate, require_home_assistant
_LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_DISCOVERY = vol.Schema(
{
vol.Required(ATTR_SERVICE): str,
vol.Required(ATTR_SERVICE): valid_discovery_service,
vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
}
)
@@ -42,19 +36,19 @@ class APIDiscovery(CoreSysAttributes):
@api_process
@require_home_assistant
async def list(self, request):
"""Show registered and available services."""
"""Show register services."""
# Get available discovery
discovery = [
{
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
for message in self.sys_discovery.list_messages
if (addon := self.sys_addons.get(message.addon, local_only=True))
and addon.state == AddonState.STARTED
]
discovery = []
for message in self.sys_discovery.list_messages:
discovery.append(
{
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
)
# Get available services/add-ons
services = {}
@@ -68,28 +62,11 @@ class APIDiscovery(CoreSysAttributes):
async def set_discovery(self, request):
"""Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request)
addon: Addon = request[REQUEST_FROM]
service = body[ATTR_SERVICE]
try:
valid_discovery_service(service)
except vol.Invalid:
_LOGGER.warning(
"Received discovery message for unknown service %s from addon %s. Please report this to the maintainer of the add-on",
service,
addon.name,
)
addon = request[REQUEST_FROM]
# Access?
if body[ATTR_SERVICE] not in addon.discovery:
_LOGGER.error(
"Add-on %s attempted to send discovery for service %s which is not listed in its config. Please report this to the maintainer of the add-on",
addon.name,
service,
)
raise APIForbidden(
"Add-ons must list services they provide via discovery in their config!"
)
raise APIForbidden("Can't use discovery!")
# Process discovery message
message = self.sys_discovery.send(addon, **body)
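This moves discovery-service validation out of the request schema and into the handler, so an unknown service logs a warning instead of rejecting the request. The pattern, standalone (voluptuous is already the validation library here; the known-service set is hypothetical):
import voluptuous as vol

KNOWN_SERVICES = {"mqtt", "mysql"}  # hypothetical

def valid_discovery_service(value: str) -> str:
    if value not in KNOWN_SERVICES:
        raise vol.Invalid(f"unknown discovery service {value}")
    return value

service = "matter"
try:
    valid_discovery_service(service)
except vol.Invalid:
    # warn-and-continue instead of schema-level rejection
    print(f"Received discovery message for unknown service {service}")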

View File

@@ -12,7 +12,6 @@ from ..const import (
ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT,
ATTR_BACKUP,
ATTR_BACKUPS_EXCLUDE_DATABASE,
ATTR_BLK_READ,
ATTR_BLK_WRITE,
ATTR_BOOT,
@@ -52,7 +51,6 @@ SCHEMA_OPTIONS = vol.Schema(
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE): vol.Boolean(),
}
)
@@ -84,7 +82,6 @@ class APIHomeAssistant(CoreSysAttributes):
ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
ATTR_BACKUPS_EXCLUDE_DATABASE: self.sys_homeassistant.backups_exclude_database,
}
@api_process
@@ -116,11 +113,6 @@ class APIHomeAssistant(CoreSysAttributes):
if ATTR_AUDIO_OUTPUT in body:
self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]
if ATTR_BACKUPS_EXCLUDE_DATABASE in body:
self.sys_homeassistant.backups_exclude_database = body[
ATTR_BACKUPS_EXCLUDE_DATABASE
]
self.sys_homeassistant.save_data()
@api_process

View File

@@ -21,18 +21,11 @@ from ..const import (
ATTR_ICON,
ATTR_PANELS,
ATTR_SESSION,
ATTR_SESSION_DATA_USER_ID,
ATTR_TITLE,
HEADER_REMOTE_USER_DISPLAY_NAME,
HEADER_REMOTE_USER_ID,
HEADER_REMOTE_USER_NAME,
HEADER_TOKEN,
HEADER_TOKEN_OLD,
IngressSessionData,
IngressSessionDataUser,
)
from ..coresys import CoreSysAttributes
from ..exceptions import HomeAssistantAPIError
from .const import COOKIE_INGRESS
from .utils import api_process, api_validate, require_home_assistant
@@ -40,46 +33,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
VALIDATE_SESSION_DATA = vol.Schema({ATTR_SESSION: str})
"""Expected optional payload of create session request"""
SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema(
{
vol.Optional(ATTR_SESSION_DATA_USER_ID): str,
}
)
# from https://github.com/aio-libs/aiohttp/blob/8ae650bee4add9f131d49b96a0a150311ea58cd1/aiohttp/helpers.py#L1059C1-L1079C1
def must_be_empty_body(method: str, code: int) -> bool:
"""Check if a request must return an empty body."""
return (
status_code_must_be_empty_body(code)
or method_must_be_empty_body(method)
or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
)
def method_must_be_empty_body(method: str) -> bool:
"""Check if a method must return an empty body."""
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
return method.upper() == hdrs.METH_HEAD
def status_code_must_be_empty_body(code: int) -> bool:
"""Check if a status code must return an empty body."""
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
return code in {204, 304} or 100 <= code < 200
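A quick check of the helper semantics (values follow the RFC 9112 rules cited above):
print(must_be_empty_body("GET", 204))   # True  - 204 responses never carry a body
print(must_be_empty_body("HEAD", 200))  # True  - HEAD responses are header-only
print(must_be_empty_body("GET", 200))   # False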
class APIIngress(CoreSysAttributes):
"""Ingress view to handle add-on webui routing."""
_list_of_users: list[IngressSessionDataUser]
def __init__(self) -> None:
"""Initialize APIIngress."""
self._list_of_users = []
def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist."""
token = request.match_info.get("token")
@@ -114,19 +71,7 @@ class APIIngress(CoreSysAttributes):
@require_home_assistant
async def create_session(self, request: web.Request) -> dict[str, Any]:
"""Create a new session."""
schema_ingress_config_session_data = await api_validate(
SCHEMA_INGRESS_CREATE_SESSION_DATA, request
)
data: IngressSessionData | None = None
if ATTR_SESSION_DATA_USER_ID in schema_ingress_config_session_data:
user = await self._find_user_by_id(
schema_ingress_config_session_data[ATTR_SESSION_DATA_USER_ID]
)
if user:
data = IngressSessionData(user)
session = self.sys_ingress.create_session(data)
session = self.sys_ingress.create_session()
return {ATTR_SESSION: session}
@api_process
@@ -154,14 +99,13 @@ class APIIngress(CoreSysAttributes):
# Process requests
addon = self._extract_addon(request)
path = request.match_info.get("path")
session_data = self.sys_ingress.get_session_data(session)
try:
# Websocket
if _is_websocket(request):
return await self._handle_websocket(request, addon, path, session_data)
return await self._handle_websocket(request, addon, path)
# Request
return await self._handle_request(request, addon, path, session_data)
return await self._handle_request(request, addon, path)
except aiohttp.ClientError as err:
_LOGGER.error("Ingress error: %s", err)
@@ -169,11 +113,7 @@ class APIIngress(CoreSysAttributes):
raise HTTPBadGateway()
async def _handle_websocket(
self,
request: web.Request,
addon: Addon,
path: str,
session_data: IngressSessionData | None,
self, request: web.Request, addon: Addon, path: str
) -> web.WebSocketResponse:
"""Ingress route for websocket."""
if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
@@ -191,7 +131,7 @@ class APIIngress(CoreSysAttributes):
# Preparing
url = self._create_url(addon, path)
source_header = _init_header(request, addon, session_data)
source_header = _init_header(request, addon)
# Support GET query
if request.query_string:
@@ -217,15 +157,11 @@ class APIIngress(CoreSysAttributes):
return ws_server
async def _handle_request(
self,
request: web.Request,
addon: Addon,
path: str,
session_data: IngressSessionData | None,
self, request: web.Request, addon: Addon, path: str
) -> web.Response | web.StreamResponse:
"""Ingress route for request."""
url = self._create_url(addon, path)
source_header = _init_header(request, addon, session_data)
source_header = _init_header(request, addon)
# Passing the raw stream breaks requests for some webservers
# since we just need it for POST requests really, for all other methods
@@ -248,18 +184,10 @@ class APIIngress(CoreSysAttributes):
skip_auto_headers={hdrs.CONTENT_TYPE},
) as result:
headers = _response_header(result)
# Avoid parsing content_type in simple cases for better performance
if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
content_type = (maybe_content_type.partition(";"))[0].strip()
else:
content_type = result.content_type
# Simple request
if (
# empty body responses should not be streamed,
# otherwise aiohttp < 3.9.0 may generate
# an invalid "0\r\n\r\n" chunk instead of an empty response.
must_be_empty_body(request.method, result.status)
or hdrs.CONTENT_LENGTH in result.headers
hdrs.CONTENT_LENGTH in result.headers
and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
):
# Return Response
@@ -267,13 +195,13 @@ class APIIngress(CoreSysAttributes):
return web.Response(
headers=headers,
status=result.status,
content_type=content_type,
content_type=result.content_type,
body=body,
)
# Stream response
response = web.StreamResponse(status=result.status, headers=headers)
response.content_type = content_type
response.content_type = result.content_type
try:
await response.prepare(request)
@@ -289,35 +217,11 @@ class APIIngress(CoreSysAttributes):
return response
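The partition(";") shortcut above strips any media-type parameters without invoking aiohttp's full Content-Type parser; in isolation:
maybe_content_type = "text/html; charset=utf-8"
content_type = maybe_content_type.partition(";")[0].strip()
print(content_type)  # text/html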
async def _find_user_by_id(self, user_id: str) -> IngressSessionDataUser | None:
"""Find user object by the user's ID."""
try:
list_of_users = await self.sys_homeassistant.get_users()
except (HomeAssistantAPIError, TypeError) as err:
_LOGGER.error(
"%s error occurred while requesting list of users: %s", type(err), err
)
return None
if list_of_users is not None:
self._list_of_users = list_of_users
return next((user for user in self._list_of_users if user.id == user_id), None)
def _init_header(
request: web.Request, addon: Addon, session_data: IngressSessionData | None
) -> CIMultiDict | dict[str, str]:
def _init_header(request: web.Request, addon: str) -> CIMultiDict | dict[str, str]:
"""Create initial header."""
headers = {}
if session_data is not None:
headers[HEADER_REMOTE_USER_ID] = session_data.user.id
if session_data.user.username is not None:
headers[HEADER_REMOTE_USER_NAME] = session_data.user.username
if session_data.user.display_name is not None:
headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name
# filter flags
for name, value in request.headers.items():
if name in (
@@ -330,9 +234,6 @@ def _init_header(
hdrs.SEC_WEBSOCKET_KEY,
istr(HEADER_TOKEN),
istr(HEADER_TOKEN_OLD),
istr(HEADER_REMOTE_USER_ID),
istr(HEADER_REMOTE_USER_NAME),
istr(HEADER_REMOTE_USER_DISPLAY_NAME),
):
continue
headers[name] = value
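With session data present, the proxied request towards the add-on gains remote-user headers. A sketch of the result, assuming the header names defined in const.py (they are not shown in this diff) and hypothetical values:
from multidict import CIMultiDict  # aiohttp dependency, already used in this module

headers = CIMultiDict()
headers["X-Remote-User-Id"] = "a1b2c3"              # always set when session data exists
headers["X-Remote-User-Name"] = "jdoe"              # only when username is not None
headers["X-Remote-User-Display-Name"] = "John Doe"  # only when display_name is not None
print(dict(headers))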

View File

@@ -6,9 +6,7 @@ from aiohttp import web
import voluptuous as vol
from ..coresys import CoreSysAttributes
from ..jobs import SupervisorJob
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .const import ATTR_JOBS
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -21,45 +19,11 @@ SCHEMA_OPTIONS = vol.Schema(
class APIJobs(CoreSysAttributes):
"""Handle RESTful API for OS functions."""
def _list_jobs(self) -> list[dict[str, Any]]:
"""Return current job tree."""
jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
for job in self.sys_jobs.jobs:
if job.internal:
continue
if job.parent_id not in jobs_by_parent:
jobs_by_parent[job.parent_id] = [job]
else:
jobs_by_parent[job.parent_id].append(job)
job_list: list[dict[str, Any]] = []
queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = [
(job_list, job) for job in jobs_by_parent.get(None, [])
]
while queue:
(current_list, current_job) = queue.pop(0)
child_jobs: list[dict[str, Any]] = []
# We remove parent_id and instead use that info to represent jobs as a tree
job_dict = current_job.as_dict() | {"child_jobs": child_jobs}
job_dict.pop("parent_id")
current_list.append(job_dict)
if current_job.uuid in jobs_by_parent:
queue.extend(
[(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
)
return job_list
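Standalone sketch of the same flat-list-to-tree walk, using plain dicts instead of SupervisorJob objects (the job data is hypothetical):
jobs = [
    {"uuid": "a", "parent_id": None, "name": "backup_manager_full_backup"},
    {"uuid": "b", "parent_id": "a", "name": "addon_backup"},
]

by_parent: dict[str | None, list[dict]] = {}
for job in jobs:
    by_parent.setdefault(job["parent_id"], []).append(job)

def build(parent_id: str | None) -> list[dict]:
    # parent_id is dropped; nesting under child_jobs carries the same info
    return [
        {k: v for k, v in job.items() if k != "parent_id"}
        | {"child_jobs": build(job["uuid"])}
        for job in by_parent.get(parent_id, [])
    ]

print(build(None))
# [{'uuid': 'a', 'name': '...', 'child_jobs': [{'uuid': 'b', 'name': '...', 'child_jobs': []}]}]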
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return JobManager information."""
return {
ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions,
ATTR_JOBS: self._list_jobs(),
}
@api_process

View File

@@ -19,7 +19,6 @@ from ...const import (
CoreState,
)
from ...coresys import CoreSys, CoreSysAttributes
from ...utils import version_is_new_enough
from ..utils import api_return_error, excract_supervisor_token
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -196,7 +195,7 @@ class SecurityMiddleware(CoreSysAttributes):
CoreState.FREEZE,
):
return api_return_error(
message=f"System is not ready with state: {self.sys_core.state}"
message=f"System is not ready with state: {self.sys_core.state.value}"
)
return await handler(request)
@@ -274,8 +273,9 @@ class SecurityMiddleware(CoreSysAttributes):
@middleware
async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy."""
if request[REQUEST_FROM] != self.sys_homeassistant or version_is_new_enough(
self.sys_homeassistant.version, _CORE_VERSION
if (
request[REQUEST_FROM] != self.sys_homeassistant
or self.sys_homeassistant.version >= _CORE_VERSION
):
return await handler(request)

View File

@@ -1,11 +1,11 @@
"""REST API for network."""
import asyncio
from collections.abc import Awaitable
from dataclasses import replace
from ipaddress import ip_address, ip_interface
from typing import Any
from aiohttp import web
import attr
import voluptuous as vol
from ..const import (
@@ -43,7 +43,8 @@ from ..const import (
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, HostNetworkNotFound
from ..host.configuration import (
from ..host.const import AuthMethod, InterfaceType, WifiMode
from ..host.network import (
AccessPoint,
Interface,
InterfaceMethod,
@@ -51,7 +52,6 @@ from ..host.configuration import (
VlanConfig,
WifiConfig,
)
from ..host.const import AuthMethod, InterfaceType, WifiMode
from .utils import api_process, api_validate
_SCHEMA_IP_CONFIG = vol.Schema(
@@ -121,7 +121,6 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
ATTR_ENABLED: interface.enabled,
ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary,
ATTR_MAC: interface.mac,
ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
@@ -197,19 +196,19 @@ class APINetwork(CoreSysAttributes):
# Apply config
for key, config in body.items():
if key == ATTR_IPV4:
interface.ipv4 = replace(
interface.ipv4 = attr.evolve(
interface.ipv4
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
**config,
)
elif key == ATTR_IPV6:
interface.ipv6 = replace(
interface.ipv6 = attr.evolve(
interface.ipv6
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
**config,
)
elif key == ATTR_WIFI:
interface.wifi = replace(
interface.wifi = attr.evolve(
interface.wifi
or WifiConfig(
WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
@@ -277,8 +276,6 @@ class APINetwork(CoreSysAttributes):
)
vlan_interface = Interface(
"",
"",
"",
True,
True,

View File

@@ -8,15 +8,11 @@ from aiohttp import web
import voluptuous as vol
from ..const import (
ATTR_ACTIVITY_LED,
ATTR_BOARD,
ATTR_BOOT,
ATTR_DEVICES,
ATTR_DISK_LED,
ATTR_HEARTBEAT_LED,
ATTR_ID,
ATTR_NAME,
ATTR_POWER_LED,
ATTR_SERIAL,
ATTR_SIZE,
ATTR_UPDATE_AVAILABLE,
@@ -31,19 +27,21 @@ from .const import (
ATTR_DATA_DISK,
ATTR_DEV_PATH,
ATTR_DEVICE,
ATTR_DISK_LED,
ATTR_DISKS,
ATTR_HEARTBEAT_LED,
ATTR_MODEL,
ATTR_SYSTEM_HEALTH_LED,
ATTR_POWER_LED,
ATTR_VENDOR,
)
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})
# pylint: disable=no-value-for-parameter
SCHEMA_YELLOW_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_DISK_LED): vol.Boolean(),
@@ -51,14 +49,6 @@ SCHEMA_YELLOW_OPTIONS = vol.Schema(
vol.Optional(ATTR_POWER_LED): vol.Boolean(),
}
)
SCHEMA_GREEN_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(),
vol.Optional(ATTR_POWER_LED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
}
)
# pylint: enable=no-value-for-parameter
class APIOS(CoreSysAttributes):
@@ -115,31 +105,6 @@ class APIOS(CoreSysAttributes):
],
}
@api_process
async def boards_green_info(self, request: web.Request) -> dict[str, Any]:
"""Get green board settings."""
return {
ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led,
ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led,
ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led,
}
@api_process
async def boards_green_options(self, request: web.Request) -> None:
"""Update green board settings."""
body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
if ATTR_ACTIVITY_LED in body:
self.sys_dbus.agent.board.green.activity_led = body[ATTR_ACTIVITY_LED]
if ATTR_POWER_LED in body:
self.sys_dbus.agent.board.green.power_led = body[ATTR_POWER_LED]
if ATTR_SYSTEM_HEALTH_LED in body:
self.sys_dbus.agent.board.green.user_led = body[ATTR_SYSTEM_HEALTH_LED]
self.sys_dbus.agent.board.green.save_data()
@api_process
async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
"""Get yellow board settings."""
@@ -163,7 +128,6 @@ class APIOS(CoreSysAttributes):
if ATTR_POWER_LED in body:
self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]
self.sys_dbus.agent.board.yellow.save_data()
self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM,

View File

@@ -1 +1 @@
!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()
!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-NoHhvMA3Ku8.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-G81gb268sps.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-NoHhvMA3Ku8.js")}}()

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1 +1 @@
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230601.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230601.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,2 +0,0 @@
!function(){"use strict";var n,t,e={14595:function(n,t,e){e(58556);var r,i,o=e(93217),u=e(422),a=e(62173),s=function(n,t,e){if("input"===n){if("type"===t&&"checkbox"===e||"checked"===t||"disabled"===t)return;return""}},c={renderMarkdown:function(n,t){var e,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign(Object.assign({},(0,a.getDefaultWhiteList)()),{},{input:["type","disabled","checked"],"ha-icon":["icon"],"ha-svg-icon":["path"],"ha-alert":["alert-type","title"]})),o.allowSvg?(i||(i=Object.assign(Object.assign({},r),{},{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=i):e=r,(0,a.filterXSS)((0,u.TU)(n,t),{whiteList:e,onTagAttr:s})}};(0,o.Jj)(c)}},r={};function i(n){var t=r[n];if(void 0!==t)return t.exports;var o=r[n]={exports:{}};return e[n](o,o.exports,i),o.exports}i.m=e,i.x=function(){var n=i.O(void 0,[9191,215],(function(){return i(14595)}));return n=i.O(n)},n=[],i.O=function(t,e,r,o){if(!e){var u=1/0;for(f=0;f<n.length;f++){e=n[f][0],r=n[f][1],o=n[f][2];for(var a=!0,s=0;s<e.length;s++)(!1&o||u>=o)&&Object.keys(i.O).every((function(n){return i.O[n](e[s])}))?e.splice(s--,1):(a=!1,o<u&&(u=o));if(a){n.splice(f--,1);var c=r();void 0!==c&&(t=c)}}return t}o=o||0;for(var f=n.length;f>0&&n[f-1][2]>o;f--)n[f]=n[f-1];n[f]=[e,r,o]},i.n=function(n){var t=n&&n.__esModule?function(){return n.default}:function(){return n};return i.d(t,{a:t}),t},i.d=function(n,t){for(var e in t)i.o(t,e)&&!i.o(n,e)&&Object.defineProperty(n,e,{enumerable:!0,get:t[e]})},i.f={},i.e=function(n){return Promise.all(Object.keys(i.f).reduce((function(t,e){return i.f[e](n,t),t}),[]))},i.u=function(n){return n+"-"+{215:"FPZmDYZTPdk",9191:"37260H-osZ4"}[n]+".js"},i.o=function(n,t){return Object.prototype.hasOwnProperty.call(n,t)},i.p="/api/hassio/app/frontend_es5/",function(){var n={1402:1};i.f.i=function(t,e){n[t]||importScripts(i.p+i.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=t.push.bind(t);t.push=function(t){var r=t[0],o=t[1],u=t[2];for(var a in o)i.o(o,a)&&(i.m[a]=o[a]);for(u&&u(i);r.length;)n[r.pop()]=1;e(t)}}(),t=i.x,i.x=function(){return Promise.all([i.e(9191),i.e(215)]).then(t)};i.x()}();
//# sourceMappingURL=1402-6WKUruvoXtM.js.map

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

Some files were not shown because too many files have changed in this diff.