Compare commits

..

1 Commit

Author SHA1 Message Date
Mike Degatano
dafe271050 Use checkboxes in PR template to set labels 2026-03-30 23:46:49 +00:00
167 changed files with 5034 additions and 7324 deletions

View File

@@ -1,8 +1,6 @@
{
"name": "Supervisor dev",
"image": "ghcr.io/home-assistant/devcontainer:6-supervisor",
"overrideCommand": false,
"remoteUser": "vscode",
"image": "ghcr.io/home-assistant/devcontainer:3-supervisor",
"containerEnv": {
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
},
@@ -19,10 +17,10 @@
"charliermarsh.ruff",
"ms-python.pylint",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github",
"GitHub.copilot"
"GitHub.vscode-pull-request-github"
],
"settings": {
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
@@ -48,8 +46,6 @@
},
"mounts": [
"type=volume,target=/var/lib/docker",
"type=volume,target=/var/lib/containerd",
"type=volume,target=/mnt/supervisor",
"type=tmpfs,target=/tmp"
"type=volume,target=/mnt/supervisor"
]
}

View File

@@ -98,8 +98,6 @@ availability.
- Dataclasses and enum classes
- Async/await patterns
- Pattern matching where appropriate
- Parenthesis-free `except` clauses with comma-separated exceptions
(e.g., `except KeyError, TypeError:`) — available since Python 3.14
### Code Quality Standards

View File

@@ -25,7 +25,6 @@ on:
push:
branches: ["main"]
paths:
- ".github/workflows/builder.yml"
- "rootfs/**"
- "supervisor/**"
- Dockerfile
@@ -35,9 +34,10 @@ on:
env:
DEFAULT_PYTHON: "3.14.3"
COSIGN_VERSION: "v2.5.3"
CRANE_VERSION: "v0.20.7"
CRANE_SHA256: "8ef3564d264e6b5ca93f7b7f5652704c4dd29d33935aff6947dd5adefd05953e"
BUILD_NAME: supervisor
BUILD_TYPE: supervisor
IMAGE_NAME: hassio-supervisor
ARCHITECTURES: '["amd64", "aarch64"]'
concurrency:
@@ -49,11 +49,11 @@ jobs:
name: Initialize build
runs-on: ubuntu-latest
outputs:
architectures: ${{ env.ARCHITECTURES }}
version: ${{ steps.version.outputs.version }}
channel: ${{ steps.version.outputs.channel }}
publish: ${{ steps.version.outputs.publish }}
build_wheels: ${{ steps.requirements.outputs.build_wheels }}
matrix: ${{ steps.matrix.outputs.matrix }}
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -86,23 +86,21 @@ jobs:
echo "build_wheels=false" >> "$GITHUB_OUTPUT"
fi
- name: Get build matrix
id: matrix
uses: home-assistant/builder/actions/prepare-multi-arch-matrix@62a1597b84b3461abad9816d9cd92862a2b542c3 # 2026.03.2
with:
architectures: ${{ env.ARCHITECTURES }}
image-name: ${{ env.IMAGE_NAME }}
build:
name: Build ${{ matrix.arch }} supervisor
needs: init
runs-on: ${{ matrix.os }}
runs-on: ${{ matrix.runs-on }}
permissions:
contents: read
id-token: write
packages: write
strategy:
matrix: ${{ fromJSON(needs.init.outputs.matrix) }}
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
include:
- runs-on: ubuntu-24.04
- runs-on: ubuntu-24.04-arm
arch: aarch64
env:
WHEELS_ABI: cp314
WHEELS_TAG: musllinux_1_2
@@ -153,12 +151,18 @@ jobs:
- name: Upload local wheels artifact
if: needs.init.outputs.build_wheels == 'true' && needs.init.outputs.publish == 'false'
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: wheels-${{ matrix.arch }}
path: wheels
retention-days: 1
- name: Set version
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/version@master
with:
type: ${{ env.BUILD_TYPE }}
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -190,42 +194,25 @@ jobs:
container-registry-password: ${{ secrets.GITHUB_TOKEN }}
cosign-base-identity: 'https://github.com/home-assistant/docker-base/.*'
cosign-base-verify: ghcr.io/home-assistant/base-python:3.14-alpine3.22
image: ${{ matrix.image }}
image: ghcr.io/home-assistant/${{ matrix.arch }}-hassio-supervisor
image-tags: |
${{ needs.init.outputs.version }}
latest
push: ${{ needs.init.outputs.publish == 'true' }}
version: ${{ needs.init.outputs.version }}
manifest:
name: Publish multi-arch manifest
needs: ["init", "build"]
if: needs.init.outputs.publish == 'true'
runs-on: ubuntu-latest
permissions:
id-token: write
packages: write
steps:
- name: Publish multi-arch manifest
uses: home-assistant/builder/actions/publish-multi-arch-manifest@62a1597b84b3461abad9816d9cd92862a2b542c3 # 2026.03.2
with:
architectures: ${{ env.ARCHITECTURES }}
container-registry-password: ${{ secrets.GITHUB_TOKEN }}
image-name: ${{ env.IMAGE_NAME }}
image-tags: |
${{ needs.init.outputs.version }}
latest
version:
name: Update version
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
needs: ["init", "run_supervisor"]
if: github.repository_owner == 'home-assistant'
needs: ["init", "run_supervisor", "retag_deprecated"]
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Initialize git
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/git-init@master
with:
name: ${{ secrets.GIT_NAME }}
@@ -233,6 +220,7 @@ jobs:
token: ${{ secrets.GIT_TOKEN }}
- name: Update version file
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/version-push@master
with:
key: ${{ env.BUILD_NAME }}
@@ -265,7 +253,7 @@ jobs:
image: ghcr.io/home-assistant/amd64-hassio-supervisor
image-tags: runner
load: true
version: ${{ needs.init.outputs.version }}
version: runner
# Pull the Supervisor for publish runs to test the published image
- name: Pull Supervisor
@@ -281,10 +269,9 @@ jobs:
--privileged \
--security-opt seccomp=unconfined \
--security-opt apparmor=unconfined \
-v /run/docker.sock:/run/docker.sock:rw \
-v /run/dbus:/run/dbus:ro \
-v /run/supervisor:/run/os:rw \
-v /tmp/supervisor/data:/data:rw,slave \
-v /run/docker.sock:/run/docker.sock \
-v /run/dbus:/run/dbus \
-v /tmp/supervisor/data:/data \
-v /etc/machine-id:/etc/machine-id:ro \
-e SUPERVISOR_SHARE="/tmp/supervisor/data" \
-e SUPERVISOR_NAME=hassio_supervisor \
@@ -452,3 +439,50 @@ jobs:
- name: Get supervisor logs on failure
if: ${{ cancelled() || failure() }}
run: docker logs hassio_supervisor
retag_deprecated:
needs: ["build", "init"]
name: Re-tag deprecated ${{ matrix.arch }} images
if: needs.init.outputs.publish == 'true'
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
packages: write
strategy:
matrix:
arch: ["armhf", "armv7", "i386"]
env:
# Last available release for deprecated architectures
FROZEN_VERSION: "2025.11.5"
steps:
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Install Cosign
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v4.1.1
with:
cosign-release: ${{ env.COSIGN_VERSION }}
- name: Install crane
run: |
curl -sLO https://github.com/google/go-containerregistry/releases/download/${{ env.CRANE_VERSION }}/go-containerregistry_Linux_x86_64.tar.gz
echo "${{ env.CRANE_SHA256 }} go-containerregistry_Linux_x86_64.tar.gz" | sha256sum -c -
tar xzf go-containerregistry_Linux_x86_64.tar.gz crane
sudo mv crane /usr/local/bin/
- name: Re-tag deprecated image with updated version label
run: |
crane auth login ghcr.io -u ${{ github.repository_owner }} -p ${{ secrets.GITHUB_TOKEN }}
crane mutate \
--label io.hass.version=${{ needs.init.outputs.version }} \
--tag ghcr.io/home-assistant/${{ matrix.arch }}-hassio-supervisor:${{ needs.init.outputs.version }} \
ghcr.io/home-assistant/${{ matrix.arch }}-hassio-supervisor:${{ env.FROZEN_VERSION }}
- name: Sign image with Cosign
run: |
cosign sign --yes ghcr.io/home-assistant/${{ matrix.arch }}-hassio-supervisor:${{ needs.init.outputs.version }}

View File

@@ -1,19 +1,111 @@
name: Check PR
# yamllint disable-line rule:truthy
on:
pull_request:
branches: ["main"]
types: [labeled, unlabeled, synchronize]
types: [opened, edited, labeled, unlabeled, synchronize]
permissions:
contents: read
pull-requests: write
jobs:
sync-type-labels:
name: Sync type labels from PR body
runs-on: ubuntu-latest
outputs:
labels: ${{ steps.sync.outputs.labels }}
steps:
- id: sync
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const pr = context.payload.pull_request;
const body = pr.body || "";
function isTypeChecked(text, keySubstring) {
for (const line of text.split("\n")) {
if (!line.includes(keySubstring)) continue;
const m = line.match(/^\s*-\s*\[\s*([ xX])\s*\]\s*/);
if (m) return m[1].toLowerCase() === "x";
}
return false;
}
const typeMappings = [
{ key: "Dependency upgrade", label: "dependencies" },
{
key: "Bugfix (non-breaking change which fixes an issue)",
label: "bugfix",
},
{
key: "New feature (which adds functionality to the supervisor)",
label: "new-feature",
},
{
key: "Breaking change (fix/feature causing existing functionality to break)",
label: "breaking-change",
},
{
key: "Code quality improvements to existing code or addition of tests",
label: "ci",
},
];
const originalLabels = new Set(pr.labels.map((l) => l.name));
const desiredLabels = new Set(originalLabels);
for (const { key, label } of typeMappings) {
if (isTypeChecked(body, key)) {
desiredLabels.add(label);
} else {
desiredLabels.delete(label);
}
}
const owner = context.repo.owner;
const repo = context.repo.repo;
const prNumber = pr.number;
for (const { label } of typeMappings) {
const wanted = desiredLabels.has(label);
const had = originalLabels.has(label);
if (wanted === had) continue;
try {
if (wanted) {
await github.rest.issues.addLabels({
owner,
repo,
issue_number: prNumber,
labels: [label],
});
} else {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: prNumber,
name: label,
});
}
} catch (e) {
core.warning(`Label API (${label}): ${e.message}`);
}
}
const labelsJson = JSON.stringify([...desiredLabels].sort());
core.setOutput("labels", labelsJson);
init:
name: Check labels
needs: sync-type-labels
runs-on: ubuntu-latest
steps:
- name: Check labels
env:
LABELS_JSON: ${{ needs.sync-type-labels.outputs.labels }}
run: |
labels=$(jq -r '.pull_request.labels[] | .name' ${{github.event_path }})
echo "$labels"
if [ "$labels" == "cla-signed" ]; then
echo "$LABELS_JSON" | jq -r '.[]'
if [ "$(echo "$LABELS_JSON" | jq -c .)" = '["cla-signed"]' ]; then
exit 1
fi

View File

@@ -34,7 +34,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
key: |
@@ -48,7 +48,7 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -76,7 +76,7 @@ jobs:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
key: |
@@ -88,7 +88,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -119,7 +119,7 @@ jobs:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
key: |
@@ -131,7 +131,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -177,7 +177,7 @@ jobs:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
key: |
@@ -189,7 +189,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -221,7 +221,7 @@ jobs:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
key: |
@@ -233,7 +233,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -265,7 +265,7 @@ jobs:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
key: |
@@ -307,7 +307,7 @@ jobs:
echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
key: >-
@@ -318,7 +318,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore mypy cache
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: .mypy_cache
key: >-
@@ -351,7 +351,7 @@ jobs:
cosign-release: "v2.5.3"
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
key: |
@@ -386,7 +386,7 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: coverage
path: .coverage
@@ -406,7 +406,7 @@ jobs:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
key: |

View File

@@ -36,7 +36,7 @@ jobs:
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
- name: Run Release Drafter
uses: release-drafter/release-drafter@5de93583980a40bd78603b6dfdcda5b4df377b32 # v7.2.0
uses: release-drafter/release-drafter@139054aeaa9adc52ab36ddf67437541f039b88e2 # v7.1.1
with:
tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }}

View File

@@ -12,7 +12,7 @@ jobs:
if: github.event.issue.type.name == 'Task'
steps:
- name: Check if user is authorized
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
const issueAuthor = context.payload.issue.user.login;

View File

@@ -12,7 +12,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Sentry Release
uses: getsentry/action-release@5657c9e888b4e2cc85f4d29143ea4131fde4a73a # v3.6.0
uses: getsentry/action-release@dab6548b3c03c4717878099e43782cf5be654289 # v3.5.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}

View File

@@ -40,11 +40,9 @@ RUN \
${LOCAL_WHEELS:+--find-links $LOCAL_WHEELS}
# Install Home Assistant Supervisor
ARG BUILD_VERSION="9999.09.9.dev9999"
COPY . supervisor
RUN \
sed -i "s/^SUPERVISOR_VERSION =.*/SUPERVISOR_VERSION = \"${BUILD_VERSION}\"/g" /usr/src/supervisor/supervisor/const.py \
&& uv pip install --no-cache -e ./supervisor \
uv pip install --no-cache -e ./supervisor \
&& python3 -m compileall ./supervisor/supervisor

View File

@@ -1,5 +1,5 @@
[build-system]
requires = ["setuptools~=82.0.0"]
requires = ["setuptools~=82.0.0", "wheel~=0.46.1"]
build-backend = "setuptools.build_meta"
[project]
@@ -208,9 +208,6 @@ score = false
[tool.pylint.TYPECHECK]
ignored-modules = ["distutils"]
# re.Pattern methods are C extension methods; pylint cannot detect them when
# re.Pattern is used as a dataclass field type annotation (false positive).
generated-members = ["re.Pattern.*"]
[tool.pylint.FORMAT]
expected-line-ending-format = "LF"

View File

@@ -1,6 +1,6 @@
aiodns==4.0.0
aiodocker==0.26.0
aiohttp==3.13.5
aiohttp==3.13.4
atomicwrites-homeassistant==1.4.1
attrs==26.1.0
awesomeversion==25.8.0
@@ -9,21 +9,21 @@ brotli==1.2.0
ciso8601==2.3.3
colorlog==6.10.1
cpe==1.3.1
cryptography==46.0.7
cryptography==46.0.6
debugpy==1.8.20
deepmerge==2.0
dirhash==0.5.0
faust-cchardet==2.1.19
gitpython==3.1.47
gitpython==3.1.46
jinja2==3.1.6
log-rate-limit==1.4.2
orjson==3.11.8
orjson==3.11.7
pulsectl==24.12.0
pyudev==0.24.4
PyYAML==6.0.3
securetar==2026.4.1
sentry-sdk==2.58.0
securetar==2026.2.0
sentry-sdk==2.56.0
setuptools==82.0.1
voluptuous==0.16.0
dbus-fast==4.0.4
dbus-fast==4.0.0
zlib-fast==0.2.1

View File

@@ -1,14 +1,14 @@
astroid==4.0.3
coverage==7.13.5
mypy==1.20.2
pre-commit==4.6.0
mypy==1.19.1
pre-commit==4.5.1
pylint==4.0.5
pytest-aiohttp==1.1.0
pytest-asyncio==1.3.0
pytest-cov==7.1.0
pytest-timeout==2.4.0
pytest==9.0.3
ruff==0.15.11
pytest==9.0.2
ruff==0.15.8
time-machine==3.2.0
types-pyyaml==6.0.12.20260408
types-pyyaml==6.0.12.20250915
urllib3==2.6.3

View File

@@ -5,9 +5,7 @@ import re
from setuptools import setup
RE_SUPERVISOR_VERSION = re.compile(
r'^SUPERVISOR_VERSION =\s*"?((?P<git_sha>[0-9a-f]{40})|[^"]+)"?$'
)
RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")
SUPERVISOR_DIR = Path(__file__).parent
REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt"
@@ -18,15 +16,13 @@ CONSTANTS = CONST_FILE.read_text(encoding="utf-8")
def _get_supervisor_version():
for line in CONSTANTS.split("\n"):
for line in CONSTANTS.split("\n"):
if match := RE_SUPERVISOR_VERSION.match(line):
if git_sha := match.group("git_sha"):
return f"9999.09.9.dev9999+{git_sha}"
return match.group(1)
return "9999.09.9.dev9999"
setup(
version=_get_supervisor_version(),
dependencies=REQUIREMENTS.split("\n"),
dependencies=REQUIREMENTS.split("\n"),
)

View File

@@ -1 +1 @@
"""Init file for Supervisor apps."""
"""Init file for Supervisor add-ons."""

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
"""Supervisor app build environment."""
"""Supervisor add-on build environment."""
from __future__ import annotations
@@ -7,10 +7,9 @@ from functools import cached_property
import json
import logging
from pathlib import Path, PurePath
from typing import TYPE_CHECKING, Any, Self
from typing import TYPE_CHECKING, Any
from awesomeversion import AwesomeVersion
import voluptuous as vol
from ..const import (
ATTR_ARGS,
@@ -20,13 +19,7 @@ from ..const import (
ATTR_SQUASH,
ATTR_USERNAME,
FILE_SUFFIX_CONFIGURATION,
LABEL_ARCH,
LABEL_DESCRIPTION,
LABEL_NAME,
LABEL_TYPE,
LABEL_URL,
LABEL_VERSION,
META_APP,
META_ADDON,
SOCKET_DOCKER,
CpuArch,
)
@@ -34,128 +27,98 @@ from ..coresys import CoreSys, CoreSysAttributes
from ..docker.const import DOCKER_HUB, DOCKER_HUB_LEGACY, DockerMount, MountType
from ..docker.interface import MAP_ARCH
from ..exceptions import (
AppBuildArchitectureNotSupportedError,
AppBuildDockerfileMissingError,
AddonBuildArchitectureNotSupportedError,
AddonBuildDockerfileMissingError,
ConfigurationFileError,
HassioArchNotFound,
)
from ..utils.common import find_one_filetype, read_json_or_yaml_file
from ..utils.common import FileConfiguration, find_one_filetype
from .validate import SCHEMA_BUILD_CONFIG
if TYPE_CHECKING:
from .manager import AnyApp
from .manager import AnyAddon
_LOGGER: logging.Logger = logging.getLogger(__name__)
class AppBuild(CoreSysAttributes):
"""Handle build options for apps."""
class AddonBuild(FileConfiguration, CoreSysAttributes):
"""Handle build options for add-ons."""
def __init__(self, coresys: CoreSys, app: AnyApp, data: dict[str, Any]) -> None:
"""Initialize Supervisor app builder."""
def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
"""Initialize Supervisor add-on builder."""
self.coresys: CoreSys = coresys
self.app = app
self._build_config: dict[str, Any] = data
self.addon = addon
@classmethod
async def create(cls, coresys: CoreSys, app: AnyApp) -> Self:
"""Create an AppBuild by reading the build configuration from disk."""
data = await coresys.run_in_executor(cls._read_build_config, app)
# Search for build file later in executor
super().__init__(None, SCHEMA_BUILD_CONFIG)
if data:
_LOGGER.warning(
"App %s uses build.yaml which is deprecated. "
"Move build parameters into the Dockerfile directly.",
app.slug,
)
if data[ATTR_SQUASH]:
_LOGGER.warning(
"Ignoring squash build option for %s as Docker BuildKit"
" does not support it.",
app.slug,
)
return cls(coresys, app, data or {})
@staticmethod
def _read_build_config(app: AnyApp) -> dict[str, Any] | None:
"""Find and read the build configuration file.
def _get_build_file(self) -> Path:
"""Get build file.
Must be run in executor.
"""
try:
build_file = find_one_filetype(
app.path_location, "build", FILE_SUFFIX_CONFIGURATION
return find_one_filetype(
self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
)
except ConfigurationFileError:
# No build config file found, assuming modernized build
return None
return self.addon.path_location / "build.json"
try:
raw = read_json_or_yaml_file(build_file)
build_config = SCHEMA_BUILD_CONFIG(raw)
except ConfigurationFileError as ex:
_LOGGER.exception(
"Error reading %s build config (%s), using defaults",
app.slug,
ex,
)
build_config = SCHEMA_BUILD_CONFIG({})
except vol.Invalid as ex:
_LOGGER.warning(
"Error parsing %s build config (%s), using defaults", app.slug, ex
)
build_config = SCHEMA_BUILD_CONFIG({})
async def read_data(self) -> None:
"""Load data from file."""
if not self._file:
self._file = await self.sys_run_in_executor(self._get_build_file)
# Default base image is passed in BUILD_FROM only when build.yaml is used
# (this is legacy behavior - without build config, Dockerfile should specify it)
if not build_config[ATTR_BUILD_FROM]:
build_config[ATTR_BUILD_FROM] = "ghcr.io/home-assistant/base:latest"
await super().read_data()
return build_config
async def save_data(self):
"""Ignore save function."""
raise RuntimeError()
@cached_property
def arch(self) -> CpuArch:
"""Return arch of the app."""
return self.sys_arch.match([self.app.arch])
"""Return arch of the add-on."""
return self.sys_arch.match([self.addon.arch])
@property
def base_image(self) -> str | None:
"""Return base image for this app, or None to use Dockerfile default."""
# No build config (otherwise default is coerced when reading the config)
if not self._build_config.get(ATTR_BUILD_FROM):
return None
def base_image(self) -> str:
"""Return base image for this add-on."""
if not self._data[ATTR_BUILD_FROM]:
return f"ghcr.io/home-assistant/{self.arch!s}-base:latest"
# Single base image in build config
if isinstance(self._build_config[ATTR_BUILD_FROM], str):
return self._build_config[ATTR_BUILD_FROM]
if isinstance(self._data[ATTR_BUILD_FROM], str):
return self._data[ATTR_BUILD_FROM]
# Dict - per-arch base images in build config
if self.arch not in self._build_config[ATTR_BUILD_FROM]:
# Evaluate correct base image
if self.arch not in self._data[ATTR_BUILD_FROM]:
raise HassioArchNotFound(
f"App {self.app.slug} is not supported on {self.arch}"
f"Add-on {self.addon.slug} is not supported on {self.arch}"
)
return self._build_config[ATTR_BUILD_FROM][self.arch]
return self._data[ATTR_BUILD_FROM][self.arch]
@property
def squash(self) -> bool:
"""Return True or False if squash is active."""
return self._data[ATTR_SQUASH]
@property
def additional_args(self) -> dict[str, str]:
"""Return additional Docker build arguments."""
return self._build_config.get(ATTR_ARGS, {})
return self._data[ATTR_ARGS]
@property
def additional_labels(self) -> dict[str, str]:
"""Return additional Docker labels."""
return self._build_config.get(ATTR_LABELS, {})
return self._data[ATTR_LABELS]
def get_dockerfile(self) -> Path:
"""Return Dockerfile path.
Must be run in executor.
"""
if self.app.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
return self.app.path_location.joinpath(f"Dockerfile.{self.arch}")
return self.app.path_location.joinpath("Dockerfile")
if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
return self.addon.path_location.joinpath("Dockerfile")
async def is_valid(self) -> None:
"""Return true if the build env is valid."""
@@ -163,55 +126,67 @@ class AppBuild(CoreSysAttributes):
def build_is_valid() -> bool:
return all(
[
self.app.path_location.is_dir(),
self.addon.path_location.is_dir(),
self.get_dockerfile().is_file(),
]
)
try:
if not await self.sys_run_in_executor(build_is_valid):
raise AppBuildDockerfileMissingError(_LOGGER.error, app=self.app.slug)
raise AddonBuildDockerfileMissingError(
_LOGGER.error, addon=self.addon.slug
)
except HassioArchNotFound:
raise AppBuildArchitectureNotSupportedError(
raise AddonBuildArchitectureNotSupportedError(
_LOGGER.error,
app=self.app.slug,
app_arch_list=self.app.supported_arch,
addon=self.addon.slug,
addon_arch_list=self.addon.supported_arch,
system_arch_list=[arch.value for arch in self.sys_arch.supported],
) from None
def _registry_key(self, registry: str) -> str:
"""Return the Docker config.json key for a registry."""
if registry in (DOCKER_HUB, DOCKER_HUB_LEGACY):
return "https://index.docker.io/v1/"
return registry
def _registry_auth(self, registry: str) -> str:
"""Return base64-encoded auth string for a registry."""
stored = self.sys_docker.config.registries[registry]
return base64.b64encode(
f"{stored[ATTR_USERNAME]}:{stored[ATTR_PASSWORD]}".encode()
).decode()
def get_docker_config_json(self) -> str | None:
"""Generate Docker config.json content with all configured registry credentials.
"""Generate Docker config.json content with registry credentials for base image.
Returns a JSON string with registry credentials for the base image's registry,
or None if no matching registry is configured.
Raises:
HassioArchNotFound: If the add-on is not supported on the current architecture.
Returns a JSON string with registry credentials, or None if no registries
are configured.
"""
# Early return before accessing base_image to avoid unnecessary arch lookup
if not self.sys_docker.config.registries:
return None
auths = {
self._registry_key(registry): {"auth": self._registry_auth(registry)}
for registry in self.sys_docker.config.registries
}
return json.dumps({"auths": auths})
registry = self.sys_docker.config.get_registry_for_image(self.base_image)
if not registry:
return None
stored = self.sys_docker.config.registries[registry]
username = stored[ATTR_USERNAME]
password = stored[ATTR_PASSWORD]
# Docker config.json uses base64-encoded "username:password" for auth
auth_string = base64.b64encode(f"{username}:{password}".encode()).decode()
# Use the actual registry URL for the key
# Docker Hub uses "https://index.docker.io/v1/" as the key
# Support both docker.io (official) and hub.docker.com (legacy)
registry_key = (
"https://index.docker.io/v1/"
if registry in (DOCKER_HUB, DOCKER_HUB_LEGACY)
else registry
)
config = {"auths": {registry_key: {"auth": auth_string}}}
return json.dumps(config)
def get_docker_args(
self, version: AwesomeVersion, image_tag: str, docker_config_path: Path | None
) -> dict[str, Any]:
"""Create a dict with Docker run args."""
dockerfile_path = self.get_dockerfile().relative_to(self.app.path_location)
dockerfile_path = self.get_dockerfile().relative_to(self.addon.path_location)
build_cmd = [
"docker",
@@ -228,40 +203,34 @@ class AppBuild(CoreSysAttributes):
]
labels = {
LABEL_VERSION: version,
LABEL_ARCH: self.arch,
LABEL_TYPE: META_APP,
"io.hass.version": version,
"io.hass.arch": self.arch,
"io.hass.type": META_ADDON,
"io.hass.name": self._fix_label("name"),
"io.hass.description": self._fix_label("description"),
**self.additional_labels,
}
# Set name only if non-empty, could have been set in Dockerfile
if name := self._fix_label("name"):
labels[LABEL_NAME] = name
# Set description only if non-empty, could have been set in Dockerfile
if description := self._fix_label("description"):
labels[LABEL_DESCRIPTION] = description
if self.app.url:
labels[LABEL_URL] = self.app.url
if self.addon.url:
labels["io.hass.url"] = self.addon.url
for key, value in labels.items():
build_cmd.extend(["--label", f"{key}={value}"])
build_args = {
"BUILD_FROM": self.base_image,
"BUILD_VERSION": version,
"BUILD_ARCH": self.arch,
**self.additional_args,
}
if self.base_image is not None:
build_args["BUILD_FROM"] = self.base_image
for key, value in build_args.items():
build_cmd.extend(["--build-arg", f"{key}={value}"])
# The app path will be mounted from the host system
app_extern_path = self.sys_config.local_to_extern_path(self.app.path_location)
# The addon path will be mounted from the host system
addon_extern_path = self.sys_config.local_to_extern_path(
self.addon.path_location
)
mounts = [
DockerMount(
@@ -272,7 +241,7 @@ class AppBuild(CoreSysAttributes):
),
DockerMount(
type=MountType.BIND,
source=app_extern_path.as_posix(),
source=addon_extern_path.as_posix(),
target="/addon",
read_only=True,
),
@@ -300,5 +269,5 @@ class AppBuild(CoreSysAttributes):
def _fix_label(self, label_name: str) -> str:
"""Remove characters that are not supported."""
label = getattr(self.app, label_name, "")
label = getattr(self.addon, label_name, "")
return label.replace("'", "")

View File

@@ -1,4 +1,4 @@
"""Configuration Objects for App Config."""
"""Configuration Objects for Addon Config."""
from dataclasses import dataclass

View File

@@ -1,4 +1,4 @@
"""App static data."""
"""Add-on static data."""
from datetime import timedelta
from enum import StrEnum
@@ -6,15 +6,15 @@ from enum import StrEnum
from ..jobs.const import JobCondition
class AppBackupMode(StrEnum):
"""Backup mode of an App."""
class AddonBackupMode(StrEnum):
"""Backup mode of an Add-on."""
HOT = "hot"
COLD = "cold"
class MappingType(StrEnum):
"""Mapping type of an App Folder."""
"""Mapping type of an Add-on Folder."""
DATA = "data"
CONFIG = "config"
@@ -38,7 +38,7 @@ WATCHDOG_MAX_ATTEMPTS = 5
WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
WATCHDOG_THROTTLE_MAX_CALLS = 10
APP_UPDATE_CONDITIONS = [
ADDON_UPDATE_CONDITIONS = [
JobCondition.FREE_SPACE,
JobCondition.HEALTHY,
JobCondition.INTERNET_HOST,

View File

@@ -1,4 +1,4 @@
"""Init file for Supervisor app data."""
"""Init file for Supervisor add-on data."""
from copy import deepcopy
from typing import Any
@@ -12,16 +12,16 @@ from ..const import (
FILE_HASSIO_ADDONS,
)
from ..coresys import CoreSys, CoreSysAttributes
from ..store.addon import AppStore
from ..store.addon import AddonStore
from ..utils.common import FileConfiguration
from .addon import App
from .addon import Addon
from .validate import SCHEMA_ADDONS_FILE
Config = dict[str, Any]
class AppsData(FileConfiguration, CoreSysAttributes):
"""Hold data for installed Apps inside Supervisor."""
class AddonsData(FileConfiguration, CoreSysAttributes):
"""Hold data for installed Add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize data holder."""
@@ -30,40 +30,42 @@ class AppsData(FileConfiguration, CoreSysAttributes):
@property
def user(self):
"""Return local app user data."""
"""Return local add-on user data."""
return self._data[ATTR_USER]
@property
def system(self):
"""Return local app data."""
"""Return local add-on data."""
return self._data[ATTR_SYSTEM]
async def install(self, app: AppStore) -> None:
"""Set app as installed."""
self.system[app.slug] = deepcopy(app.data)
self.user[app.slug] = {
async def install(self, addon: AddonStore) -> None:
"""Set addon as installed."""
self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug] = {
ATTR_OPTIONS: {},
ATTR_VERSION: app.version,
ATTR_IMAGE: app.image,
ATTR_VERSION: addon.version,
ATTR_IMAGE: addon.image,
}
await self.save_data()
async def uninstall(self, app: App) -> None:
"""Set app as uninstalled."""
self.system.pop(app.slug, None)
self.user.pop(app.slug, None)
async def uninstall(self, addon: Addon) -> None:
"""Set add-on as uninstalled."""
self.system.pop(addon.slug, None)
self.user.pop(addon.slug, None)
await self.save_data()
async def update(self, app: AppStore) -> None:
"""Update version of app."""
self.system[app.slug] = deepcopy(app.data)
self.user[app.slug].update({ATTR_VERSION: app.version, ATTR_IMAGE: app.image})
async def update(self, addon: AddonStore) -> None:
"""Update version of add-on."""
self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug].update(
{ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
)
await self.save_data()
async def restore(
self, slug: str, user: Config, system: Config, image: str
) -> None:
"""Restore data to app."""
"""Restore data to add-on."""
self.user[slug] = deepcopy(user)
self.system[slug] = deepcopy(system)

View File

@@ -1,4 +1,4 @@
"""Supervisor app manager."""
"""Supervisor add-on manager."""
import asyncio
from collections.abc import Awaitable
@@ -9,12 +9,12 @@ from typing import Self, Union
from attr import evolve
from securetar import SecureTarFile
from ..const import AppBoot, AppStartup, AppState
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AppNotSupportedError,
AppsError,
AppsJobError,
AddonNotSupportedError,
AddonsError,
AddonsJobError,
CoreDNSError,
DockerError,
HassioError,
@@ -23,60 +23,60 @@ from ..jobs import ChildJobSyncFilter
from ..jobs.const import JobConcurrency
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType, UnhealthyReason
from ..store.addon import AppStore
from ..store.addon import AddonStore
from ..utils.sentry import async_capture_exception
from .addon import App
from .const import APP_UPDATE_CONDITIONS
from .data import AppsData
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData
_LOGGER: logging.Logger = logging.getLogger(__name__)
AnyApp = Union[App, AppStore]
AnyAddon = Union[Addon, AddonStore]
class AppManager(CoreSysAttributes):
"""Manage apps inside Supervisor."""
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.data: AppsData = AppsData(coresys)
self.local: dict[str, App] = {}
self.store: dict[str, AppStore] = {}
self.data: AddonsData = AddonsData(coresys)
self.local: dict[str, Addon] = {}
self.store: dict[str, AddonStore] = {}
@property
def all(self) -> list[AnyApp]:
"""Return a list of all apps."""
apps: dict[str, AnyApp] = {**self.store, **self.local}
return list(apps.values())
def all(self) -> list[AnyAddon]:
"""Return a list of all add-ons."""
addons: dict[str, AnyAddon] = {**self.store, **self.local}
return list(addons.values())
@property
def installed(self) -> list[App]:
"""Return a list of all installed apps."""
def installed(self) -> list[Addon]:
"""Return a list of all installed add-ons."""
return list(self.local.values())
def get(self, app_slug: str, local_only: bool = False) -> AnyApp | None:
"""Return an app from slug.
def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
"""Return an add-on from slug.
Prio:
1 - Local
2 - Store
"""
if app_slug in self.local:
return self.local[app_slug]
if addon_slug in self.local:
return self.local[addon_slug]
if not local_only:
return self.store.get(app_slug)
return self.store.get(addon_slug)
return None
def get_local_only(self, app_slug: str) -> App | None:
"""Return an installed app from slug."""
return self.local.get(app_slug)
def get_local_only(self, addon_slug: str) -> Addon | None:
"""Return an installed add-on from slug."""
return self.local.get(addon_slug)
def from_token(self, token: str) -> App | None:
"""Return an app from Supervisor token."""
for app in self.installed:
if token == app.supervisor_token:
return app
def from_token(self, token: str) -> Addon | None:
"""Return an add-on from Supervisor token."""
for addon in self.installed:
if token == addon.supervisor_token:
return addon
return None
async def load_config(self) -> Self:
@@ -85,61 +85,61 @@ class AppManager(CoreSysAttributes):
return self
async def load(self) -> None:
"""Start up app management."""
# Refresh cache for all store apps
"""Start up add-on management."""
# Refresh cache for all store addons
tasks: list[Awaitable[None]] = [
store.refresh_path_cache() for store in self.store.values()
]
# Load all installed apps
# Load all installed addons
for slug in self.data.system:
app = self.local[slug] = App(self.coresys, slug)
tasks.append(app.load())
addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(addon.load())
# Run initial tasks
_LOGGER.info("Found %d installed apps", len(self.data.system))
_LOGGER.info("Found %d installed add-ons", len(self.data.system))
if tasks:
await asyncio.gather(*tasks)
# Sync DNS
await self.sync_dns()
async def boot(self, stage: AppStartup) -> None:
"""Boot apps with mode auto."""
tasks: list[App] = []
for app in self.installed:
if app.boot != AppBoot.AUTO or app.startup != stage:
async def boot(self, stage: AddonStartup) -> None:
"""Boot add-ons with mode auto."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
continue
if (
app.host_network
addon.host_network
and UnhealthyReason.DOCKER_GATEWAY_UNPROTECTED
in self.sys_resolution.unhealthy
):
_LOGGER.warning(
"Skipping boot of app %s because gateway firewall"
"Skipping boot of add-on %s because gateway firewall"
" rules are not active",
app.slug,
addon.slug,
)
continue
tasks.append(app)
tasks.append(addon)
# Evaluate apps which need to be started
_LOGGER.info("Phase '%s' starting %d apps", stage, len(tasks))
# Evaluate add-ons which need to be started
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
if not tasks:
return
# Start Apps sequential
# Start Add-ons sequential
# avoid issue on slow IO
# Config.wait_boot is deprecated. Until apps update with healthchecks,
# Config.wait_boot is deprecated. Until addons update with healthchecks,
# add a sleep task for it to keep the same minimum amount of wait time
wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
for app in tasks:
for addon in tasks:
try:
if start_task := await app.start():
if start_task := await addon.start():
wait_boot.append(start_task)
except HassioError:
self.sys_resolution.add_issue(
evolve(app.boot_failed_issue),
evolve(addon.boot_failed_issue),
suggestions=[
SuggestionType.EXECUTE_START,
SuggestionType.DISABLE_BOOT,
@@ -148,50 +148,50 @@ class AppManager(CoreSysAttributes):
else:
continue
_LOGGER.warning("Can't start app %s", app.slug)
_LOGGER.warning("Can't start Add-on %s", addon.slug)
# Ignore exceptions from waiting for app startup, app errors handled elsewhere
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*wait_boot, return_exceptions=True)
# After waiting for startup, create an issue for boot apps that are error or unknown state
# Ignore stopped as single shot apps can be run at boot and this is successful exit
# Timeout waiting for startup is not a failure, app is probably just slow
for app in tasks:
if app.state in {AppState.ERROR, AppState.UNKNOWN}:
# After waiting for startup, create an issue for boot addons that are error or unknown state
# Ignore stopped as single shot addons can be run at boot and this is successful exit
# Timeout waiting for startup is not a failure, addon is probably just slow
for addon in tasks:
if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}:
self.sys_resolution.add_issue(
evolve(app.boot_failed_issue),
evolve(addon.boot_failed_issue),
suggestions=[
SuggestionType.EXECUTE_START,
SuggestionType.DISABLE_BOOT,
],
)
async def shutdown(self, stage: AppStartup) -> None:
"""Shutdown apps."""
tasks: list[App] = []
for app in self.installed:
if app.state != AppState.STARTED or app.startup != stage:
async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.state != AddonState.STARTED or addon.startup != stage:
continue
tasks.append(app)
tasks.append(addon)
# Evaluate apps which need to be stopped
_LOGGER.info("Phase '%s' stopping %d apps", stage, len(tasks))
# Evaluate add-ons which need to be stopped
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
if not tasks:
return
# Stop Apps sequential
# Stop Add-ons sequential
# avoid issue on slow IO
for app in tasks:
for addon in tasks:
try:
await app.stop()
await addon.stop()
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop app %s: %s", app.slug, err)
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
await async_capture_exception(err)
@Job(
name="addon_manager_install",
conditions=APP_UPDATE_CONDITIONS,
on_condition=AppsJobError,
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
concurrency=JobConcurrency.QUEUE,
child_job_syncs=[
ChildJobSyncFilter("docker_interface_install", progress_allocation=1.0)
@@ -200,15 +200,15 @@ class AppManager(CoreSysAttributes):
async def install(
self, slug: str, *, validation_complete: asyncio.Event | None = None
) -> None:
"""Install an app."""
"""Install an add-on."""
self.sys_jobs.current.reference = slug
if slug in self.local:
raise AppsError(f"App {slug} is already installed", _LOGGER.warning)
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
store = self.store.get(slug)
if not store:
raise AppsError(f"App {slug} does not exist", _LOGGER.error)
raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
store.validate_availability()
@@ -216,37 +216,37 @@ class AppManager(CoreSysAttributes):
if validation_complete:
validation_complete.set()
await App(self.coresys, slug).install()
await Addon(self.coresys, slug).install()
_LOGGER.info("App '%s' successfully installed", slug)
_LOGGER.info("Add-on '%s' successfully installed", slug)
@Job(name="addon_manager_uninstall")
async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
"""Remove an app."""
"""Remove an add-on."""
if slug not in self.local:
_LOGGER.warning("App %s is not installed", slug)
_LOGGER.warning("Add-on %s is not installed", slug)
return
shared_image = any(
self.local[slug].image == app.image
and self.local[slug].version == app.version
for app in self.installed
if app.slug != slug
self.local[slug].image == addon.image
and self.local[slug].version == addon.version
for addon in self.installed
if addon.slug != slug
)
await self.local[slug].uninstall(
remove_config=remove_config, remove_image=not shared_image
)
_LOGGER.info("App '%s' successfully removed", slug)
_LOGGER.info("Add-on '%s' successfully removed", slug)
@Job(
name="addon_manager_update",
conditions=APP_UPDATE_CONDITIONS,
on_condition=AppsJobError,
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
# We assume for now the docker image pull is 100% of this task for progress
# allocation. But from a user perspective that isn't true. Other steps
# that take time which is not accounted for in progress include:
# partial backup, image cleanup, apparmor update, and app restart
# partial backup, image cleanup, apparmor update, and addon restart
child_job_syncs=[
ChildJobSyncFilter("docker_interface_install", progress_allocation=1.0)
],
@@ -258,23 +258,25 @@ class AppManager(CoreSysAttributes):
*,
validation_complete: asyncio.Event | None = None,
) -> asyncio.Task | None:
"""Update app.
"""Update add-on.
Returns a Task that completes when app has state 'started' (see app.start)
if app is started after update. Else nothing is returned.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after update. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AppsError(f"App {slug} is not installed", _LOGGER.error)
app = self.local[slug]
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if app.is_detached:
raise AppsError(f"App {slug} is not available inside store", _LOGGER.error)
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
if app.version == store.version:
raise AppsError(f"No update available for app {slug}", _LOGGER.warning)
if addon.version == store.version:
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
# Check if available, Maybe something have changed
store.validate_availability()
@@ -285,14 +287,14 @@ class AppManager(CoreSysAttributes):
if backup:
await self.sys_backups.do_backup_partial(
name=f"addon_{app.slug}_{app.version}",
name=f"addon_{addon.slug}_{addon.version}",
homeassistant=False,
apps=[app.slug],
addons=[addon.slug],
)
task = await app.update()
task = await addon.update()
_LOGGER.info("App '%s' successfully updated", slug)
_LOGGER.info("Add-on '%s' successfully updated", slug)
return task
@Job(
@@ -302,35 +304,37 @@ class AppManager(CoreSysAttributes):
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AppsJobError,
on_condition=AddonsJobError,
)
async def rebuild(self, slug: str, *, force: bool = False) -> asyncio.Task | None:
"""Perform a rebuild of local build app.
"""Perform a rebuild of local build add-on.
Returns a Task that completes when app has state 'started' (see app.start)
if app is started after rebuild. Else nothing is returned.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after rebuild. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AppsError(f"App {slug} is not installed", _LOGGER.error)
app = self.local[slug]
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if app.is_detached:
raise AppsError(f"App {slug} is not available inside store", _LOGGER.error)
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
# Check if a rebuild is possible now
if app.version != store.version:
raise AppsError(
if addon.version != store.version:
raise AddonsError(
"Version changed, use Update instead Rebuild", _LOGGER.error
)
if not force and not app.need_build:
raise AppNotSupportedError(
"Can't rebuild an image-based app", _LOGGER.error
if not force and not addon.need_build:
raise AddonNotSupportedError(
"Can't rebuild a image based add-on", _LOGGER.error
)
return await app.rebuild()
return await addon.rebuild()
@Job(
name="addon_manager_restore",
@@ -339,36 +343,36 @@ class AppManager(CoreSysAttributes):
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AppsJobError,
on_condition=AddonsJobError,
)
async def restore(self, slug: str, tar_file: SecureTarFile) -> asyncio.Task | None:
"""Restore state of an app.
"""Restore state of an add-on.
Returns a Task that completes when app has state 'started' (see app.start)
if app is started after restore. Else nothing is returned.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after restore. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
_LOGGER.debug("App %s is not locally available for restore", slug)
app = App(self.coresys, slug)
_LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug)
had_ingress: bool | None = False
else:
_LOGGER.debug("App %s is locally available for restore", slug)
app = self.local[slug]
had_ingress = app.ingress_panel
_LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug]
had_ingress = addon.ingress_panel
wait_for_start = await app.restore(tar_file)
wait_for_start = await addon.restore(tar_file)
# Check if new
if slug not in self.local:
_LOGGER.info("Detected new app after restore: %s", slug)
self.local[slug] = app
_LOGGER.info("Detect new Add-on after restore %s", slug)
self.local[slug] = addon
# Update ingress
if had_ingress != app.ingress_panel:
if had_ingress != addon.ingress_panel:
await self.sys_ingress.reload()
await self.sys_ingress.update_hass_panel(app)
await self.sys_ingress.update_hass_panel(addon)
return wait_for_start
@@ -377,60 +381,60 @@ class AppManager(CoreSysAttributes):
conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
)
async def repair(self) -> None:
"""Repair local apps."""
needs_repair: list[App] = []
"""Repair local add-ons."""
needs_repair: list[Addon] = []
# Evaluate Apps to repair
for app in self.installed:
if await app.instance.exists():
# Evaluate Add-ons to repair
for addon in self.installed:
if await addon.instance.exists():
continue
needs_repair.append(app)
needs_repair.append(addon)
_LOGGER.info("Found %d apps to repair", len(needs_repair))
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
if not needs_repair:
return
for app in needs_repair:
_LOGGER.info("Repairing for app: %s", app.slug)
for addon in needs_repair:
_LOGGER.info("Repairing for add-on: %s", addon.slug)
with suppress(DockerError, KeyError):
# Need pull a image again
if not app.need_build:
await app.instance.install(app.version, app.image)
if not addon.need_build:
await addon.instance.install(addon.version, addon.image)
continue
# Need local lookup
if app.need_build and not app.is_detached:
store = self.store[app.slug]
# If this app is available for rebuild
if app.version == store.version:
await app.instance.install(app.version, app.image)
if addon.need_build and not addon.is_detached:
store = self.store[addon.slug]
# If this add-on is available for rebuild
if addon.version == store.version:
await addon.instance.install(addon.version, addon.image)
continue
_LOGGER.error("Can't repair %s", app.slug)
with suppress(AppsError):
await self.uninstall(app.slug)
_LOGGER.error("Can't repair %s", addon.slug)
with suppress(AddonsError):
await self.uninstall(addon.slug)
async def sync_dns(self) -> None:
"""Sync apps DNS names."""
"""Sync add-ons DNS names."""
# Update hosts
add_host_coros: list[Awaitable[None]] = []
for app in self.installed:
for addon in self.installed:
try:
if not await app.instance.is_running():
if not await addon.instance.is_running():
continue
except DockerError as err:
_LOGGER.warning("App %s is corrupt: %s", app.slug, err)
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
self.sys_resolution.create_issue(
IssueType.CORRUPT_DOCKER,
ContextType.ADDON,
reference=app.slug,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
await async_capture_exception(err)
else:
add_host_coros.append(
self.sys_plugins.dns.add_host(
ipv4=app.ip_address, names=[app.hostname], write=False
ipv4=addon.ip_address, names=[addon.hostname], write=False
)
)

View File

@@ -1,4 +1,4 @@
"""Init file for Supervisor apps."""
"""Init file for Supervisor add-ons."""
from abc import ABC, abstractmethod
from collections import defaultdict
@@ -82,19 +82,19 @@ from ..const import (
SECURITY_DEFAULT,
SECURITY_DISABLE,
SECURITY_PROFILE,
AppBoot,
AppBootConfig,
AppStage,
AppStartup,
AddonBoot,
AddonBootConfig,
AddonStage,
AddonStartup,
CpuArch,
)
from ..coresys import CoreSys
from ..docker.const import Capabilities
from ..exceptions import (
AppNotSupportedArchitectureError,
AppNotSupportedError,
AppNotSupportedHomeAssistantVersionError,
AppNotSupportedMachineTypeError,
AddonNotSupportedArchitectureError,
AddonNotSupportedError,
AddonNotSupportedHomeAssistantVersionError,
AddonNotSupportedMachineTypeError,
HassioArchNotFound,
)
from ..jobs.const import JOB_GROUP_ADDON
@@ -107,10 +107,10 @@ from .const import (
ATTR_BREAKING_VERSIONS,
ATTR_PATH,
ATTR_READ_ONLY,
AppBackupMode,
AddonBackupMode,
MappingType,
)
from .options import AppOptions, UiOptions
from .options import AddonOptions, UiOptions
from .validate import RE_SERVICE
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -118,8 +118,8 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
Data = dict[str, Any]
class AppModel(JobGroup, ABC):
"""App Data layout."""
class AddonModel(JobGroup, ABC):
"""Add-on Data layout."""
def __init__(self, coresys: CoreSys, slug: str):
"""Initialize data holder."""
@@ -135,21 +135,21 @@ class AppModel(JobGroup, ABC):
@property
@abstractmethod
def data(self) -> Data:
"""Return app config/data."""
"""Return add-on config/data."""
@property
@abstractmethod
def is_installed(self) -> bool:
"""Return True if an app is installed."""
"""Return True if an add-on is installed."""
@property
@abstractmethod
def is_detached(self) -> bool:
"""Return True if app is detached."""
"""Return True if add-on is detached."""
@property
def available(self) -> bool:
"""Return True if this app is available on this platform."""
"""Return True if this add-on is available on this platform."""
return self._available(self.data)
@property
@@ -158,14 +158,14 @@ class AppModel(JobGroup, ABC):
return self.data[ATTR_OPTIONS]
@property
def boot_config(self) -> AppBootConfig:
def boot_config(self) -> AddonBootConfig:
"""Return boot config."""
return self.data[ATTR_BOOT]
@property
def boot(self) -> AppBoot:
def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced."""
return AppBoot(self.data[ATTR_BOOT])
return AddonBoot(self.data[ATTR_BOOT])
@property
def auto_update(self) -> bool | None:
@@ -174,27 +174,27 @@ class AppModel(JobGroup, ABC):
@property
def name(self) -> str:
"""Return name of app."""
"""Return name of add-on."""
return self.data[ATTR_NAME]
@property
def hostname(self) -> str:
"""Return slug/id of app."""
"""Return slug/id of add-on."""
return self.slug.replace("_", "-")
@property
def dns(self) -> list[str]:
"""Return list of DNS name for that app."""
"""Return list of DNS name for that add-on."""
return []
@property
def timeout(self) -> int:
"""Return timeout of app for docker stop."""
"""Return timeout of addon for docker stop."""
return self.data[ATTR_TIMEOUT]
@property
def uuid(self) -> str | None:
"""Return an API token for this app."""
"""Return an API token for this add-on."""
return None
@property
@@ -214,22 +214,22 @@ class AppModel(JobGroup, ABC):
@property
def description(self) -> str:
"""Return description of app."""
"""Return description of add-on."""
return self.data[ATTR_DESCRIPTON]
@property
def repository(self) -> str:
"""Return repository of app."""
"""Return repository of add-on."""
return self.data[ATTR_REPOSITORY]
@property
def translations(self) -> dict:
"""Return app translations."""
"""Return add-on translations."""
return self.data[ATTR_TRANSLATIONS]
@property
def latest_version(self) -> AwesomeVersion:
"""Return latest version of app."""
"""Return latest version of add-on."""
return self.data[ATTR_VERSION]
@property
@@ -239,17 +239,17 @@ class AppModel(JobGroup, ABC):
@property
def version(self) -> AwesomeVersion:
"""Return version of app."""
"""Return version of add-on."""
return self.data[ATTR_VERSION]
@property
def protected(self) -> bool:
"""Return if app is in protected mode."""
"""Return if add-on is in protected mode."""
return True
@property
def startup(self) -> AppStartup:
"""Return startup type of app."""
def startup(self) -> AddonStartup:
"""Return startup type of add-on."""
return self.data[ATTR_STARTUP]
@property
@@ -260,8 +260,8 @@ class AppModel(JobGroup, ABC):
return False
@property
def stage(self) -> AppStage:
"""Return stage mode of app."""
def stage(self) -> AddonStage:
"""Return stage mode of add-on."""
return self.data[ATTR_STAGE]
@property
@@ -289,7 +289,7 @@ class AppModel(JobGroup, ABC):
@property
def ports(self) -> dict[str, int | None] | None:
"""Return ports of app."""
"""Return ports of add-on."""
return self.data.get(ATTR_PORTS)
@property
@@ -329,37 +329,37 @@ class AppModel(JobGroup, ABC):
@property
def host_network(self) -> bool:
"""Return True if app run on host network."""
"""Return True if add-on run on host network."""
return self.data[ATTR_HOST_NETWORK]
@property
def host_pid(self) -> bool:
"""Return True if app run on host PID namespace."""
"""Return True if add-on run on host PID namespace."""
return self.data[ATTR_HOST_PID]
@property
def host_ipc(self) -> bool:
"""Return True if app run on host IPC namespace."""
"""Return True if add-on run on host IPC namespace."""
return self.data[ATTR_HOST_IPC]
@property
def host_uts(self) -> bool:
"""Return True if app run on host UTS namespace."""
"""Return True if add-on run on host UTS namespace."""
return self.data[ATTR_HOST_UTS]
@property
def host_dbus(self) -> bool:
"""Return True if app run on host D-BUS."""
"""Return True if add-on run on host D-BUS."""
return self.data[ATTR_HOST_DBUS]
@property
def static_devices(self) -> list[Path]:
"""Return static devices of app."""
"""Return static devices of add-on."""
return [Path(node) for node in self.data.get(ATTR_DEVICES, [])]
@property
def environment(self) -> dict[str, str] | None:
"""Return environment of app."""
"""Return environment of add-on."""
return self.data.get(ATTR_ENVIRONMENT)
@property
@@ -378,22 +378,22 @@ class AppModel(JobGroup, ABC):
@property
def legacy(self) -> bool:
"""Return if the app don't support Home Assistant labels."""
"""Return if the add-on don't support Home Assistant labels."""
return self.data[ATTR_LEGACY]
@property
def access_docker_api(self) -> bool:
"""Return if the app need read-only Docker API access."""
"""Return if the add-on need read-only Docker API access."""
return self.data[ATTR_DOCKER_API]
@property
def access_hassio_api(self) -> bool:
"""Return True if the app access to Supervisor REASTful API."""
"""Return True if the add-on access to Supervisor REASTful API."""
return self.data[ATTR_HASSIO_API]
@property
def access_homeassistant_api(self) -> bool:
"""Return True if the app access to Home Assistant API proxy."""
"""Return True if the add-on access to Home Assistant API proxy."""
return self.data[ATTR_HOMEASSISTANT_API]
@property
@@ -417,28 +417,28 @@ class AppModel(JobGroup, ABC):
return self.data.get(ATTR_BACKUP_POST)
@property
def backup_mode(self) -> AppBackupMode:
def backup_mode(self) -> AddonBackupMode:
"""Return if backup is hot/cold."""
return self.data[ATTR_BACKUP]
@property
def default_init(self) -> bool:
"""Return True if the app have no own init."""
"""Return True if the add-on have no own init."""
return self.data[ATTR_INIT]
@property
def with_stdin(self) -> bool:
"""Return True if the app access use stdin input."""
"""Return True if the add-on access use stdin input."""
return self.data[ATTR_STDIN]
@property
def with_ingress(self) -> bool:
"""Return True if the app access support ingress."""
"""Return True if the add-on access support ingress."""
return self.data[ATTR_INGRESS]
@property
def ingress_panel(self) -> bool | None:
"""Return True if the app access support ingress."""
"""Return True if the add-on access support ingress."""
return None
@property
@@ -448,12 +448,12 @@ class AppModel(JobGroup, ABC):
@property
def with_gpio(self) -> bool:
"""Return True if the app access to GPIO interface."""
"""Return True if the add-on access to GPIO interface."""
return self.data[ATTR_GPIO]
@property
def with_usb(self) -> bool:
"""Return True if the app need USB access."""
"""Return True if the add-on need USB access."""
return self.data[ATTR_USB]
@property
@@ -463,7 +463,7 @@ class AppModel(JobGroup, ABC):
@property
def with_udev(self) -> bool:
"""Return True if the app have his own udev."""
"""Return True if the add-on have his own udev."""
return self.data[ATTR_UDEV]
@property
@@ -473,52 +473,52 @@ class AppModel(JobGroup, ABC):
@property
def with_kernel_modules(self) -> bool:
"""Return True if the app access to kernel modules."""
"""Return True if the add-on access to kernel modules."""
return self.data[ATTR_KERNEL_MODULES]
@property
def with_realtime(self) -> bool:
"""Return True if the app need realtime schedule functions."""
"""Return True if the add-on need realtime schedule functions."""
return self.data[ATTR_REALTIME]
@property
def with_full_access(self) -> bool:
"""Return True if the app want full access to hardware."""
"""Return True if the add-on want full access to hardware."""
return self.data[ATTR_FULL_ACCESS]
@property
def with_devicetree(self) -> bool:
"""Return True if the app read access to devicetree."""
"""Return True if the add-on read access to devicetree."""
return self.data[ATTR_DEVICETREE]
@property
def with_tmpfs(self) -> bool:
"""Return if tmp is in memory of app."""
"""Return if tmp is in memory of add-on."""
return self.data[ATTR_TMPFS]
@property
def access_auth_api(self) -> bool:
"""Return True if the app access to login/auth backend."""
"""Return True if the add-on access to login/auth backend."""
return self.data[ATTR_AUTH_API]
@property
def with_audio(self) -> bool:
"""Return True if the app access to audio."""
"""Return True if the add-on access to audio."""
return self.data[ATTR_AUDIO]
@property
def with_video(self) -> bool:
"""Return True if the app access to video."""
"""Return True if the add-on access to video."""
return self.data[ATTR_VIDEO]
@property
def homeassistant_version(self) -> AwesomeVersion | None:
"""Return min Home Assistant version they needed by App."""
"""Return min Home Assistant version they needed by Add-on."""
return self.data.get(ATTR_HOMEASSISTANT)
@property
def url(self) -> str | None:
"""Return URL of app."""
"""Return URL of add-on."""
return self.data.get(ATTR_URL)
@property
@@ -548,17 +548,17 @@ class AppModel(JobGroup, ABC):
@property
def has_deprecated_arch(self) -> bool:
"""Return True if app includes deprecated architectures."""
"""Return True if add-on includes deprecated architectures."""
return any(arch in ARCH_DEPRECATED for arch in self.supported_arch)
@property
def has_supported_arch(self) -> bool:
"""Return True if app supports any architecture on this system."""
"""Return True if add-on supports any architecture on this system."""
return self.sys_arch.is_supported(self.supported_arch)
@property
def has_deprecated_machine(self) -> bool:
"""Return True if app includes deprecated machine entries."""
"""Return True if add-on includes deprecated machine entries."""
return any(
machine.lstrip("!") in MACHINE_DEPRECATED
for machine in self.supported_machine
@@ -566,7 +566,7 @@ class AppModel(JobGroup, ABC):
@property
def has_supported_machine(self) -> bool:
"""Return True if app supports this machine."""
"""Return True if add-on supports this machine."""
if not (machine_types := self.supported_machine):
return True
@@ -582,7 +582,7 @@ class AppModel(JobGroup, ABC):
@property
def arch(self) -> CpuArch:
"""Return architecture to use for the app's image."""
"""Return architecture to use for the addon's image."""
return self.sys_arch.match(self.data[ATTR_ARCH])
@property
@@ -592,12 +592,12 @@ class AppModel(JobGroup, ABC):
@property
def need_build(self) -> bool:
"""Return True if this app need a local build."""
"""Return True if this add-on need a local build."""
return ATTR_IMAGE not in self.data
@property
def map_volumes(self) -> dict[MappingType, FolderMapping]:
"""Return a dict of {MappingType: FolderMapping} from app."""
"""Return a dict of {MappingType: FolderMapping} from add-on."""
volumes = {}
for volume in self.data[ATTR_MAP]:
volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping(
@@ -608,27 +608,27 @@ class AppModel(JobGroup, ABC):
@property
def path_location(self) -> Path:
"""Return path to this app."""
"""Return path to this add-on."""
return Path(self.data[ATTR_LOCATION])
@property
def path_icon(self) -> Path:
"""Return path to app icon."""
"""Return path to add-on icon."""
return Path(self.path_location, "icon.png")
@property
def path_logo(self) -> Path:
"""Return path to app logo."""
"""Return path to add-on logo."""
return Path(self.path_location, "logo.png")
@property
def path_changelog(self) -> Path:
"""Return path to app changelog."""
"""Return path to add-on changelog."""
return Path(self.path_location, "CHANGELOG.md")
@property
def path_documentation(self) -> Path:
"""Return path to app changelog."""
"""Return path to add-on changelog."""
return Path(self.path_location, "DOCS.md")
@property
@@ -637,17 +637,17 @@ class AppModel(JobGroup, ABC):
return Path(self.path_location, "apparmor.txt")
@property
def schema(self) -> AppOptions:
"""Return App options validation object."""
def schema(self) -> AddonOptions:
"""Return Addon options validation object."""
raw_schema = self.data[ATTR_SCHEMA]
if isinstance(raw_schema, bool):
raw_schema = {}
return AppOptions(self.coresys, raw_schema, self.name, self.slug)
return AddonOptions(self.coresys, raw_schema, self.name, self.slug)
@property
def schema_ui(self) -> list[dict[Any, Any]] | None:
"""Create a UI schema for app options."""
"""Create a UI schema for add-on options."""
raw_schema = self.data[ATTR_SCHEMA]
if isinstance(raw_schema, bool):
@@ -656,7 +656,7 @@ class AppModel(JobGroup, ABC):
@property
def with_journald(self) -> bool:
"""Return True if the app accesses the system journal."""
"""Return True if the add-on accesses the system journal."""
return self.data[ATTR_JOURNALD]
@property
@@ -666,7 +666,7 @@ class AppModel(JobGroup, ABC):
@property
def breaking_versions(self) -> list[AwesomeVersion]:
"""Return breaking versions of app."""
"""Return breaking versions of addon."""
return self.data[ATTR_BREAKING_VERSIONS]
async def long_description(self) -> str | None:
@@ -696,26 +696,26 @@ class AppModel(JobGroup, ABC):
return self.sys_run_in_executor(check_paths)
def validate_availability(self) -> None:
"""Validate if app is available for current system."""
"""Validate if addon is available for current system."""
return self._validate_availability(self.data, logger=_LOGGER.error)
def __eq__(self, other: Any) -> bool:
"""Compare app objects."""
if not isinstance(other, AppModel):
"""Compare add-on objects."""
if not isinstance(other, AddonModel):
return False
return self.slug == other.slug
def __hash__(self) -> int:
"""Hash for app objects."""
"""Hash for add-on objects."""
return hash(self.slug)
def _validate_availability(
self, config, *, logger: Callable[..., None] | None = None
) -> None:
"""Validate if app is available for current system."""
"""Validate if addon is available for current system."""
# Architecture
if not self.sys_arch.is_supported(config[ATTR_ARCH]):
raise AppNotSupportedArchitectureError(
raise AddonNotSupportedArchitectureError(
logger, slug=self.slug, architectures=config[ATTR_ARCH]
)
@@ -724,7 +724,7 @@ class AppModel(JobGroup, ABC):
if machine and (
f"!{self.sys_machine}" in machine or self.sys_machine not in machine
):
raise AppNotSupportedMachineTypeError(
raise AddonNotSupportedMachineTypeError(
logger, slug=self.slug, machine_types=machine
)
@@ -734,15 +734,15 @@ class AppModel(JobGroup, ABC):
if version and not version_is_new_enough(
self.sys_homeassistant.version, version
):
raise AppNotSupportedHomeAssistantVersionError(
raise AddonNotSupportedHomeAssistantVersionError(
logger, slug=self.slug, version=str(version)
)
def _available(self, config) -> bool:
"""Return True if this app is available on this platform."""
"""Return True if this add-on is available on this platform."""
try:
self._validate_availability(config)
except AppNotSupportedError:
except AddonNotSupportedError:
return False
return True

View File

@@ -1,4 +1,4 @@
"""App Options / UI rendering."""
"""Add-on Options / UI rendering."""
import hashlib
import logging
@@ -56,8 +56,8 @@ _SCHEMA_LENGTH_PARTS = (
)
class AppOptions(CoreSysAttributes):
"""Validate Apps Options."""
class AddonOptions(CoreSysAttributes):
"""Validate Add-ons Options."""
def __init__(
self, coresys: CoreSys, raw_schema: dict[str, Any], name: str, slug: str
@@ -72,11 +72,11 @@ class AppOptions(CoreSysAttributes):
@property
def validate(self) -> vol.Schema:
"""Create a schema for app options."""
"""Create a schema for add-on options."""
return vol.Schema(vol.All(dict, self))
def __call__(self, struct: dict[str, Any]) -> dict[str, Any]:
"""Create schema validator for apps options."""
"""Create schema validator for add-ons options."""
options = {}
# read options
@@ -262,7 +262,7 @@ class AppOptions(CoreSysAttributes):
class UiOptions(CoreSysAttributes):
"""Render UI Apps Options."""
"""Render UI Add-ons Options."""
def __init__(self, coresys: CoreSys) -> None:
"""Initialize UI option render."""

View File

@@ -1,4 +1,4 @@
"""Util apps functions."""
"""Util add-ons functions."""
from __future__ import annotations
@@ -11,12 +11,12 @@ from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE
from ..docker.const import Capabilities
if TYPE_CHECKING:
from .model import AppModel
from .model import AddonModel
_LOGGER: logging.Logger = logging.getLogger(__name__)
def rating_security(app: AppModel) -> int:
def rating_security(addon: AddonModel) -> int:
"""Return 1-8 for security rating.
1 = not secure
@@ -25,25 +25,25 @@ def rating_security(app: AppModel) -> int:
rating = 5
# AppArmor
if app.apparmor == SECURITY_DISABLE:
if addon.apparmor == SECURITY_DISABLE:
rating += -1
elif app.apparmor == SECURITY_PROFILE:
elif addon.apparmor == SECURITY_PROFILE:
rating += 1
# Home Assistant Login & Ingress
if app.with_ingress:
if addon.with_ingress:
rating += 2
elif app.access_auth_api:
elif addon.access_auth_api:
rating += 1
# Signed
if app.signed:
if addon.signed:
rating += 1
# Privileged options
if (
any(
privilege in app.privileged
privilege in addon.privileged
for privilege in (
Capabilities.BPF,
Capabilities.CHECKPOINT_RESTORE,
@@ -57,30 +57,30 @@ def rating_security(app: AppModel) -> int:
Capabilities.SYS_RAWIO,
)
)
or app.with_kernel_modules
or addon.with_kernel_modules
):
rating += -1
# API Supervisor role
if app.hassio_role == ROLE_MANAGER:
if addon.hassio_role == ROLE_MANAGER:
rating += -1
elif app.hassio_role == ROLE_ADMIN:
elif addon.hassio_role == ROLE_ADMIN:
rating += -2
# Not secure Networking
if app.host_network:
if addon.host_network:
rating += -1
# Insecure PID namespace
if app.host_pid:
if addon.host_pid:
rating += -2
# UTS host namespace allows to set hostname only with SYS_ADMIN
if app.host_uts and Capabilities.SYS_ADMIN in app.privileged:
if addon.host_uts and Capabilities.SYS_ADMIN in addon.privileged:
rating += -1
# Docker Access & full Access
if app.access_docker_api or app.with_full_access:
if addon.access_docker_api or addon.with_full_access:
rating = 1
return max(min(8, rating), 1)
@@ -102,4 +102,4 @@ def remove_data(folder: Path) -> None:
else:
return
_LOGGER.error("Can't remove app data: %s", error_msg)
_LOGGER.error("Can't remove Add-on Data: %s", error_msg)

View File

@@ -1,4 +1,4 @@
"""Validate apps options schema."""
"""Validate add-ons options schema."""
import logging
import re
@@ -101,11 +101,11 @@ from ..const import (
MACHINE_DEPRECATED,
ROLE_ALL,
ROLE_DEFAULT,
AppBoot,
AppBootConfig,
AppStage,
AppStartup,
AppState,
AddonBoot,
AddonBootConfig,
AddonStage,
AddonStartup,
AddonState,
)
from ..docker.const import Capabilities
from ..validate import (
@@ -124,7 +124,7 @@ from .const import (
ATTR_PATH,
ATTR_READ_ONLY,
RE_SLUG,
AppBackupMode,
AddonBackupMode,
MappingType,
)
from .options import RE_SCHEMA_ELEMENT
@@ -186,17 +186,17 @@ RE_MACHINE = re.compile(
RE_SLUG_FIELD = re.compile(r"^" + RE_SLUG + r"$")
def _warn_app_config(config: dict[str, Any]):
def _warn_addon_config(config: dict[str, Any]):
"""Warn about miss configs."""
name = config.get(ATTR_NAME)
if not name:
raise vol.Invalid("Invalid app config!")
raise vol.Invalid("Invalid Add-on config!")
if ATTR_ADVANCED in config:
# Deprecated since Supervisor 2026.03.0; this field is ignored and the
# warning can be removed once that version is the minimum supported.
_LOGGER.warning(
"App '%s' uses deprecated 'advanced' field in config. "
"Add-on '%s' uses deprecated 'advanced' field in config. "
"This field is ignored by the Supervisor. Please report this to the maintainer.",
name,
)
@@ -208,15 +208,15 @@ def _warn_app_config(config: dict[str, Any]):
or config.get(ATTR_GPIO)
):
_LOGGER.warning(
"App has full device access, and selective device access in the configuration. Please report this to the maintainer of %s",
"Add-on have full device access, and selective device access in the configuration. Please report this to the maintainer of %s",
name,
)
if config.get(ATTR_BACKUP, AppBackupMode.HOT) == AppBackupMode.COLD and (
if config.get(ATTR_BACKUP, AddonBackupMode.HOT) == AddonBackupMode.COLD and (
config.get(ATTR_BACKUP_POST) or config.get(ATTR_BACKUP_PRE)
):
_LOGGER.warning(
"An app that only supports COLD backups is trying to use pre/post commands. Please report this to the maintainer of %s",
"Add-on which only support COLD backups trying to use post/pre commands. Please report this to the maintainer of %s",
name,
)
@@ -224,7 +224,7 @@ def _warn_app_config(config: dict[str, Any]):
arch for arch in config.get(ATTR_ARCH, []) if arch in ARCH_DEPRECATED
]:
_LOGGER.warning(
"App config 'arch' uses deprecated values %s. Please report this to the maintainer of %s",
"Add-on config 'arch' uses deprecated values %s. Please report this to the maintainer of %s",
deprecated_arches,
name,
)
@@ -235,49 +235,49 @@ def _warn_app_config(config: dict[str, Any]):
if machine.lstrip("!") in MACHINE_DEPRECATED
]:
_LOGGER.warning(
"App config 'machine' uses deprecated values %s. Please report this to the maintainer of %s",
"Add-on config 'machine' uses deprecated values %s. Please report this to the maintainer of %s",
deprecated_machines,
name,
)
if ATTR_CODENOTARY in config:
_LOGGER.warning(
"App '%s' uses deprecated 'codenotary' field in config. This field is no longer used and will be ignored. Please report this to the maintainer.",
"Add-on '%s' uses deprecated 'codenotary' field in config. This field is no longer used and will be ignored. Please report this to the maintainer.",
name,
)
return config
def _migrate_app_config(protocol=False):
"""Migrate app config."""
def _migrate_addon_config(protocol=False):
"""Migrate addon config."""
def _migrate(config: dict[str, Any]):
if not isinstance(config, dict):
raise vol.Invalid("App config must be a dictionary!")
raise vol.Invalid("Add-on config must be a dictionary!")
name = config.get(ATTR_NAME)
if not name:
raise vol.Invalid("Invalid app config!")
raise vol.Invalid("Invalid Add-on config!")
# Startup 2018-03-30
if config.get(ATTR_STARTUP) in ("before", "after"):
value = config[ATTR_STARTUP]
if protocol:
_LOGGER.warning(
"App config 'startup' with '%s' is deprecated. Please report this to the maintainer of %s",
"Add-on config 'startup' with '%s' is deprecated. Please report this to the maintainer of %s",
value,
name,
)
if value == "before":
config[ATTR_STARTUP] = AppStartup.SERVICES
config[ATTR_STARTUP] = AddonStartup.SERVICES
elif value == "after":
config[ATTR_STARTUP] = AppStartup.APPLICATION
config[ATTR_STARTUP] = AddonStartup.APPLICATION
# UART 2021-01-20
if "auto_uart" in config:
if protocol:
_LOGGER.warning(
"App config 'auto_uart' is deprecated, use 'uart'. Please report this to the maintainer of %s",
"Add-on config 'auto_uart' is deprecated, use 'uart'. Please report this to the maintainer of %s",
name,
)
config[ATTR_UART] = config.pop("auto_uart")
@@ -286,7 +286,7 @@ def _migrate_app_config(protocol=False):
if ATTR_DEVICES in config and any(":" in line for line in config[ATTR_DEVICES]):
if protocol:
_LOGGER.warning(
"App config 'devices' uses a deprecated format instead of a list of paths only. Please report this to the maintainer of %s",
"Add-on config 'devices' use a deprecated format, the new format uses a list of paths only. Please report this to the maintainer of %s",
name,
)
config[ATTR_DEVICES] = [line.split(":")[0] for line in config[ATTR_DEVICES]]
@@ -295,7 +295,7 @@ def _migrate_app_config(protocol=False):
if ATTR_TMPFS in config and not isinstance(config[ATTR_TMPFS], bool):
if protocol:
_LOGGER.warning(
"App config 'tmpfs' uses a deprecated format instead of just a boolean. Please report this to the maintainer of %s",
"Add-on config 'tmpfs' use a deprecated format, new it's only a boolean. Please report this to the maintainer of %s",
name,
)
config[ATTR_TMPFS] = True
@@ -311,7 +311,7 @@ def _migrate_app_config(protocol=False):
new_entry = entry.replace("snapshot", "backup")
config[new_entry] = config.pop(entry)
_LOGGER.warning(
"App config '%s' is deprecated, '%s' should be used instead. Please report this to the maintainer of %s",
"Add-on config '%s' is deprecated, '%s' should be used instead. Please report this to the maintainer of %s",
entry,
new_entry,
name,
@@ -324,7 +324,7 @@ def _migrate_app_config(protocol=False):
# Validate that dict entries have required 'type' field
if ATTR_TYPE not in entry:
_LOGGER.warning(
"App config has invalid map entry missing 'type' field: %s. Skipping invalid entry for %s",
"Add-on config has invalid map entry missing 'type' field: %s. Skipping invalid entry for %s",
entry,
name,
)
@@ -334,7 +334,7 @@ def _migrate_app_config(protocol=False):
result = RE_VOLUME.match(entry)
if not result:
_LOGGER.warning(
"App config has invalid map entry: %s. Skipping invalid entry for %s",
"Add-on config has invalid map entry: %s. Skipping invalid entry for %s",
entry,
name,
)
@@ -349,7 +349,7 @@ def _migrate_app_config(protocol=False):
# Always update config to clear potentially malformed ones
config[ATTR_MAP] = volumes
# 2023-10 "config" became "homeassistant" so /config can be used for app's public config
# 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
if any(
volume
@@ -358,7 +358,7 @@ def _migrate_app_config(protocol=False):
for volume in volumes
):
_LOGGER.warning(
"App config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s",
"Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s",
MappingType.ADDON_CONFIG,
MappingType.HOMEASSISTANT_CONFIG,
MappingType.CONFIG,
@@ -366,7 +366,7 @@ def _migrate_app_config(protocol=False):
)
else:
_LOGGER.debug(
"App config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s",
"Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s",
MappingType.CONFIG,
MappingType.HOMEASSISTANT_CONFIG,
name,
@@ -387,13 +387,15 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL_COMPAT)],
vol.Optional(ATTR_MACHINE): vol.All([vol.Match(RE_MACHINE)], vol.Unique()),
vol.Optional(ATTR_URL): vol.Url(),
vol.Optional(ATTR_STARTUP, default=AppStartup.APPLICATION): vol.Coerce(
AppStartup
vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
AddonStartup
),
vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce(
AddonBootConfig
),
vol.Optional(ATTR_BOOT, default=AppBootConfig.AUTO): vol.Coerce(AppBootConfig),
vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
vol.Optional(ATTR_STAGE, default=AppStage.STABLE): vol.Coerce(AppStage),
vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
vol.Optional(ATTR_PORTS): docker_ports,
vol.Optional(ATTR_PORTS_DESCRIPTION): docker_ports_description,
vol.Optional(ATTR_WATCHDOG): vol.Match(
@@ -453,7 +455,9 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_BACKUP_EXCLUDE): [str],
vol.Optional(ATTR_BACKUP_PRE): str,
vol.Optional(ATTR_BACKUP_POST): str,
vol.Optional(ATTR_BACKUP, default=AppBackupMode.HOT): vol.Coerce(AppBackupMode),
vol.Optional(ATTR_BACKUP, default=AddonBackupMode.HOT): vol.Coerce(
AddonBackupMode
),
vol.Optional(ATTR_OPTIONS, default={}): dict,
vol.Optional(ATTR_SCHEMA, default={}): vol.Any(
vol.Schema({str: SCHEMA_ELEMENT}),
@@ -484,7 +488,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
)
SCHEMA_ADDON_CONFIG = vol.All(
_migrate_app_config(True), _warn_app_config, _SCHEMA_ADDON_CONFIG
_migrate_addon_config(True), _warn_addon_config, _SCHEMA_ADDON_CONFIG
)
@@ -531,7 +535,7 @@ SCHEMA_ADDON_USER = vol.Schema(
vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): str,
vol.Optional(ATTR_OPTIONS, default=dict): dict,
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
vol.Optional(ATTR_BOOT): vol.Coerce(AppBoot),
vol.Optional(ATTR_BOOT): vol.Coerce(AddonBoot),
vol.Optional(ATTR_NETWORK): docker_ports,
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
@@ -545,7 +549,7 @@ SCHEMA_ADDON_USER = vol.Schema(
)
SCHEMA_ADDON_SYSTEM = vol.All(
_migrate_app_config(),
_migrate_addon_config(),
_SCHEMA_ADDON_CONFIG.extend(
{
vol.Required(ATTR_LOCATION): str,
@@ -571,7 +575,7 @@ SCHEMA_ADDON_BACKUP = vol.Schema(
{
vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
vol.Required(ATTR_STATE): vol.Coerce(AppState),
vol.Required(ATTR_STATE): vol.Coerce(AddonState),
vol.Required(ATTR_VERSION): version_tag,
},
extra=vol.REMOVE_EXTRA,

View File

@@ -8,12 +8,11 @@ from typing import Any
from aiohttp import hdrs, web
from ..addons.addon import App
from ..const import SUPERVISOR_DOCKER_NAME, AppState, FeatureFlag
from ..const import SUPERVISOR_DOCKER_NAME, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAppNotInstalled, HostNotSupportedError
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
from ..utils.sentry import async_capture_exception
from .addons import APIApps
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
from .backups import APIBackups
@@ -77,9 +76,6 @@ class RestAPI(CoreSysAttributes):
"max_field_size": MAX_LINE_SIZE,
},
)
# v2 sub-app: no middleware of its own — parent webapp's middleware
# stack runs first for all requests including sub-app routes.
self.v2_app: web.Application = web.Application()
# service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
@@ -89,16 +85,11 @@ class RestAPI(CoreSysAttributes):
self._api_host: APIHost = APIHost()
self._api_host.coresys = coresys
# handler instances shared between v1 and v2 registrations
self._api_apps: APIApps | None = None
self._api_backups: APIBackups | None = None
self._api_store: APIStore | None = None
async def load(self) -> None:
"""Register REST API Calls."""
static_resource_configs: list[StaticResourceConfig] = []
self._register_apps()
self._register_addons()
self._register_audio()
self._register_auth()
self._register_backups()
@@ -125,14 +116,6 @@ class RestAPI(CoreSysAttributes):
self._register_store()
self._register_supervisor()
# Register v2 routes before mounting the sub-app
# (add_subapp freezes the sub-app's router)
if self.sys_config.feature_flags.get(FeatureFlag.SUPERVISOR_V2_API, False):
self._register_v2_apps()
self._register_v2_backups()
self._register_v2_store()
self.webapp.add_subapp("/v2", self.v2_app)
if static_resource_configs:
def process_configs() -> list[web.StaticResource]:
@@ -580,118 +563,74 @@ class RestAPI(CoreSysAttributes):
]
)
def _register_apps(self) -> None:
"""Register App functions."""
api_apps = APIApps()
api_apps.coresys = self.coresys
self._api_apps = api_apps
def _register_addons(self) -> None:
"""Register Add-on functions."""
api_addons = APIAddons()
api_addons.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/addons", api_apps.list_apps_v1),
web.post("/addons/{app}/uninstall", api_apps.uninstall),
web.post("/addons/{app}/start", api_apps.start),
web.post("/addons/{app}/stop", api_apps.stop),
web.post("/addons/{app}/restart", api_apps.restart),
web.post("/addons/{app}/options", api_apps.options),
web.post("/addons/{app}/sys_options", api_apps.sys_options),
web.post("/addons/{app}/options/validate", api_apps.options_validate),
web.get("/addons/{app}/options/config", api_apps.options_config),
web.post("/addons/{app}/rebuild", api_apps.rebuild),
web.post("/addons/{app}/stdin", api_apps.stdin),
web.post("/addons/{app}/security", api_apps.security),
web.get("/addons/{app}/stats", api_apps.stats),
web.get("/addons", api_addons.list_addons),
web.post("/addons/{addon}/uninstall", api_addons.uninstall),
web.post("/addons/{addon}/start", api_addons.start),
web.post("/addons/{addon}/stop", api_addons.stop),
web.post("/addons/{addon}/restart", api_addons.restart),
web.post("/addons/{addon}/options", api_addons.options),
web.post("/addons/{addon}/sys_options", api_addons.sys_options),
web.post(
"/addons/{addon}/options/validate", api_addons.options_validate
),
web.get("/addons/{addon}/options/config", api_addons.options_config),
web.post("/addons/{addon}/rebuild", api_addons.rebuild),
web.post("/addons/{addon}/stdin", api_addons.stdin),
web.post("/addons/{addon}/security", api_addons.security),
web.get("/addons/{addon}/stats", api_addons.stats),
]
)
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def get_app_logs(request, *args, **kwargs):
app = api_apps.get_app_for_request(request)
kwargs["identifier"] = f"addon_{app.slug}"
async def get_addon_logs(request, *args, **kwargs):
addon = api_addons.get_addon_for_request(request)
kwargs["identifier"] = f"addon_{addon.slug}"
return await self._api_host.advanced_logs(request, *args, **kwargs)
self.webapp.add_routes(
[
web.get("/addons/{app}/logs", get_app_logs),
web.get("/addons/{addon}/logs", get_addon_logs),
web.get(
"/addons/{app}/logs/follow",
partial(get_app_logs, follow=True),
"/addons/{addon}/logs/follow",
partial(get_addon_logs, follow=True),
),
web.get(
"/addons/{app}/logs/latest",
partial(get_app_logs, latest=True, no_colors=True),
"/addons/{addon}/logs/latest",
partial(get_addon_logs, latest=True, no_colors=True),
),
web.get("/addons/{app}/logs/boots/{bootid}", get_app_logs),
web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
web.get(
"/addons/{app}/logs/boots/{bootid}/follow",
partial(get_app_logs, follow=True),
"/addons/{addon}/logs/boots/{bootid}/follow",
partial(get_addon_logs, follow=True),
),
]
)
# Legacy routing to support requests for not installed apps
# Legacy routing to support requests for not installed addons
api_store = APIStore()
api_store.coresys = self.coresys
@api_process
async def apps_app_info(request: web.Request) -> dict[str, Any]:
"""Route to store if info requested for not installed app."""
async def addons_addon_info(request: web.Request) -> dict[str, Any]:
"""Route to store if info requested for not installed addon."""
try:
app: App = api_apps.get_app_for_request(request)
return await api_apps.info_data(app)
except APIAppNotInstalled:
# Route to store/{app}/info but add missing fields
return await api_addons.info(request)
except APIAddonNotInstalled:
# Route to store/{addon}/info but add missing fields
return dict(
await api_store.apps_app_info_wrapped(request),
state=AppState.UNKNOWN,
options=self.sys_apps.store[request.match_info["app"]].options,
await api_store.addons_addon_info_wrapped(request),
state=AddonState.UNKNOWN,
options=self.sys_addons.store[request.match_info["addon"]].options,
)
self.webapp.add_routes([web.get("/addons/{app}/info", apps_app_info)])
def _register_v2_apps(self) -> None:
"""Register v2 app routes on the v2 sub-app (accessible as /v2/apps/...)."""
assert self._api_apps is not None
api_apps = self._api_apps
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def get_app_logs_v2(request, *args, **kwargs):
app = api_apps.get_app_for_request(request)
kwargs["identifier"] = f"addon_{app.slug}"
return await self._api_host.advanced_logs(request, *args, **kwargs)
self.v2_app.add_routes(
[
web.get("/apps", api_apps.list_apps),
web.post("/apps/{app}/uninstall", api_apps.uninstall),
web.post("/apps/{app}/start", api_apps.start),
web.post("/apps/{app}/stop", api_apps.stop),
web.post("/apps/{app}/restart", api_apps.restart),
web.post("/apps/{app}/options", api_apps.options),
web.post("/apps/{app}/sys_options", api_apps.sys_options),
web.post("/apps/{app}/options/validate", api_apps.options_validate),
web.get("/apps/{app}/options/config", api_apps.options_config),
web.post("/apps/{app}/rebuild", api_apps.rebuild),
web.post("/apps/{app}/stdin", api_apps.stdin),
web.post("/apps/{app}/security", api_apps.security),
web.get("/apps/{app}/stats", api_apps.stats),
web.get("/apps/{app}/info", api_apps.info),
web.get("/apps/{app}/logs", get_app_logs_v2),
web.get(
"/apps/{app}/logs/follow",
partial(get_app_logs_v2, follow=True),
),
web.get(
"/apps/{app}/logs/latest",
partial(get_app_logs_v2, latest=True, no_colors=True),
),
web.get("/apps/{app}/logs/boots/{bootid}", get_app_logs_v2),
web.get(
"/apps/{app}/logs/boots/{bootid}/follow",
partial(get_app_logs_v2, follow=True),
),
]
)
self.webapp.add_routes([web.get("/addons/{addon}/info", addons_addon_info)])
def _register_ingress(self) -> None:
"""Register Ingress functions."""
@@ -713,36 +652,8 @@ class RestAPI(CoreSysAttributes):
"""Register backups functions."""
api_backups = APIBackups()
api_backups.coresys = self.coresys
self._api_backups = api_backups
self.webapp.add_routes(
[
web.get("/backups", api_backups.list_backups_v1),
web.get("/backups/info", api_backups.info_v1),
web.post("/backups/options", api_backups.options),
web.post("/backups/reload", api_backups.reload),
web.post("/backups/freeze", api_backups.freeze),
web.post("/backups/thaw", api_backups.thaw),
web.post("/backups/new/full", api_backups.backup_full),
web.post("/backups/new/partial", api_backups.backup_partial_v1),
web.post("/backups/new/upload", api_backups.upload),
web.get("/backups/{slug}/info", api_backups.backup_info_v1),
web.delete("/backups/{slug}", api_backups.remove),
web.post("/backups/{slug}/restore/full", api_backups.restore_full),
web.post(
"/backups/{slug}/restore/partial",
api_backups.restore_partial_v1,
),
web.get("/backups/{slug}/download", api_backups.download),
]
)
def _register_v2_backups(self) -> None:
"""Register v2 backup routes on the v2 sub-app (accessible as /v2/backups/...)."""
assert self._api_backups is not None
api_backups = self._api_backups
self.v2_app.add_routes(
[
web.get("/backups", api_backups.list_backups),
web.get("/backups/info", api_backups.info),
@@ -853,36 +764,39 @@ class RestAPI(CoreSysAttributes):
"""Register store endpoints."""
api_store = APIStore()
api_store.coresys = self.coresys
self._api_store = api_store
self.webapp.add_routes(
[
web.get("/store", api_store.store_info_v1),
web.get("/store/addons", api_store.apps_list_v1),
web.get("/store/addons/{app}", api_store.apps_app_info),
web.get("/store/addons/{app}/icon", api_store.apps_app_icon),
web.get("/store/addons/{app}/logo", api_store.apps_app_logo),
web.get("/store/addons/{app}/changelog", api_store.apps_app_changelog),
web.get("/store", api_store.store_info),
web.get("/store/addons", api_store.addons_list),
web.get("/store/addons/{addon}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
web.get(
"/store/addons/{app}/documentation",
api_store.apps_app_documentation,
"/store/addons/{addon}/changelog", api_store.addons_addon_changelog
),
web.get(
"/store/addons/{app}/availability",
api_store.apps_app_availability,
"/store/addons/{addon}/documentation",
api_store.addons_addon_documentation,
),
web.post("/store/addons/{app}/install", api_store.apps_app_install),
web.post(
"/store/addons/{app}/install/{version}",
api_store.apps_app_install,
web.get(
"/store/addons/{addon}/availability",
api_store.addons_addon_availability,
),
web.post("/store/addons/{app}/update", api_store.apps_app_update),
web.post(
"/store/addons/{app}/update/{version}",
api_store.apps_app_update,
"/store/addons/{addon}/install", api_store.addons_addon_install
),
web.post(
"/store/addons/{addon}/install/{version}",
api_store.addons_addon_install,
),
web.post("/store/addons/{addon}/update", api_store.addons_addon_update),
web.post(
"/store/addons/{addon}/update/{version}",
api_store.addons_addon_update,
),
# Must be below others since it has a wildcard in resource path
web.get("/store/addons/{app}/{version}", api_store.apps_app_info),
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.post("/store/reload", api_store.reload),
web.get("/store/repositories", api_store.repositories_list),
web.get(
@@ -904,64 +818,14 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes(
[
web.post("/addons/reload", api_store.reload),
web.post("/addons/{app}/install", api_store.apps_app_install),
web.post("/addons/{app}/update", api_store.apps_app_update),
web.get("/addons/{app}/icon", api_store.apps_app_icon),
web.get("/addons/{app}/logo", api_store.apps_app_logo),
web.get("/addons/{app}/changelog", api_store.apps_app_changelog),
web.post("/addons/{addon}/install", api_store.addons_addon_install),
web.post("/addons/{addon}/update", api_store.addons_addon_update),
web.get("/addons/{addon}/icon", api_store.addons_addon_icon),
web.get("/addons/{addon}/logo", api_store.addons_addon_logo),
web.get("/addons/{addon}/changelog", api_store.addons_addon_changelog),
web.get(
"/addons/{app}/documentation",
api_store.apps_app_documentation,
),
]
)
def _register_v2_store(self) -> None:
"""Register v2 store routes on the v2 sub-app (accessible as /v2/store/...)."""
assert self._api_store is not None
api_store = self._api_store
self.v2_app.add_routes(
[
web.get("/store", api_store.store_info),
web.get("/store/apps", api_store.apps_list),
web.get("/store/apps/{app}", api_store.apps_app_info),
web.get("/store/apps/{app}/icon", api_store.apps_app_icon),
web.get("/store/apps/{app}/logo", api_store.apps_app_logo),
web.get("/store/apps/{app}/changelog", api_store.apps_app_changelog),
web.get(
"/store/apps/{app}/documentation",
api_store.apps_app_documentation,
),
web.get(
"/store/apps/{app}/availability",
api_store.apps_app_availability,
),
web.post("/store/apps/{app}/install", api_store.apps_app_install),
web.post(
"/store/apps/{app}/install/{version}",
api_store.apps_app_install,
),
web.post("/store/apps/{app}/update", api_store.apps_app_update),
web.post(
"/store/apps/{app}/update/{version}",
api_store.apps_app_update,
),
# Must be below others since it has a wildcard in resource path
web.get("/store/apps/{app}/{version}", api_store.apps_app_info),
web.post("/store/reload", api_store.reload),
web.get("/store/repositories", api_store.repositories_list),
web.get(
"/store/repositories/{repository}",
api_store.repositories_repository_info,
),
web.post("/store/repositories", api_store.add_repository),
web.delete(
"/store/repositories/{repository}", api_store.remove_repository
),
web.post(
"/store/repositories/{repository}/repair",
api_store.repositories_repository_repair,
"/addons/{addon}/documentation",
api_store.addons_addon_documentation,
),
]
)

View File

@@ -9,13 +9,12 @@ from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..addons.addon import App
from ..addons.addon import Addon
from ..addons.utils import rating_security
from ..const import (
ATTR_ADDONS,
ATTR_ADVANCED,
ATTR_APPARMOR,
ATTR_APPS,
ATTR_ARCH,
ATTR_AUDIO,
ATTR_AUDIO_INPUT,
@@ -95,19 +94,19 @@ from ..const import (
ATTR_WATCHDOG,
ATTR_WEBUI,
REQUEST_FROM,
AppBoot,
AppBootConfig,
AddonBoot,
AddonBootConfig,
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
from ..exceptions import (
APIAppNotInstalled,
AddonBootConfigCannotChangeError,
AddonConfigurationInvalidError,
AddonNotSupportedWriteStdinError,
APIAddonNotInstalled,
APIError,
APIForbidden,
APINotFound,
AppBootConfigCannotChangeError,
AppConfigurationInvalidError,
AppNotSupportedWriteStdinError,
PwnedError,
PwnedSecret,
)
@@ -122,7 +121,7 @@ SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): str})
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_BOOT): vol.Coerce(AppBoot),
vol.Optional(ATTR_BOOT): vol.Coerce(AddonBoot),
vol.Optional(ATTR_NETWORK): vol.Maybe(docker_ports),
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
@@ -158,157 +157,149 @@ class OptionsValidateResponse(TypedDict):
pwned: bool | None
class APIApps(CoreSysAttributes):
"""Handle RESTful API for app functions."""
class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions."""
def get_app_for_request(self, request: web.Request) -> App:
"""Return app, throw an exception if it doesn't exist."""
app_slug: str = request.match_info["app"]
def get_addon_for_request(self, request: web.Request) -> Addon:
"""Return addon, throw an exception if it doesn't exist."""
addon_slug: str = request.match_info["addon"]
# Lookup itself
if app_slug == "self":
app = request.get(REQUEST_FROM)
if not isinstance(app, App):
raise APIError("Self is not an App")
return app
if addon_slug == "self":
addon = request.get(REQUEST_FROM)
if not isinstance(addon, Addon):
raise APIError("Self is not an Addon")
return addon
app = self.sys_apps.get(app_slug)
if not app:
raise APINotFound(f"App {app_slug} does not exist")
if not isinstance(app, App) or not app.is_installed:
raise APIAppNotInstalled("App is not installed")
addon = self.sys_addons.get(addon_slug)
if not addon:
raise APINotFound(f"Addon {addon_slug} does not exist")
if not isinstance(addon, Addon) or not addon.is_installed:
raise APIAddonNotInstalled("Addon is not installed")
return app
return addon
def _list_apps_data(self) -> list[dict[str, Any]]:
"""Build the list of installed app data dicts."""
return [
@api_process
async def list_addons(self, request: web.Request) -> dict[str, Any]:
"""Return all add-ons or repositories."""
data_addons = [
{
ATTR_NAME: app.name,
ATTR_SLUG: app.slug,
ATTR_DESCRIPTON: app.description,
ATTR_ADVANCED: app.advanced, # Deprecated 2026.03
ATTR_STAGE: app.stage,
ATTR_VERSION: app.version,
ATTR_VERSION_LATEST: app.latest_version,
ATTR_UPDATE_AVAILABLE: app.need_update,
ATTR_AVAILABLE: app.available,
ATTR_DETACHED: app.is_detached,
ATTR_HOMEASSISTANT: app.homeassistant_version,
ATTR_STATE: app.state,
ATTR_REPOSITORY: app.repository,
ATTR_BUILD: app.need_build,
ATTR_URL: app.url,
ATTR_ICON: app.with_icon,
ATTR_LOGO: app.with_logo,
ATTR_SYSTEM_MANAGED: app.system_managed,
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: addon.description,
ATTR_ADVANCED: addon.advanced, # Deprecated 2026.03
ATTR_STAGE: addon.stage,
ATTR_VERSION: addon.version,
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_UPDATE_AVAILABLE: addon.need_update,
ATTR_AVAILABLE: addon.available,
ATTR_DETACHED: addon.is_detached,
ATTR_HOMEASSISTANT: addon.homeassistant_version,
ATTR_STATE: addon.state,
ATTR_REPOSITORY: addon.repository,
ATTR_BUILD: addon.need_build,
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_SYSTEM_MANAGED: addon.system_managed,
}
for app in self.sys_apps.installed
for addon in self.sys_addons.installed
]
@api_process
async def list_apps(self, request: web.Request) -> dict[str, Any]:
"""Return all installed apps (v2: uses "apps" key)."""
return {ATTR_APPS: self._list_apps_data()}
@api_process
async def list_apps_v1(self, request: web.Request) -> dict[str, Any]:
"""Return all installed apps (v1: uses "addons" key)."""
return {ATTR_ADDONS: self._list_apps_data()}
return {ATTR_ADDONS: data_addons}
@api_process
async def reload(self, request: web.Request) -> None:
"""Reload all app data from store."""
"""Reload all add-on data from store."""
await asyncio.shield(self.sys_store.reload())
async def info_data(self, app: App) -> dict[str, Any]:
"""Build and return app information dict (raises on invalid state)."""
return {
ATTR_NAME: app.name,
ATTR_SLUG: app.slug,
ATTR_HOSTNAME: app.hostname,
ATTR_DNS: app.dns,
ATTR_DESCRIPTON: app.description,
ATTR_LONG_DESCRIPTION: await app.long_description(),
ATTR_ADVANCED: app.advanced, # Deprecated 2026.03
ATTR_STAGE: app.stage,
ATTR_REPOSITORY: app.repository,
ATTR_VERSION_LATEST: app.latest_version,
ATTR_PROTECTED: app.protected,
ATTR_RATING: rating_security(app),
ATTR_BOOT_CONFIG: app.boot_config,
ATTR_BOOT: app.boot,
ATTR_OPTIONS: app.options,
ATTR_SCHEMA: app.schema_ui,
ATTR_ARCH: app.supported_arch,
ATTR_MACHINE: app.supported_machine,
ATTR_HOMEASSISTANT: app.homeassistant_version,
ATTR_URL: app.url,
ATTR_DETACHED: app.is_detached,
ATTR_AVAILABLE: app.available,
ATTR_BUILD: app.need_build,
ATTR_NETWORK: app.ports,
ATTR_NETWORK_DESCRIPTION: app.ports_description,
ATTR_HOST_NETWORK: app.host_network,
ATTR_HOST_PID: app.host_pid,
ATTR_HOST_IPC: app.host_ipc,
ATTR_HOST_UTS: app.host_uts,
ATTR_HOST_DBUS: app.host_dbus,
ATTR_PRIVILEGED: app.privileged,
ATTR_FULL_ACCESS: app.with_full_access,
ATTR_APPARMOR: app.apparmor,
ATTR_ICON: app.with_icon,
ATTR_LOGO: app.with_logo,
ATTR_CHANGELOG: app.with_changelog,
ATTR_DOCUMENTATION: app.with_documentation,
ATTR_STDIN: app.with_stdin,
ATTR_HASSIO_API: app.access_hassio_api,
ATTR_HASSIO_ROLE: app.hassio_role,
ATTR_AUTH_API: app.access_auth_api,
ATTR_HOMEASSISTANT_API: app.access_homeassistant_api,
ATTR_GPIO: app.with_gpio,
ATTR_USB: app.with_usb,
ATTR_UART: app.with_uart,
ATTR_KERNEL_MODULES: app.with_kernel_modules,
ATTR_DEVICETREE: app.with_devicetree,
ATTR_UDEV: app.with_udev,
ATTR_DOCKER_API: app.access_docker_api,
ATTR_VIDEO: app.with_video,
ATTR_AUDIO: app.with_audio,
ATTR_STARTUP: app.startup,
ATTR_SERVICES: _pretty_services(app),
ATTR_DISCOVERY: app.discovery,
ATTR_TRANSLATIONS: app.translations,
ATTR_INGRESS: app.with_ingress,
ATTR_SIGNED: app.signed,
ATTR_STATE: app.state,
ATTR_WEBUI: app.webui,
ATTR_INGRESS_ENTRY: app.ingress_entry,
ATTR_INGRESS_URL: app.ingress_url,
ATTR_INGRESS_PORT: app.ingress_port,
ATTR_INGRESS_PANEL: app.ingress_panel,
ATTR_AUDIO_INPUT: app.audio_input,
ATTR_AUDIO_OUTPUT: app.audio_output,
ATTR_AUTO_UPDATE: app.auto_update,
ATTR_IP_ADDRESS: str(app.ip_address),
ATTR_VERSION: app.version,
ATTR_UPDATE_AVAILABLE: app.need_update,
ATTR_WATCHDOG: app.watchdog,
ATTR_DEVICES: app.static_devices + [device.path for device in app.devices],
ATTR_SYSTEM_MANAGED: app.system_managed,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: app.system_managed_config_entry,
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information."""
addon: Addon = self.get_addon_for_request(request)
data = {
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_HOSTNAME: addon.hostname,
ATTR_DNS: addon.dns,
ATTR_DESCRIPTON: addon.description,
ATTR_LONG_DESCRIPTION: await addon.long_description(),
ATTR_ADVANCED: addon.advanced, # Deprecated 2026.03
ATTR_STAGE: addon.stage,
ATTR_REPOSITORY: addon.repository,
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon),
ATTR_BOOT_CONFIG: addon.boot_config,
ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options,
ATTR_SCHEMA: addon.schema_ui,
ATTR_ARCH: addon.supported_arch,
ATTR_MACHINE: addon.supported_machine,
ATTR_HOMEASSISTANT: addon.homeassistant_version,
ATTR_URL: addon.url,
ATTR_DETACHED: addon.is_detached,
ATTR_AVAILABLE: addon.available,
ATTR_BUILD: addon.need_build,
ATTR_NETWORK: addon.ports,
ATTR_NETWORK_DESCRIPTION: addon.ports_description,
ATTR_HOST_NETWORK: addon.host_network,
ATTR_HOST_PID: addon.host_pid,
ATTR_HOST_IPC: addon.host_ipc,
ATTR_HOST_UTS: addon.host_uts,
ATTR_HOST_DBUS: addon.host_dbus,
ATTR_PRIVILEGED: addon.privileged,
ATTR_FULL_ACCESS: addon.with_full_access,
ATTR_APPARMOR: addon.apparmor,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_CHANGELOG: addon.with_changelog,
ATTR_DOCUMENTATION: addon.with_documentation,
ATTR_STDIN: addon.with_stdin,
ATTR_HASSIO_API: addon.access_hassio_api,
ATTR_HASSIO_ROLE: addon.hassio_role,
ATTR_AUTH_API: addon.access_auth_api,
ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
ATTR_GPIO: addon.with_gpio,
ATTR_USB: addon.with_usb,
ATTR_UART: addon.with_uart,
ATTR_KERNEL_MODULES: addon.with_kernel_modules,
ATTR_DEVICETREE: addon.with_devicetree,
ATTR_UDEV: addon.with_udev,
ATTR_DOCKER_API: addon.access_docker_api,
ATTR_VIDEO: addon.with_video,
ATTR_AUDIO: addon.with_audio,
ATTR_STARTUP: addon.startup,
ATTR_SERVICES: _pretty_services(addon),
ATTR_DISCOVERY: addon.discovery,
ATTR_TRANSLATIONS: addon.translations,
ATTR_INGRESS: addon.with_ingress,
ATTR_SIGNED: addon.signed,
ATTR_STATE: addon.state,
ATTR_WEBUI: addon.webui,
ATTR_INGRESS_ENTRY: addon.ingress_entry,
ATTR_INGRESS_URL: addon.ingress_url,
ATTR_INGRESS_PORT: addon.ingress_port,
ATTR_INGRESS_PANEL: addon.ingress_panel,
ATTR_AUDIO_INPUT: addon.audio_input,
ATTR_AUDIO_OUTPUT: addon.audio_output,
ATTR_AUTO_UPDATE: addon.auto_update,
ATTR_IP_ADDRESS: str(addon.ip_address),
ATTR_VERSION: addon.version,
ATTR_UPDATE_AVAILABLE: addon.need_update,
ATTR_WATCHDOG: addon.watchdog,
ATTR_DEVICES: addon.static_devices
+ [device.path for device in addon.devices],
ATTR_SYSTEM_MANAGED: addon.system_managed,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
}
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return app information."""
app: App = self.get_app_for_request(request)
return await self.info_data(app)
return data
@api_process
async def options(self, request: web.Request) -> None:
"""Store user options for app."""
app = self.get_app_for_request(request)
"""Store user options for add-on."""
addon = self.get_addon_for_request(request)
# Update secrets for validation
await self.sys_homeassistant.secrets.reload()
@@ -318,61 +309,61 @@ class APIApps(CoreSysAttributes):
if ATTR_OPTIONS in body:
# None resets options to defaults, otherwise validate the options
if body[ATTR_OPTIONS] is None:
app.options = None
addon.options = None
else:
try:
app.options = app.schema(body[ATTR_OPTIONS])
addon.options = addon.schema(body[ATTR_OPTIONS])
except vol.Invalid as ex:
raise AppConfigurationInvalidError(
app=app.slug,
raise AddonConfigurationInvalidError(
addon=addon.slug,
validation_error=humanize_error(body[ATTR_OPTIONS], ex),
) from None
if ATTR_BOOT in body:
if app.boot_config == AppBootConfig.MANUAL_ONLY:
raise AppBootConfigCannotChangeError(
app=app.slug, boot_config=app.boot_config.value
if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
raise AddonBootConfigCannotChangeError(
addon=addon.slug, boot_config=addon.boot_config.value
)
app.boot = body[ATTR_BOOT]
addon.boot = body[ATTR_BOOT]
if ATTR_AUTO_UPDATE in body:
app.auto_update = body[ATTR_AUTO_UPDATE]
addon.auto_update = body[ATTR_AUTO_UPDATE]
if ATTR_NETWORK in body:
app.ports = body[ATTR_NETWORK]
addon.ports = body[ATTR_NETWORK]
if ATTR_AUDIO_INPUT in body:
app.audio_input = body[ATTR_AUDIO_INPUT]
addon.audio_input = body[ATTR_AUDIO_INPUT]
if ATTR_AUDIO_OUTPUT in body:
app.audio_output = body[ATTR_AUDIO_OUTPUT]
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
if ATTR_INGRESS_PANEL in body:
app.ingress_panel = body[ATTR_INGRESS_PANEL]
await self.sys_ingress.update_hass_panel(app)
addon.ingress_panel = body[ATTR_INGRESS_PANEL]
await self.sys_ingress.update_hass_panel(addon)
if ATTR_WATCHDOG in body:
app.watchdog = body[ATTR_WATCHDOG]
addon.watchdog = body[ATTR_WATCHDOG]
await app.save_persist()
await addon.save_persist()
@api_process
async def sys_options(self, request: web.Request) -> None:
"""Store system options for an app."""
app = self.get_app_for_request(request)
"""Store system options for an add-on."""
addon = self.get_addon_for_request(request)
# Validate/Process Body
body = await api_validate(SCHEMA_SYS_OPTIONS, request)
if ATTR_SYSTEM_MANAGED in body:
app.system_managed = body[ATTR_SYSTEM_MANAGED]
addon.system_managed = body[ATTR_SYSTEM_MANAGED]
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
app.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
await app.save_persist()
await addon.save_persist()
@api_process
async def options_validate(self, request: web.Request) -> OptionsValidateResponse:
"""Validate user options for app."""
app = self.get_app_for_request(request)
"""Validate user options for add-on."""
addon = self.get_addon_for_request(request)
data = OptionsValidateResponse(message="", valid=True, pwned=False)
options = await request.json(loads=json_loads) or app.options
options = await request.json(loads=json_loads) or addon.options
# Validate config
options_schema = app.schema
options_schema = addon.schema
try:
options_schema.validate(options)
except vol.Invalid as ex:
@@ -398,43 +389,43 @@ class APIApps(CoreSysAttributes):
if data["pwned"] is None:
data["message"] = "Error happening on pwned secrets check!"
else:
data["message"] = "App uses pwned secrets!"
data["message"] = "Add-on uses pwned secrets!"
return data
@api_process
async def options_config(self, request: web.Request) -> dict[str, Any]:
"""Validate user options for app."""
slug: str = request.match_info["app"]
"""Validate user options for add-on."""
slug: str = request.match_info["addon"]
if slug != "self":
raise APIForbidden("This can be only read by the app itself!")
app = self.get_app_for_request(request)
raise APIForbidden("This can be only read by the Add-on itself!")
addon = self.get_addon_for_request(request)
# Lookup/reload secrets
await self.sys_homeassistant.secrets.reload()
try:
return app.schema.validate(app.options)
return addon.schema.validate(addon.options)
except vol.Invalid:
raise APIError("Invalid configuration data for the app") from None
raise APIError("Invalid configuration data for the add-on") from None
@api_process
async def security(self, request: web.Request) -> None:
"""Store security options for app."""
app = self.get_app_for_request(request)
"""Store security options for add-on."""
addon = self.get_addon_for_request(request)
body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
if ATTR_PROTECTED in body:
_LOGGER.warning("Changing protected flag for %s!", app.slug)
app.protected = body[ATTR_PROTECTED]
_LOGGER.warning("Changing protected flag for %s!", addon.slug)
addon.protected = body[ATTR_PROTECTED]
await app.save_persist()
await addon.save_persist()
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
"""Return resource information."""
app = self.get_app_for_request(request)
addon = self.get_addon_for_request(request)
stats: DockerStats = await app.stats()
stats: DockerStats = await addon.stats()
return {
ATTR_CPU_PERCENT: stats.cpu_percent,
@@ -449,55 +440,57 @@ class APIApps(CoreSysAttributes):
@api_process
async def uninstall(self, request: web.Request) -> None:
"""Uninstall app."""
app = self.get_app_for_request(request)
"""Uninstall add-on."""
addon = self.get_addon_for_request(request)
body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
await asyncio.shield(
self.sys_apps.uninstall(app.slug, remove_config=body[ATTR_REMOVE_CONFIG])
self.sys_addons.uninstall(
addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
)
)
@api_process
async def start(self, request: web.Request) -> None:
"""Start app."""
app = self.get_app_for_request(request)
if start_task := await asyncio.shield(app.start()):
"""Start add-on."""
addon = self.get_addon_for_request(request)
if start_task := await asyncio.shield(addon.start()):
await start_task
@api_process
def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop app."""
app = self.get_app_for_request(request)
return asyncio.shield(app.stop())
"""Stop add-on."""
addon = self.get_addon_for_request(request)
return asyncio.shield(addon.stop())
@api_process
async def restart(self, request: web.Request) -> None:
"""Restart app."""
app: App = self.get_app_for_request(request)
if start_task := await asyncio.shield(app.restart()):
"""Restart add-on."""
addon: Addon = self.get_addon_for_request(request)
if start_task := await asyncio.shield(addon.restart()):
await start_task
@api_process
async def rebuild(self, request: web.Request) -> None:
"""Rebuild local build app."""
app = self.get_app_for_request(request)
"""Rebuild local build add-on."""
addon = self.get_addon_for_request(request)
body: dict[str, Any] = await api_validate(SCHEMA_REBUILD, request)
if start_task := await asyncio.shield(
self.sys_apps.rebuild(app.slug, force=body[ATTR_FORCE])
self.sys_addons.rebuild(addon.slug, force=body[ATTR_FORCE])
):
await start_task
@api_process
async def stdin(self, request: web.Request) -> None:
"""Write to stdin of app."""
app = self.get_app_for_request(request)
if not app.with_stdin:
raise AppNotSupportedWriteStdinError(_LOGGER.error, app=app.slug)
"""Write to stdin of add-on."""
addon = self.get_addon_for_request(request)
if not addon.with_stdin:
raise AddonNotSupportedWriteStdinError(_LOGGER.error, addon=addon.slug)
data = await request.read()
await asyncio.shield(app.write_stdin(data))
await asyncio.shield(addon.write_stdin(data))
def _pretty_services(app: App) -> list[str]:
def _pretty_services(addon: Addon) -> list[str]:
"""Return a simplified services role list."""
return [f"{name}:{access}" for name, access in app.services_role.items()]
return [f"{name}:{access}" for name, access in addon.services_role.items()]

View File

@@ -12,7 +12,7 @@ from aiohttp.web_exceptions import HTTPUnauthorized
from multidict import MultiDictProxy
import voluptuous as vol
from ..addons.addon import App
from ..addons.addon import Addon
from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden, AuthInvalidNonStringValueError
@@ -44,7 +44,7 @@ REALM_HEADER: dict[str, str] = {
class APIAuth(CoreSysAttributes):
"""Handle RESTful API for auth functions."""
def _process_basic(self, request: web.Request, app: App) -> Awaitable[bool]:
def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]:
"""Process login request with basic auth.
Return a coroutine.
@@ -53,12 +53,12 @@ class APIAuth(CoreSysAttributes):
auth = BasicAuth.decode(request.headers[AUTHORIZATION])
except ValueError as err:
raise HTTPUnauthorized(headers=REALM_HEADER) from err
return self.sys_auth.check_login(app, auth.login, auth.password)
return self.sys_auth.check_login(addon, auth.login, auth.password)
def _process_dict(
self,
request: web.Request,
app: App,
addon: Addon,
data: dict[str, Any] | MultiDictProxy[str | bytes | FileField],
) -> Awaitable[bool]:
"""Process login with dict data.
@@ -76,33 +76,35 @@ class APIAuth(CoreSysAttributes):
_LOGGER.error, headers=REALM_HEADER
) from None
return self.sys_auth.check_login(app, cast(str, username), cast(str, password))
return self.sys_auth.check_login(
addon, cast(str, username), cast(str, password)
)
@api_process
async def auth(self, request: web.Request) -> bool:
"""Process login request."""
app = request[REQUEST_FROM]
addon = request[REQUEST_FROM]
if not isinstance(app, App) or not app.access_auth_api:
if not isinstance(addon, Addon) or not addon.access_auth_api:
raise APIForbidden("Can't use Home Assistant auth!")
# BasicAuth
if AUTHORIZATION in request.headers:
if not await self._process_basic(request, app):
if not await self._process_basic(request, addon):
raise HTTPUnauthorized(headers=REALM_HEADER)
return True
# Json
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
data = await request.json(loads=json_loads)
if not await self._process_dict(request, app, data):
if not await self._process_dict(request, addon, data):
raise HTTPUnauthorized()
return True
# URL encoded
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
data = await request.post()
if not await self._process_dict(request, app, data):
if not await self._process_dict(request, addon, data):
raise HTTPUnauthorized()
return True

View File

@@ -20,7 +20,6 @@ from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
from ..const import (
ATTR_ADDONS,
ATTR_APPS,
ATTR_BACKUPS,
ATTR_COMPRESSED,
ATTR_CONTENT,
@@ -61,7 +60,7 @@ from .utils import api_process, api_validate, background_task
_LOGGER: logging.Logger = logging.getLogger(__name__)
ALL_APPS_FLAG = "ALL"
ALL_ADDONS_FLAG = "ALL"
LOCATION_LOCAL = ".local"
@@ -100,20 +99,10 @@ SCHEMA_RESTORE_FULL = vol.Schema(
}
)
# V1 schemas use "addons" as the request body key (legacy API contract).
SCHEMA_RESTORE_PARTIAL_V1 = SCHEMA_RESTORE_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
}
)
# V2 schemas use "apps" as the request body key.
SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_APPS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
}
)
@@ -131,19 +120,11 @@ SCHEMA_BACKUP_FULL = vol.Schema(
}
)
# V1 schema uses "addons" as the request body key (legacy API contract).
SCHEMA_BACKUP_PARTIAL_V1 = SCHEMA_BACKUP_FULL.extend(
{
vol.Optional(ATTR_ADDONS): vol.Or(ALL_APPS_FLAG, vol.All([str], vol.Unique())),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
}
)
# V2 schema uses "apps" as the request body key.
SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
{
vol.Optional(ATTR_APPS): vol.Or(ALL_APPS_FLAG, vol.All([str], vol.Unique())),
vol.Optional(ATTR_ADDONS): vol.Or(
ALL_ADDONS_FLAG, vol.All([str], vol.Unique())
),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
}
@@ -174,8 +155,8 @@ class APIBackups(CoreSysAttributes):
for loc in backup.locations
}
def _list_backups(self) -> list[dict[str, Any]]:
"""Return list of backups using v2 field names (content["apps"])."""
def _list_backups(self):
"""Return list of backups."""
return [
{
ATTR_SLUG: backup.slug,
@@ -191,7 +172,7 @@ class APIBackups(CoreSysAttributes):
ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
ATTR_APPS: backup.app_list,
ATTR_ADDONS: backup.addon_list,
ATTR_FOLDERS: backup.folders,
},
}
@@ -199,76 +180,25 @@ class APIBackups(CoreSysAttributes):
if backup.location != LOCATION_CLOUD_BACKUP
]
@staticmethod
def _rename_apps_to_addons_in_backups(
data_backups: list[dict[str, Any]],
) -> list[dict[str, Any]]:
"""Rename the content["apps"] key to content["addons"] for v1 responses."""
for backup in data_backups:
content = backup[ATTR_CONTENT]
content[ATTR_ADDONS] = content.pop(ATTR_APPS)
return data_backups
@api_process
async def list_backups(self, request):
"""Return backup list."""
data_backups = self._list_backups()
def _backup_info_data(self, backup: Backup) -> dict[str, Any]:
"""Return backup info dict using v2 field names (top-level "apps")."""
data_apps = [
{
ATTR_SLUG: app_data[ATTR_SLUG],
ATTR_NAME: app_data[ATTR_NAME],
ATTR_VERSION: app_data[ATTR_VERSION],
ATTR_SIZE: app_data[ATTR_SIZE],
}
for app_data in backup.apps
]
return {
ATTR_SLUG: backup.slug,
ATTR_TYPE: backup.sys_type,
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_SIZE: backup.size,
ATTR_SIZE_BYTES: backup.size_bytes,
ATTR_COMPRESSED: backup.compressed,
ATTR_PROTECTED: backup.protected,
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
ATTR_HOMEASSISTANT: backup.homeassistant_version,
ATTR_LOCATION: backup.location,
ATTR_LOCATIONS: backup.locations,
ATTR_APPS: data_apps,
ATTR_REPOSITORIES: backup.repositories,
ATTR_FOLDERS: backup.folders,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
ATTR_EXTRA: backup.extra,
}
if request.path == "/snapshots":
# Kept for backwards compability
return {"snapshots": data_backups}
return {ATTR_BACKUPS: data_backups}
@api_process
async def list_backups(self, request: web.Request) -> dict[str, Any]:
"""Return backup list (v2: content uses "apps" key)."""
return {ATTR_BACKUPS: self._list_backups()}
@api_process
async def list_backups_v1(self, request: web.Request) -> dict[str, Any]:
"""Return backup list (v1: content uses "addons" key)."""
return {
ATTR_BACKUPS: self._rename_apps_to_addons_in_backups(self._list_backups())
}
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return backup list and manager info (v2: content uses "apps" key)."""
async def info(self, request):
"""Return backup list and manager info."""
return {
ATTR_BACKUPS: self._list_backups(),
ATTR_DAYS_UNTIL_STALE: self.sys_backups.days_until_stale,
}
@api_process
async def info_v1(self, request: web.Request) -> dict[str, Any]:
"""Return backup list and manager info (v1: content uses "addons" key)."""
return {
ATTR_BACKUPS: self._rename_apps_to_addons_in_backups(self._list_backups()),
ATTR_DAYS_UNTIL_STALE: self.sys_backups.days_until_stale,
}
@api_process
async def options(self, request):
"""Set backup manager options."""
@@ -286,18 +216,41 @@ class APIBackups(CoreSysAttributes):
return True
@api_process
async def backup_info(self, request: web.Request) -> dict[str, Any]:
"""Return backup info (v2: top-level "apps" key)."""
async def backup_info(self, request):
"""Return backup info."""
backup = self._extract_slug(request)
return self._backup_info_data(backup)
@api_process
async def backup_info_v1(self, request: web.Request) -> dict[str, Any]:
"""Return backup info (v1: top-level "addons" key)."""
backup = self._extract_slug(request)
data = self._backup_info_data(backup)
data[ATTR_ADDONS] = data.pop(ATTR_APPS)
return data
data_addons = []
for addon_data in backup.addons:
data_addons.append(
{
ATTR_SLUG: addon_data[ATTR_SLUG],
ATTR_NAME: addon_data[ATTR_NAME],
ATTR_VERSION: addon_data[ATTR_VERSION],
ATTR_SIZE: addon_data[ATTR_SIZE],
}
)
return {
ATTR_SLUG: backup.slug,
ATTR_TYPE: backup.sys_type,
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_SIZE: backup.size,
ATTR_SIZE_BYTES: backup.size_bytes,
ATTR_COMPRESSED: backup.compressed,
ATTR_PROTECTED: backup.protected,
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
ATTR_HOMEASSISTANT: backup.homeassistant_version,
ATTR_LOCATION: backup.location,
ATTR_LOCATIONS: backup.locations,
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: backup.repositories,
ATTR_FOLDERS: backup.folders,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
ATTR_EXTRA: backup.extra,
}
def _location_to_mount(self, location: str | None) -> LOCATION_TYPE:
"""Convert a single location to a mount if possible."""
@@ -331,20 +284,6 @@ class APIBackups(CoreSysAttributes):
f"Location {LOCATION_CLOUD_BACKUP} is only available for Home Assistant"
)
def _process_location_in_body(
self, request: web.Request, body: dict[str, Any]
) -> dict[str, Any]:
"""Validate and convert location field in partial backup/restore body."""
if ATTR_LOCATION not in body:
return body
location_names: list[str | None] = body.pop(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)
locations = [self._location_to_mount(loc) for loc in location_names]
body[ATTR_LOCATION] = locations.pop(0)
if locations:
body[ATTR_ADDITIONAL_LOCATIONS] = locations
return body
@api_process
async def backup_full(self, request: web.Request):
"""Create full backup."""
@@ -378,10 +317,27 @@ class APIBackups(CoreSysAttributes):
job_id=job_id,
)
async def _do_backup_partial(
self, body: dict[str, Any], background: bool
) -> dict[str, Any]:
"""Run backup_partial business logic. Expects body["apps"] (v2 key)."""
@api_process
async def backup_partial(self, request: web.Request):
"""Create a partial backup."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
locations: list[LOCATION_TYPE] | None = None
if ATTR_LOCATION in body:
location_names: list[str | None] = body.pop(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)
locations = [
self._location_to_mount(location) for location in location_names
]
body[ATTR_LOCATION] = locations.pop(0)
if locations:
body[ATTR_ADDITIONAL_LOCATIONS] = locations
if body.get(ATTR_ADDONS) == ALL_ADDONS_FLAG:
body[ATTR_ADDONS] = list(self.sys_addons.local)
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await background_task(
self, self.sys_backups.do_backup_partial, **body
)
@@ -397,34 +353,6 @@ class APIBackups(CoreSysAttributes):
job_id=job_id,
)
@api_process
async def backup_partial(self, request: web.Request):
"""Create a partial backup (v2: accepts "apps" key in request body)."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
self._process_location_in_body(request, body)
if body.get(ATTR_APPS) == ALL_APPS_FLAG:
body[ATTR_APPS] = list(self.sys_apps.local)
background = body.pop(ATTR_BACKGROUND)
return await self._do_backup_partial(body, background)
@api_process
async def backup_partial_v1(self, request: web.Request):
"""Create a partial backup (v1: accepts "addons" key in request body)."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL_V1, request)
self._process_location_in_body(request, body)
if body.get(ATTR_ADDONS) == ALL_APPS_FLAG:
body[ATTR_ADDONS] = list(self.sys_apps.local)
# Rename "addons" → "apps" so _do_backup_partial receives the v2 key
if ATTR_ADDONS in body:
body[ATTR_APPS] = body.pop(ATTR_ADDONS)
background = body.pop(ATTR_BACKGROUND)
return await self._do_backup_partial(body, background)
@api_process
async def restore_full(self, request: web.Request):
"""Full restore of a backup."""
@@ -445,10 +373,15 @@ class APIBackups(CoreSysAttributes):
job_id=job_id,
)
async def _do_restore_partial(
self, backup: Backup, body: dict[str, Any], background: bool
) -> dict[str, Any]:
"""Run restore_partial business logic. Expects body["apps"] (v2 key)."""
@api_process
async def restore_partial(self, request: web.Request):
"""Partial restore a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await background_task(
self, self.sys_backups.do_restore_partial, backup, **body
)
@@ -460,33 +393,6 @@ class APIBackups(CoreSysAttributes):
job_id=job_id,
)
@api_process
async def restore_partial(self, request: web.Request):
"""Partial restore a backup (v2: accepts "apps" key in request body)."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND)
return await self._do_restore_partial(backup, body, background)
@api_process
async def restore_partial_v1(self, request: web.Request):
"""Partial restore a backup (v1: accepts "addons" key in request body)."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL_V1, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND)
# Rename "addons" → "apps" so _do_restore_partial receives the v2 key
if ATTR_ADDONS in body:
body[ATTR_APPS] = body.pop(ATTR_ADDONS)
return await self._do_restore_partial(backup, body, background)
@api_process
async def freeze(self, request: web.Request):
"""Initiate manual freeze for external backup."""

View File

@@ -6,16 +6,16 @@ from typing import Any
from aiohttp import web
import voluptuous as vol
from ..addons.addon import App
from ..addons.addon import Addon
from ..const import (
ATTR_APP,
ATTR_ADDON,
ATTR_CONFIG,
ATTR_DISCOVERY,
ATTR_SERVICE,
ATTR_SERVICES,
ATTR_UUID,
REQUEST_FROM,
AppState,
AddonState,
)
from ..coresys import CoreSysAttributes
from ..discovery import Message
@@ -49,25 +49,25 @@ class APIDiscovery(CoreSysAttributes):
# Get available discovery
discovery = [
{
ATTR_APP: message.addon,
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
for message in self.sys_discovery.list_messages
if (
discovered := self.sys_apps.get_local_only(
discovered := self.sys_addons.get_local_only(
message.addon,
)
)
and discovered.state == AppState.STARTED
and discovered.state == AddonState.STARTED
]
# Get available services/apps
# Get available services/add-ons
services: dict[str, list[str]] = {}
for app in self.sys_apps.all:
for name in app.discovery:
services.setdefault(name, []).append(app.slug)
for addon in self.sys_addons.all:
for name in addon.discovery:
services.setdefault(name, []).append(addon.slug)
return {ATTR_DISCOVERY: discovery, ATTR_SERVICES: services}
@@ -75,22 +75,22 @@ class APIDiscovery(CoreSysAttributes):
async def set_discovery(self, request: web.Request) -> dict[str, str]:
"""Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request)
app: App = request[REQUEST_FROM]
addon: Addon = request[REQUEST_FROM]
service = body[ATTR_SERVICE]
# Access?
if body[ATTR_SERVICE] not in app.discovery:
if body[ATTR_SERVICE] not in addon.discovery:
_LOGGER.error(
"App %s attempted to send discovery for service %s which is not listed in its config. Please report this to the maintainer of the app",
app.name,
"Add-on %s attempted to send discovery for service %s which is not listed in its config. Please report this to the maintainer of the add-on",
addon.name,
service,
)
raise APIForbidden(
"Apps must list services they provide via discovery in their config!"
"Add-ons must list services they provide via discovery in their config!"
)
# Process discovery message
message = await self.sys_discovery.send(app, **body)
message = await self.sys_discovery.send(addon, **body)
return {ATTR_UUID: message.uuid}
@@ -101,7 +101,7 @@ class APIDiscovery(CoreSysAttributes):
message = self._extract_message(request)
return {
ATTR_APP: message.addon,
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
@@ -111,10 +111,10 @@ class APIDiscovery(CoreSysAttributes):
async def del_discovery(self, request: web.Request) -> None:
"""Delete data into a discovery message."""
message = self._extract_message(request)
app = request[REQUEST_FROM]
addon = request[REQUEST_FROM]
# Permission
if message.addon != app.slug:
if message.addon != addon.slug:
raise APIForbidden("Can't remove discovery message")
await self.sys_discovery.remove(message)

View File

@@ -357,8 +357,8 @@ class APIHost(CoreSysAttributes):
known_paths = await self.sys_run_in_executor(
disk.get_dir_sizes,
{
"addons_data": self.sys_config.path_apps_data,
"addons_config": self.sys_config.path_app_configs,
"addons_data": self.sys_config.path_addons_data,
"addons_config": self.sys_config.path_addon_configs,
"media": self.sys_config.path_media,
"share": self.sys_config.path_share,
"backup": self.sys_config.path_backup,

View File

@@ -1,4 +1,4 @@
"""Supervisor App ingress service."""
"""Supervisor Add-on ingress service."""
import asyncio
from ipaddress import ip_address
@@ -15,7 +15,7 @@ from aiohttp.web_exceptions import (
from multidict import CIMultiDict, istr
import voluptuous as vol
from ..addons.addon import App
from ..addons.addon import Addon
from ..const import (
ATTR_ADMIN,
ATTR_ENABLE,
@@ -75,37 +75,37 @@ def status_code_must_be_empty_body(code: int) -> bool:
class APIIngress(CoreSysAttributes):
"""Ingress view to handle app webui routing."""
"""Ingress view to handle add-on webui routing."""
def _extract_app(self, request: web.Request) -> App:
"""Return app, throw an exception it it doesn't exist."""
def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist."""
token = request.match_info["token"]
# Find correct app
app = self.sys_ingress.get(token)
if not app:
# Find correct add-on
addon = self.sys_ingress.get(token)
if not addon:
_LOGGER.warning("Ingress for %s not available", token)
raise HTTPServiceUnavailable()
return app
return addon
def _create_url(self, app: App, path: str) -> str:
def _create_url(self, addon: Addon, path: str) -> str:
"""Create URL to container."""
return f"http://{app.ip_address}:{app.ingress_port}/{path}"
return f"http://{addon.ip_address}:{addon.ingress_port}/{path}"
@api_process
async def panels(self, request: web.Request) -> dict[str, Any]:
"""Create a list of panel data."""
apps = {}
for app in self.sys_ingress.apps:
apps[app.slug] = {
ATTR_TITLE: app.panel_title,
ATTR_ICON: app.panel_icon,
ATTR_ADMIN: app.panel_admin,
ATTR_ENABLE: app.ingress_panel,
addons = {}
for addon in self.sys_ingress.addons:
addons[addon.slug] = {
ATTR_TITLE: addon.panel_title,
ATTR_ICON: addon.panel_icon,
ATTR_ADMIN: addon.panel_admin,
ATTR_ENABLE: addon.ingress_panel,
}
return {ATTR_PANELS: apps}
return {ATTR_PANELS: addons}
@api_process
@require_home_assistant
@@ -149,16 +149,16 @@ class APIIngress(CoreSysAttributes):
raise HTTPUnauthorized()
# Process requests
app = self._extract_app(request)
addon = self._extract_addon(request)
path = request.match_info.get("path", "")
session_data = self.sys_ingress.get_session_data(session)
try:
# Websocket
if _is_websocket(request):
return await self._handle_websocket(request, app, path, session_data)
return await self._handle_websocket(request, addon, path, session_data)
# Request
return await self._handle_request(request, app, path, session_data)
return await self._handle_request(request, addon, path, session_data)
except aiohttp.ClientError as err:
_LOGGER.error("Ingress error: %s", err)
@@ -168,7 +168,7 @@ class APIIngress(CoreSysAttributes):
async def _handle_websocket(
self,
request: web.Request,
app: App,
addon: Addon,
path: str,
session_data: IngressSessionData | None,
) -> web.WebSocketResponse:
@@ -190,8 +190,8 @@ class APIIngress(CoreSysAttributes):
await ws_server.prepare(request)
# Preparing
url = self._create_url(app, path)
source_header = _init_header(request, app, session_data)
url = self._create_url(addon, path)
source_header = _init_header(request, addon, session_data)
# Support GET query
if request.query_string:
@@ -199,7 +199,7 @@ class APIIngress(CoreSysAttributes):
# Start proxy
try:
_LOGGER.debug("Proxing WebSocket to %s, upstream url: %s", app.slug, url)
_LOGGER.debug("Proxing WebSocket to %s, upstream url: %s", addon.slug, url)
async with self.sys_websession.ws_connect(
url,
headers=source_header,
@@ -217,28 +217,28 @@ class APIIngress(CoreSysAttributes):
return_when=asyncio.FIRST_COMPLETED,
)
except TimeoutError:
_LOGGER.warning("WebSocket proxy to %s timed out", app.slug)
_LOGGER.warning("WebSocket proxy to %s timed out", addon.slug)
return ws_server
async def _handle_request(
self,
request: web.Request,
app: App,
addon: Addon,
path: str,
session_data: IngressSessionData | None,
) -> web.Response | web.StreamResponse:
"""Ingress route for request."""
url = self._create_url(app, path)
source_header = _init_header(request, app, session_data)
url = self._create_url(addon, path)
source_header = _init_header(request, addon, session_data)
# Passing the raw stream breaks requests for some webservers
# since we just need it for POST requests really, for all other methods
# we read the bytes and pass that to the request to the app
# apps needs to add support with that in the configuration
# we read the bytes and pass that to the request to the add-on
# add-ons needs to add support with that in the configuration
data = (
request.content
if request.method == "POST" and app.ingress_stream
if request.method == "POST" and addon.ingress_stream
else await request.read()
)
@@ -318,7 +318,7 @@ class APIIngress(CoreSysAttributes):
def _init_header(
request: web.Request, app: App, session_data: IngressSessionData | None
request: web.Request, addon: Addon, session_data: IngressSessionData | None
) -> CIMultiDict[str]:
"""Create initial header."""
headers = CIMultiDict[str]()

View File

@@ -1,7 +1,6 @@
"""Handle security part of this API."""
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging
import re
from typing import Final
@@ -31,13 +30,13 @@ _CORE_VERSION: Final = AwesomeVersion("2023.3.4")
# fmt: off
_V1_FRONTEND_PATHS: Final = (
_CORE_FRONTEND_PATHS: Final = (
r"|/app/.*\.(?:js|gz|json|map|woff2)"
r"|/(store/)?addons/" + RE_SLUG + r"/(logo|icon)"
)
_V2_FRONTEND_PATHS: Final = (
r"|/store/apps/" + RE_SLUG + r"/(logo|icon)"
CORE_FRONTEND: Final = re.compile(
r"^(?:" + _CORE_FRONTEND_PATHS + r")$"
)
@@ -49,6 +48,19 @@ BLACKLIST: Final = re.compile(
r")$"
)
# Free to call or have own security concepts
NO_SECURITY_CHECK: Final = re.compile(
r"^(?:"
r"|/homeassistant/api/.*"
r"|/homeassistant/websocket"
r"|/core/api/.*"
r"|/core/websocket"
r"|/supervisor/ping"
r"|/ingress/[-_A-Za-z0-9]+/.*"
+ _CORE_FRONTEND_PATHS
+ r")$"
)
# Observer allow API calls
OBSERVER_CHECK: Final = re.compile(
r"^(?:"
@@ -56,6 +68,80 @@ OBSERVER_CHECK: Final = re.compile(
r")$"
)
# Can called by every add-on
ADDONS_API_BYPASS: Final = re.compile(
r"^(?:"
r"|/addons/self/(?!security|update)[^/]+"
r"|/addons/self/options/config"
r"|/info"
r"|/services.*"
r"|/discovery.*"
r"|/auth"
r")$"
)
# Home Assistant only
CORE_ONLY_PATHS: Final = re.compile(
r"^(?:"
r"/addons/" + RE_SLUG + "/sys_options"
r")$"
)
# Policy role add-on API access
ADDONS_ROLE_ACCESS: dict[str, re.Pattern[str]] = {
ROLE_DEFAULT: re.compile(
r"^(?:"
r"|/.+/info"
r")$"
),
ROLE_HOMEASSISTANT: re.compile(
r"^(?:"
r"|/.+/info"
r"|/core/.+"
r"|/homeassistant/.+"
r")$"
),
ROLE_BACKUP: re.compile(
r"^(?:"
r"|/.+/info"
r"|/backups.*"
r")$"
),
ROLE_MANAGER: re.compile(
r"^(?:"
r"|/.+/info"
r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
r"|/audio/.+"
r"|/auth/cache"
r"|/available_updates"
r"|/backups.*"
r"|/cli/.+"
r"|/core/.+"
r"|/dns/.+"
r"|/docker/.+"
r"|/jobs/.+"
r"|/hardware/.+"
r"|/hassos/.+"
r"|/homeassistant/.+"
r"|/host/.+"
r"|/mounts.*"
r"|/multicast/.+"
r"|/network/.+"
r"|/observer/.+"
r"|/os/(?!datadisk/wipe).+"
r"|/refresh_updates"
r"|/resolution/.+"
r"|/security/.+"
r"|/snapshots.*"
r"|/store.*"
r"|/supervisor/.+"
r")$"
),
ROLE_ADMIN: re.compile(
r".*"
),
}
FILTERS: Final = re.compile(
r"(?:"
@@ -76,193 +162,9 @@ FILTERS: Final = re.compile(
flags=re.IGNORECASE,
)
@dataclass(slots=True, frozen=True)
class _AppSecurityPatterns:
"""All compiled regex patterns for app API access control, per API version."""
# Paths where an installed app's token bypasses normal role checks
api_bypass: re.Pattern[str]
# Paths that only Home Assistant Core may call
core_only: re.Pattern[str]
# Per-role allowed path patterns for installed apps
role_access: dict[str, re.Pattern[str]]
# Paths serving frontend assets (checked in core_proxy middleware)
supervisor_frontend: re.Pattern[str]
# Paths that skip token validation entirely
no_security_check: re.Pattern[str]
# fmt: off
_V1_PATTERNS: Final = _AppSecurityPatterns(
api_bypass=re.compile(
r"^(?:"
r"|/addons/self/(?!security|update)[^/]+"
r"|/addons/self/options/config"
r"|/info"
r"|/services.*"
r"|/discovery.*"
r"|/auth"
r")$"
),
core_only=re.compile(
r"^(?:"
r"/addons/" + RE_SLUG + r"/sys_options"
r")$"
),
role_access={
ROLE_DEFAULT: re.compile(
r"^(?:"
r"|/.+/info"
r")$"
),
ROLE_HOMEASSISTANT: re.compile(
r"^(?:"
r"|/.+/info"
r"|/core/.+"
r"|/homeassistant/.+"
r")$"
),
ROLE_BACKUP: re.compile(
r"^(?:"
r"|/.+/info"
r"|/backups.*"
r")$"
),
ROLE_MANAGER: re.compile(
r"^(?:"
r"|/.+/info"
r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
r"|/audio/.+"
r"|/auth/cache"
r"|/available_updates"
r"|/backups.*"
r"|/cli/.+"
r"|/core/.+"
r"|/dns/.+"
r"|/docker/.+"
r"|/jobs/.+"
r"|/hardware/.+"
r"|/homeassistant/.+"
r"|/host/.+"
r"|/mounts.*"
r"|/multicast/.+"
r"|/network/.+"
r"|/observer/.+"
r"|/os/(?!datadisk/wipe).+"
r"|/refresh_updates"
r"|/resolution/.+"
r"|/security/.+"
r"|/snapshots.*"
r"|/store.*"
r"|/supervisor/.+"
r")$"
),
ROLE_ADMIN: re.compile(r".*"),
},
supervisor_frontend=re.compile(r"^(?:" + _V1_FRONTEND_PATHS + r")$"),
no_security_check=re.compile(
r"^(?:"
r"|/homeassistant/api/.*"
r"|/homeassistant/websocket"
r"|/core/api/.*"
r"|/core/websocket"
r"|/supervisor/ping"
r"|/ingress/[-_A-Za-z0-9]+/.*"
+ _V1_FRONTEND_PATHS
+ r")$"
),
)
_V2_PATTERNS: Final = _AppSecurityPatterns(
# /v2 is factored out as a literal prefix — alternatives only list the
# path suffix, making v1 ↔ v2 pattern diffs easy to read.
api_bypass=re.compile(
r"^/v2(?:"
r"|/apps/self/(?!security|update)[^/]+"
r"|/apps/self/options/config"
r"|/info"
r"|/services.*"
r"|/discovery.*"
r"|/auth"
r")$"
),
core_only=re.compile(
r"^/v2(?:"
r"/apps/" + RE_SLUG + r"/sys_options"
r")$"
),
role_access={
ROLE_DEFAULT: re.compile(
r"^/v2(?:"
r"|/.+/info"
r")$"
),
ROLE_HOMEASSISTANT: re.compile(
r"^/v2(?:"
r"|/.+/info"
r"|/core/.+"
r"|/homeassistant/.+"
r")$"
),
ROLE_BACKUP: re.compile(
r"^/v2(?:"
r"|/.+/info"
r"|/backups.*"
r")$"
),
ROLE_MANAGER: re.compile(
r"^/v2(?:"
r"|/.+/info"
r"|/apps(?:/" + RE_SLUG + r"/(?!security).+)?"
r"|/audio/.+"
r"|/auth/cache"
r"|/backups.*"
r"|/cli/.+"
r"|/core/.+"
r"|/dns/.+"
r"|/docker/.+"
r"|/jobs/.+"
r"|/hardware/.+"
r"|/homeassistant/.+"
r"|/host/.+"
r"|/mounts.*"
r"|/multicast/.+"
r"|/network/.+"
r"|/observer/.+"
r"|/os/(?!datadisk/wipe).+"
r"|/reload_updates"
r"|/resolution/.+"
r"|/security/.+"
r"|/store.*"
r"|/supervisor/.+"
r")$"
),
ROLE_ADMIN: re.compile(r".*"),
},
supervisor_frontend=re.compile(r"^/v2(?:" + _V2_FRONTEND_PATHS + r")$"),
no_security_check=re.compile(
r"^/v2(?:"
r"|/ingress/[-_A-Za-z0-9]+/.*"
+ _V2_FRONTEND_PATHS
+ r")$"
),
)
# fmt: on
def _get_app_security_patterns(request: Request) -> _AppSecurityPatterns:
"""Return the correct pattern set based on the request's API version."""
if request.path.startswith("/v2/"):
return _V2_PATTERNS
return _V1_PATTERNS
class SecurityMiddleware(CoreSysAttributes):
"""Security middleware functions."""
@@ -315,7 +217,6 @@ class SecurityMiddleware(CoreSysAttributes):
"""Check security access of this layer."""
request_from: CoreSysAttributes | None = None
supervisor_token = extract_supervisor_token(request)
patterns = _get_app_security_patterns(request)
# Blacklist
if BLACKLIST.match(request.path):
@@ -323,7 +224,7 @@ class SecurityMiddleware(CoreSysAttributes):
raise HTTPForbidden()
# Ignore security check
if patterns.no_security_check.match(request.path):
if NO_SECURITY_CHECK.match(request.path):
_LOGGER.debug("Passthrough %s", request.path)
request[REQUEST_FROM] = None
return await handler(request)
@@ -337,11 +238,8 @@ class SecurityMiddleware(CoreSysAttributes):
if supervisor_token == self.sys_homeassistant.supervisor_token:
_LOGGER.debug("%s access from Home Assistant", request.path)
request_from = self.sys_homeassistant
elif patterns.core_only.match(request.path):
_LOGGER.warning(
"Attempted access to %s from client besides Home Assistant",
request.path,
)
elif CORE_ONLY_PATHS.match(request.path):
_LOGGER.warning("Attempted access to %s from client besides Home Assistant")
raise HTTPForbidden()
# Host
@@ -357,24 +255,26 @@ class SecurityMiddleware(CoreSysAttributes):
_LOGGER.debug("%s access from Observer", request.path)
request_from = self.sys_plugins.observer
# App
app = None
# Add-on
addon = None
if supervisor_token and not request_from:
app = self.sys_apps.from_token(supervisor_token)
addon = self.sys_addons.from_token(supervisor_token)
# Check App API access
if app and patterns.api_bypass.match(request.path):
_LOGGER.debug("Passthrough %s from %s", request.path, app.slug)
request_from = app
elif app and app.access_hassio_api:
# Check Add-on API access
if addon and ADDONS_API_BYPASS.match(request.path):
_LOGGER.debug("Passthrough %s from %s", request.path, addon.slug)
request_from = addon
elif addon and addon.access_hassio_api:
# Check Role
if patterns.role_access[app.hassio_role].match(request.path):
_LOGGER.info("%s access from %s", request.path, app.slug)
request_from = app
if ADDONS_ROLE_ACCESS[addon.hassio_role].match(request.path):
_LOGGER.info("%s access from %s", request.path, addon.slug)
request_from = addon
else:
_LOGGER.warning("%s no role for %s", request.path, app.slug)
elif app:
_LOGGER.warning("%s missing API permission for %s", app.slug, request.path)
_LOGGER.warning("%s no role for %s", request.path, addon.slug)
elif addon:
_LOGGER.warning(
"%s missing API permission for %s", addon.slug, request.path
)
if request_from:
request[REQUEST_FROM] = request_from
@@ -422,9 +322,8 @@ class SecurityMiddleware(CoreSysAttributes):
and content_type_index - authorization_index == 1
)
patterns = _get_app_security_patterns(request)
if (
not patterns.supervisor_frontend.match(request.path) and is_proxy_request
not CORE_FRONTEND.match(request.path) and is_proxy_request
) or ingress_request:
raise HTTPBadRequest()
return await handler(request)

View File

@@ -7,6 +7,7 @@ import logging
import aiohttp
from aiohttp import WSCloseCode, WSMessageTypeError, web
from aiohttp.client_exceptions import ClientConnectorError
from aiohttp.client_ws import ClientWebSocketResponse
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE
from aiohttp.http_websocket import WSMsgType
@@ -15,7 +16,7 @@ from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, HomeAssistantAPIError, HomeAssistantAuthError
from ..utils.json import json_dumps
from ..utils.logging import AppLoggerAdapter
from ..utils.logging import AddonLoggerAdapter
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -29,6 +30,13 @@ FORWARD_HEADERS = (
)
HEADER_HA_ACCESS = "X-Ha-Access"
# Maximum message size for websocket messages from Home Assistant.
# Since these are coming from core we want the largest possible size
# that is not likely to cause a memory problem as most modern browsers
# support large messages.
# https://github.com/home-assistant/supervisor/issues/4392
MAX_MESSAGE_SIZE_FROM_CORE = 64 * 1024 * 1024
class APIProxy(CoreSysAttributes):
"""API Proxy for Home Assistant."""
@@ -73,13 +81,13 @@ class APIProxy(CoreSysAttributes):
else:
supervisor_token = request.headers.get(HEADER_HA_ACCESS, "")
app = self.sys_apps.from_token(supervisor_token)
if not app:
addon = self.sys_addons.from_token(supervisor_token)
if not addon:
_LOGGER.warning("Unknown Home Assistant API access!")
elif not app.access_homeassistant_api:
_LOGGER.warning("Not permitted API access: %s", app.slug)
elif not addon.access_homeassistant_api:
_LOGGER.warning("Not permitted API access: %s", addon.slug)
else:
_LOGGER.debug("%s access from %s", request.path, app.slug)
_LOGGER.debug("%s access from %s", request.path, addon.slug)
return
raise HTTPUnauthorized()
@@ -171,20 +179,63 @@ class APIProxy(CoreSysAttributes):
async def _websocket_client(self) -> ClientWebSocketResponse:
"""Initialize a WebSocket API connection."""
url = f"{self.sys_homeassistant.api_url}/api/websocket"
try:
ws_client = await self.sys_homeassistant.api.connect_websocket()
return ws_client.client
except HomeAssistantAPIError as err:
raise APIError(
f"Error connecting to Home Assistant WebSocket: {err}",
_LOGGER.error,
) from err
client = await self.sys_websession.ws_connect(
url, heartbeat=30, ssl=False, max_msg_size=MAX_MESSAGE_SIZE_FROM_CORE
)
# Handle authentication
data = await client.receive_json()
if data.get("type") == "auth_ok":
return client
if data.get("type") != "auth_required":
# Invalid protocol
raise APIError(
f"Got unexpected response from Home Assistant WebSocket: {data}",
_LOGGER.error,
)
# Auth session
await self.sys_homeassistant.api.ensure_access_token()
await client.send_json(
{
"type": "auth",
"access_token": self.sys_homeassistant.api.access_token,
},
dumps=json_dumps,
)
data = await client.receive_json()
if data.get("type") == "auth_ok":
return client
# Renew the Token is invalid
if (
data.get("type") == "invalid_auth"
and self.sys_homeassistant.refresh_token
):
self.sys_homeassistant.api.access_token = None
return await self._websocket_client()
raise HomeAssistantAuthError()
except (RuntimeError, ValueError, TypeError, ClientConnectorError) as err:
_LOGGER.error("Client error on WebSocket API %s.", err)
except HomeAssistantAuthError:
_LOGGER.error("Failed authentication to Home Assistant WebSocket")
raise APIError()
async def _proxy_message(
self,
source: web.WebSocketResponse | ClientWebSocketResponse,
target: web.WebSocketResponse | ClientWebSocketResponse,
logger: AppLoggerAdapter,
logger: AddonLoggerAdapter,
) -> None:
"""Proxy a message from client to server or vice versa."""
while not source.closed and not target.closed:
@@ -198,7 +249,7 @@ class APIProxy(CoreSysAttributes):
logger.debug(
"Received WebSocket message type %r from %s.",
msg.type,
"app" if type(source) is web.WebSocketResponse else "Core",
"add-on" if type(source) is web.WebSocketResponse else "Core",
)
await target.close()
case WSMsgType.CLOSING:
@@ -227,7 +278,7 @@ class APIProxy(CoreSysAttributes):
# init server
server = web.WebSocketResponse(heartbeat=30)
await server.prepare(request)
app_name = None
addon_name = None
# handle authentication
try:
@@ -241,9 +292,9 @@ class APIProxy(CoreSysAttributes):
supervisor_token = response.get("api_password") or response.get(
"access_token"
)
app = self.sys_apps.from_token(supervisor_token)
addon = self.sys_addons.from_token(supervisor_token)
if not app or not app.access_homeassistant_api:
if not addon or not addon.access_homeassistant_api:
_LOGGER.warning("Unauthorized WebSocket access!")
await server.send_json(
{"type": "auth_invalid", "message": "Invalid access"},
@@ -251,8 +302,8 @@ class APIProxy(CoreSysAttributes):
)
return server
app_name = app.slug
_LOGGER.info("WebSocket access from %s", app_name)
addon_name = addon.slug
_LOGGER.info("WebSocket access from %s", addon_name)
await server.send_json(
{"type": "auth_ok", "ha_version": self.sys_homeassistant.version},
@@ -276,7 +327,7 @@ class APIProxy(CoreSysAttributes):
except APIError:
return server
logger = AppLoggerAdapter(_LOGGER, {"app_name": app_name})
logger = AddonLoggerAdapter(_LOGGER, {"addon_name": addon_name})
logger.info("Home Assistant WebSocket API proxy running")
client_task = self.sys_create_task(self._proxy_message(client, server, logger))

View File

@@ -59,8 +59,8 @@ class APIResoulution(CoreSysAttributes):
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return resolution information."""
return {
ATTR_UNSUPPORTED: sorted(self.sys_resolution.unsupported),
ATTR_UNHEALTHY: sorted(self.sys_resolution.unhealthy),
ATTR_UNSUPPORTED: self.sys_resolution.unsupported,
ATTR_UNHEALTHY: self.sys_resolution.unhealthy,
ATTR_SUGGESTIONS: [
self._generate_suggestion_information(suggestion)
for suggestion in self.sys_resolution.suggestions

View File

@@ -94,17 +94,17 @@ class APIRoot(CoreSysAttributes):
}
)
# Apps
# Add-ons
available_updates.extend(
{
ATTR_UPDATE_TYPE: "addon",
ATTR_NAME: app.name,
ATTR_ICON: f"/addons/{app.slug}/icon" if app.with_icon else None,
ATTR_PANEL_PATH: f"/update-available/{app.slug}",
ATTR_VERSION_LATEST: app.latest_version,
ATTR_NAME: addon.name,
ATTR_ICON: f"/addons/{addon.slug}/icon" if addon.with_icon else None,
ATTR_PANEL_PATH: f"/update-available/{addon.slug}",
ATTR_VERSION_LATEST: addon.latest_version,
}
for app in self.sys_apps.installed
if app.need_update
for addon in self.sys_addons.installed
if addon.need_update
)
return {ATTR_AVAILABLE_UPDATES: available_updates}

View File

@@ -48,10 +48,10 @@ class APIServices(CoreSysAttributes):
"""Write data into a service."""
service = self._extract_service(request)
body = await api_validate(service.schema, request)
app = request[REQUEST_FROM]
addon = request[REQUEST_FROM]
_check_access(request, service.slug)
await service.set_service_data(app, body)
await service.set_service_data(addon, body)
@api_process
async def get_service(self, request: web.Request) -> dict[str, Any]:
@@ -69,18 +69,18 @@ class APIServices(CoreSysAttributes):
async def del_service(self, request: web.Request) -> None:
"""Delete data into a service."""
service = self._extract_service(request)
app = request[REQUEST_FROM]
addon = request[REQUEST_FROM]
# Access
_check_access(request, service.slug, True)
await service.del_service_data(app)
await service.del_service_data(addon)
def _check_access(request, service, provide=False):
"""Raise error if the rights are wrong."""
app = request[REQUEST_FROM]
if not app.services_role.get(service):
addon = request[REQUEST_FROM]
if not addon.services_role.get(service):
raise APIForbidden(f"No access to {service} service!")
if provide and app.services_role.get(service) != PROVIDE_SERVICE:
if provide and addon.services_role.get(service) != PROVIDE_SERVICE:
raise APIForbidden(f"No access to write {service} service!")

View File

@@ -7,8 +7,8 @@ from typing import Any, cast
from aiohttp import web
import voluptuous as vol
from ..addons.addon import App
from ..addons.manager import AnyApp
from ..addons.addon import Addon
from ..addons.manager import AnyAddon
from ..addons.utils import rating_security
from ..api.const import ATTR_SIGNED
from ..api.utils import api_process, api_process_raw, api_validate
@@ -16,7 +16,6 @@ from ..const import (
ATTR_ADDONS,
ATTR_ADVANCED,
ATTR_APPARMOR,
ATTR_APPS,
ATTR_ARCH,
ATTR_AUTH_API,
ATTR_AVAILABLE,
@@ -54,9 +53,9 @@ from ..const import (
REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden, APINotFound, StoreAppNotFoundError
from ..exceptions import APIError, APIForbidden, APINotFound, StoreAddonNotFoundError
from ..resolution.const import ContextType, SuggestionType
from ..store.addon import AppStore
from ..store.addon import AddonStore
from ..store.repository import Repository
from ..store.validate import validate_repository
from .const import ATTR_BACKGROUND, CONTENT_TYPE_PNG, CONTENT_TYPE_TEXT
@@ -101,23 +100,23 @@ def _read_static_binary_file(path: Path) -> Any:
class APIStore(CoreSysAttributes):
"""Handle RESTful API for store functions."""
def _extract_app(self, request: web.Request, installed=False) -> AnyApp:
"""Return app, throw an exception it it doesn't exist."""
app_slug: str = request.match_info["app"]
def _extract_addon(self, request: web.Request, installed=False) -> AnyAddon:
"""Return add-on, throw an exception it it doesn't exist."""
addon_slug: str = request.match_info["addon"]
if not (app := self.sys_apps.get(app_slug)):
raise StoreAppNotFoundError(app=app_slug)
if not (addon := self.sys_addons.get(addon_slug)):
raise StoreAddonNotFoundError(addon=addon_slug)
if installed and not app.is_installed:
raise APIError(f"App {app_slug} is not installed")
if installed and not addon.is_installed:
raise APIError(f"Addon {addon_slug} is not installed")
if not installed and app.is_installed:
app = cast(App, app)
if not app.app_store:
raise StoreAppNotFoundError(app=app_slug)
return app.app_store
if not installed and addon.is_installed:
addon = cast(Addon, addon)
if not addon.addon_store:
raise StoreAddonNotFoundError(addon=addon_slug)
return addon.addon_store
return app
return addon
def _extract_repository(self, request: web.Request) -> Repository:
"""Return repository, throw an exception it it doesn't exist."""
@@ -130,50 +129,52 @@ class APIStore(CoreSysAttributes):
return self.sys_store.get(repository_slug)
async def _generate_app_information(
self, app: AppStore, extended: bool = False
async def _generate_addon_information(
self, addon: AddonStore, extended: bool = False
) -> dict[str, Any]:
"""Generate app information."""
"""Generate addon information."""
installed = self.sys_apps.get_local_only(app.slug) if app.is_installed else None
installed = (
self.sys_addons.get_local_only(addon.slug) if addon.is_installed else None
)
data = {
ATTR_ADVANCED: app.advanced,
ATTR_ARCH: app.supported_arch,
ATTR_AVAILABLE: app.available,
ATTR_BUILD: app.need_build,
ATTR_DESCRIPTON: app.description,
ATTR_DOCUMENTATION: app.with_documentation,
ATTR_HOMEASSISTANT: app.homeassistant_version,
ATTR_ICON: app.with_icon,
ATTR_INSTALLED: app.is_installed,
ATTR_LOGO: app.with_logo,
ATTR_NAME: app.name,
ATTR_REPOSITORY: app.repository,
ATTR_SLUG: app.slug,
ATTR_STAGE: app.stage,
ATTR_ADVANCED: addon.advanced,
ATTR_ARCH: addon.supported_arch,
ATTR_AVAILABLE: addon.available,
ATTR_BUILD: addon.need_build,
ATTR_DESCRIPTON: addon.description,
ATTR_DOCUMENTATION: addon.with_documentation,
ATTR_HOMEASSISTANT: addon.homeassistant_version,
ATTR_ICON: addon.with_icon,
ATTR_INSTALLED: addon.is_installed,
ATTR_LOGO: addon.with_logo,
ATTR_NAME: addon.name,
ATTR_REPOSITORY: addon.repository,
ATTR_SLUG: addon.slug,
ATTR_STAGE: addon.stage,
ATTR_UPDATE_AVAILABLE: installed.need_update if installed else False,
ATTR_URL: app.url,
ATTR_VERSION_LATEST: app.latest_version,
ATTR_URL: addon.url,
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_VERSION: installed.version if installed else None,
}
if extended:
data.update(
{
ATTR_APPARMOR: app.apparmor,
ATTR_AUTH_API: app.access_auth_api,
ATTR_DETACHED: app.is_detached,
ATTR_DOCKER_API: app.access_docker_api,
ATTR_FULL_ACCESS: app.with_full_access,
ATTR_HASSIO_API: app.access_hassio_api,
ATTR_HASSIO_ROLE: app.hassio_role,
ATTR_HOMEASSISTANT_API: app.access_homeassistant_api,
ATTR_HOST_NETWORK: app.host_network,
ATTR_HOST_PID: app.host_pid,
ATTR_INGRESS: app.with_ingress,
ATTR_LONG_DESCRIPTION: await app.long_description(),
ATTR_RATING: rating_security(app),
ATTR_SIGNED: app.signed,
ATTR_APPARMOR: addon.apparmor,
ATTR_AUTH_API: addon.access_auth_api,
ATTR_DETACHED: addon.is_detached,
ATTR_DOCKER_API: addon.access_docker_api,
ATTR_FULL_ACCESS: addon.with_full_access,
ATTR_HASSIO_API: addon.access_hassio_api,
ATTR_HASSIO_ROLE: addon.hassio_role,
ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
ATTR_HOST_NETWORK: addon.host_network,
ATTR_HOST_PID: addon.host_pid,
ATTR_INGRESS: addon.with_ingress,
ATTR_LONG_DESCRIPTION: await addon.long_description(),
ATTR_RATING: rating_security(addon),
ATTR_SIGNED: addon.signed,
}
)
@@ -191,27 +192,21 @@ class APIStore(CoreSysAttributes):
ATTR_MAINTAINER: repository.maintainer,
}
async def _all_store_apps_info(self) -> list[dict[str, Any]]:
"""Return gathered info for all apps in the store."""
return list(
await asyncio.gather(
*[
self._generate_app_information(self.sys_apps.store[app])
for app in self.sys_apps.store
]
)
)
@api_process
async def reload(self, request: web.Request) -> None:
"""Reload all app data from store."""
"""Reload all add-on data from store."""
await asyncio.shield(self.sys_store.reload())
@api_process
async def store_info(self, request: web.Request) -> dict[str, Any]:
"""Return store information (v2: uses "apps" key)."""
"""Return store information."""
return {
ATTR_APPS: await self._all_store_apps_info(),
ATTR_ADDONS: await asyncio.gather(
*[
self._generate_addon_information(self.sys_addons.store[addon])
for addon in self.sys_addons.store
]
),
ATTR_REPOSITORIES: [
self._generate_repository_information(repository)
for repository in self.sys_store.all
@@ -219,36 +214,27 @@ class APIStore(CoreSysAttributes):
}
@api_process
async def store_info_v1(self, request: web.Request) -> dict[str, Any]:
"""Return store information (v1: uses "addons" key)."""
async def addons_list(self, request: web.Request) -> dict[str, Any]:
"""Return all store add-ons."""
return {
ATTR_ADDONS: await self._all_store_apps_info(),
ATTR_REPOSITORIES: [
self._generate_repository_information(repository)
for repository in self.sys_store.all
],
ATTR_ADDONS: await asyncio.gather(
*[
self._generate_addon_information(self.sys_addons.store[addon])
for addon in self.sys_addons.store
]
)
}
@api_process
async def apps_list(self, request: web.Request) -> dict[str, Any]:
"""Return all store apps (v2: uses "apps" key)."""
return {ATTR_APPS: await self._all_store_apps_info()}
@api_process
async def apps_list_v1(self, request: web.Request) -> dict[str, Any]:
"""Return all store apps (v1: uses "addons" key)."""
return {ATTR_ADDONS: await self._all_store_apps_info()}
@api_process
async def apps_app_install(self, request: web.Request) -> dict[str, str] | None:
"""Install app."""
app = self._extract_app(request)
async def addons_addon_install(self, request: web.Request) -> dict[str, str] | None:
"""Install add-on."""
addon = self._extract_addon(request)
body = await api_validate(SCHEMA_INSTALL, request)
background = body[ATTR_BACKGROUND]
install_task, job_id = await background_task(
self, self.sys_apps.install, app.slug
self, self.sys_addons.install, addon.slug
)
if background and not install_task.done():
@@ -257,19 +243,19 @@ class APIStore(CoreSysAttributes):
return await install_task
@api_process
async def apps_app_update(self, request: web.Request) -> dict[str, str] | None:
"""Update app."""
app = self._extract_app(request, installed=True)
if app == request.get(REQUEST_FROM):
raise APIForbidden(f"App {app.slug} can't update itself!")
async def addons_addon_update(self, request: web.Request) -> dict[str, str] | None:
"""Update add-on."""
addon = self._extract_addon(request, installed=True)
if addon == request.get(REQUEST_FROM):
raise APIForbidden(f"Add-on {addon.slug} can't update itself!")
body = await api_validate(SCHEMA_UPDATE, request)
background = body[ATTR_BACKGROUND]
update_task, job_id = await background_task(
self,
self.sys_apps.update,
app.slug,
self.sys_addons.update,
addon.slug,
backup=body.get(ATTR_BACKUP),
)
@@ -281,71 +267,71 @@ class APIStore(CoreSysAttributes):
return None
@api_process
async def apps_app_info(self, request: web.Request) -> dict[str, Any]:
"""Return app information."""
return await self.apps_app_info_wrapped(request)
async def addons_addon_info(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information."""
return await self.addons_addon_info_wrapped(request)
# Used by legacy routing for apps/{app}/info, can be refactored out when that is removed (1/2023)
async def apps_app_info_wrapped(self, request: web.Request) -> dict[str, Any]:
"""Return app information directly (not api)."""
app = cast(AppStore, self._extract_app(request))
return await self._generate_app_information(app, True)
# Used by legacy routing for addons/{addon}/info, can be refactored out when that is removed (1/2023)
async def addons_addon_info_wrapped(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information directly (not api)."""
addon = cast(AddonStore, self._extract_addon(request))
return await self._generate_addon_information(addon, True)
@api_process_raw(CONTENT_TYPE_PNG)
async def apps_app_icon(self, request: web.Request) -> bytes:
"""Return icon from app."""
app = self._extract_app(request)
if not app.with_icon:
raise APIError(f"No icon found for app {app.slug}!")
async def addons_addon_icon(self, request: web.Request) -> bytes:
"""Return icon from add-on."""
addon = self._extract_addon(request)
if not addon.with_icon:
raise APIError(f"No icon found for add-on {addon.slug}!")
return await self.sys_run_in_executor(_read_static_binary_file, app.path_icon)
return await self.sys_run_in_executor(_read_static_binary_file, addon.path_icon)
@api_process_raw(CONTENT_TYPE_PNG)
async def apps_app_logo(self, request: web.Request) -> bytes:
"""Return logo from app."""
app = self._extract_app(request)
if not app.with_logo:
raise APIError(f"No logo found for app {app.slug}!")
async def addons_addon_logo(self, request: web.Request) -> bytes:
"""Return logo from add-on."""
addon = self._extract_addon(request)
if not addon.with_logo:
raise APIError(f"No logo found for add-on {addon.slug}!")
return await self.sys_run_in_executor(_read_static_binary_file, app.path_logo)
return await self.sys_run_in_executor(_read_static_binary_file, addon.path_logo)
@api_process_raw(CONTENT_TYPE_TEXT)
async def apps_app_changelog(self, request: web.Request) -> str:
"""Return changelog from app."""
async def addons_addon_changelog(self, request: web.Request) -> str:
"""Return changelog from add-on."""
# Frontend can't handle error response here, need to return 200 and error as text for now
try:
app = self._extract_app(request)
addon = self._extract_addon(request)
except APIError as err:
return str(err)
if not app.with_changelog:
return f"No changelog found for app {app.slug}!"
if not addon.with_changelog:
return f"No changelog found for add-on {addon.slug}!"
return await self.sys_run_in_executor(
_read_static_text_file, app.path_changelog
_read_static_text_file, addon.path_changelog
)
@api_process_raw(CONTENT_TYPE_TEXT)
async def apps_app_documentation(self, request: web.Request) -> str:
"""Return documentation from app."""
async def addons_addon_documentation(self, request: web.Request) -> str:
"""Return documentation from add-on."""
# Frontend can't handle error response here, need to return 200 and error as text for now
try:
app = self._extract_app(request)
addon = self._extract_addon(request)
except APIError as err:
return str(err)
if not app.with_documentation:
return f"No documentation found for app {app.slug}!"
if not addon.with_documentation:
return f"No documentation found for add-on {addon.slug}!"
return await self.sys_run_in_executor(
_read_static_text_file, app.path_documentation
_read_static_text_file, addon.path_documentation
)
@api_process
async def apps_app_availability(self, request: web.Request) -> None:
"""Check app availability for current system."""
app = cast(AppStore, self._extract_app(request))
app.validate_availability()
async def addons_addon_availability(self, request: web.Request) -> None:
"""Check add-on availability for current system."""
addon = cast(AddonStore, self._extract_addon(request))
addon.validate_availability()
@api_process
async def repositories_list(self, request: web.Request) -> list[dict[str, Any]]:

View File

@@ -10,7 +10,7 @@ import voluptuous as vol
from ..const import (
ATTR_ADDONS,
ATTR_APPS_REPOSITORIES,
ATTR_ADDONS_REPOSITORIES,
ATTR_ARCH,
ATTR_AUTO_UPDATE,
ATTR_BLK_READ,
@@ -22,7 +22,6 @@ from ..const import (
ATTR_DEBUG_BLOCK,
ATTR_DETECT_BLOCKING_IO,
ATTR_DIAGNOSTICS,
ATTR_FEATURE_FLAGS,
ATTR_HEALTHY,
ATTR_ICON,
ATTR_IP_ADDRESS,
@@ -42,7 +41,6 @@ from ..const import (
ATTR_VERSION,
ATTR_VERSION_LATEST,
ATTR_WAIT_BOOT,
FeatureFlag,
LogLevel,
UpdateChannel,
)
@@ -62,7 +60,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_CHANNEL): vol.Coerce(UpdateChannel),
vol.Optional(ATTR_APPS_REPOSITORIES): repositories,
vol.Optional(ATTR_ADDONS_REPOSITORIES): repositories,
vol.Optional(ATTR_TIMEZONE): str,
vol.Optional(ATTR_WAIT_BOOT): wait_boot,
vol.Optional(ATTR_LOGGING): vol.Coerce(LogLevel),
@@ -72,9 +70,6 @@ SCHEMA_OPTIONS = vol.Schema(
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
vol.Optional(ATTR_DETECT_BLOCKING_IO): vol.Coerce(DetectBlockingIO),
vol.Optional(ATTR_COUNTRY): str,
vol.Optional(ATTR_FEATURE_FLAGS): vol.Schema(
{vol.Coerce(FeatureFlag): vol.Boolean()}
),
}
)
@@ -109,26 +104,22 @@ class APISupervisor(CoreSysAttributes):
ATTR_AUTO_UPDATE: self.sys_updater.auto_update,
ATTR_DETECT_BLOCKING_IO: BlockBusterManager.is_enabled(),
ATTR_COUNTRY: self.sys_config.country,
ATTR_FEATURE_FLAGS: {
feature.value: self.sys_config.feature_flags.get(feature, False)
for feature in FeatureFlag
},
# Deprecated
# Depricated
ATTR_WAIT_BOOT: self.sys_config.wait_boot,
ATTR_ADDONS: [
{
ATTR_NAME: app.name,
ATTR_SLUG: app.slug,
ATTR_VERSION: app.version,
ATTR_VERSION_LATEST: app.latest_version,
ATTR_UPDATE_AVAILABLE: app.need_update,
ATTR_STATE: app.state,
ATTR_REPOSITORY: app.repository,
ATTR_ICON: app.with_icon,
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_VERSION: addon.version,
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_UPDATE_AVAILABLE: addon.need_update,
ATTR_STATE: addon.state,
ATTR_REPOSITORY: addon.repository,
ATTR_ICON: addon.with_icon,
}
for app in self.sys_apps.local.values()
for addon in self.sys_addons.local.values()
],
ATTR_APPS_REPOSITORIES: [
ATTR_ADDONS_REPOSITORIES: [
{ATTR_NAME: store.name, ATTR_SLUG: store.slug}
for store in self.sys_store.all
],
@@ -191,18 +182,14 @@ class APISupervisor(CoreSysAttributes):
if ATTR_WAIT_BOOT in body:
self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
if ATTR_FEATURE_FLAGS in body:
for feature, enabled in body[ATTR_FEATURE_FLAGS].items():
self.sys_config.set_feature_flag(feature, enabled)
# Save changes before processing apps in case of errors
# Save changes before processing addons in case of errors
await self.sys_updater.save_data()
await self.sys_config.save_data()
# Remove: 2022.9
if ATTR_APPS_REPOSITORIES in body:
if ATTR_ADDONS_REPOSITORIES in body:
await asyncio.shield(
self.sys_store.update_repositories(set(body[ATTR_APPS_REPOSITORIES]))
self.sys_store.update_repositories(set(body[ATTR_ADDONS_REPOSITORIES]))
)
await self.sys_resolution.evaluate.evaluate_system()
@@ -243,7 +230,7 @@ class APISupervisor(CoreSysAttributes):
@api_process
async def reload(self, request: web.Request) -> None:
"""Reload apps, configuration, etc."""
"""Reload add-ons, configuration, etc."""
await asyncio.gather(
asyncio.shield(self.sys_updater.reload()),
asyncio.shield(self.sys_homeassistant.secrets.reload()),

View File

@@ -3,7 +3,6 @@
import asyncio
from collections.abc import Callable, Mapping
import json
import logging
from typing import Any, cast
from aiohttp import web
@@ -32,11 +31,8 @@ from ..jobs import JobSchedulerOptions, SupervisorJob
from ..utils import check_exception_chain, get_message_from_exception_chain
from ..utils.json import json_dumps, json_loads as json_loads_util
from ..utils.log_format import format_message
from ..utils.sentry import async_capture_exception
from . import const
_LOGGER: logging.Logger = logging.getLogger(__name__)
def extract_supervisor_token(request: web.Request) -> str | None:
"""Extract Supervisor token from request."""
@@ -76,8 +72,6 @@ def api_process(method):
err, status=err.status, job_id=err.job_id, headers=err.headers
)
except HassioError as err:
_LOGGER.exception("Unexpected error during API call: %s", err)
await async_capture_exception(err)
return api_return_error(err)
if isinstance(answer, (dict, list)):
@@ -125,8 +119,6 @@ def api_process_raw(content, *, error_type=None):
job_id=err.job_id,
)
except HassioError as err:
_LOGGER.exception("Unexpected error during API call: %s", err)
await async_capture_exception(err)
return api_return_error(
err, error_type=error_type or const.CONTENT_TYPE_BINARY
)
@@ -156,7 +148,7 @@ def api_return_error(
if check_exception_chain(error, DockerAPIError):
message = format_message(message)
if not message:
message = "Unknown error, see Supervisor logs"
message = "Unknown error, see Supervisor logs (check with 'ha supervisor logs')"
match error_type:
case const.CONTENT_TYPE_TEXT:

View File

@@ -1,11 +1,11 @@
"""Manage SSO for Apps with Home Assistant user."""
"""Manage SSO for Add-ons with Home Assistant user."""
import asyncio
import hashlib
import logging
from typing import Any, TypedDict, cast
from .addons.addon import App
from .addons.addon import Addon
from .const import ATTR_PASSWORD, ATTR_USERNAME, FILE_HASSIO_AUTH, HomeAssistantUser
from .coresys import CoreSys, CoreSysAttributes
from .exceptions import (
@@ -34,7 +34,7 @@ class BackendAuthRequest(TypedDict):
class Auth(FileConfiguration, CoreSysAttributes):
"""Manage SSO for Apps with Home Assistant user."""
"""Manage SSO for Add-ons with Home Assistant user."""
def __init__(self, coresys: CoreSys) -> None:
"""Initialize updater."""
@@ -81,13 +81,13 @@ class Auth(FileConfiguration, CoreSysAttributes):
await self.save_data()
async def check_login(
self, app: App, username: str | None, password: str | None
self, addon: Addon, username: str | None, password: str | None
) -> bool:
"""Check username login."""
if username is None or password is None:
raise AuthInvalidNonStringValueError(_LOGGER.error)
_LOGGER.info("Auth request from '%s' for '%s'", app.slug, username)
_LOGGER.info("Auth request from '%s' for '%s'", addon.slug, username)
# Get from cache
cache_hit = self._check_cache(username, password)
@@ -99,18 +99,18 @@ class Auth(FileConfiguration, CoreSysAttributes):
# No cache hit
if cache_hit is None:
return await self._backend_login(app, username, password)
return await self._backend_login(addon, username, password)
# Home Assistant Core take over 1-2sec to validate it
# Let's use the cache and update the cache in background
if username not in self._running:
self._running[username] = self.sys_create_task(
self._backend_login(app, username, password)
self._backend_login(addon, username, password)
)
return cache_hit
async def _backend_login(self, app: App, username: str, password: str) -> bool:
async def _backend_login(self, addon: Addon, username: str, password: str) -> bool:
"""Check username login on core."""
try:
async with self.sys_homeassistant.api.make_request(
@@ -119,7 +119,7 @@ class Auth(FileConfiguration, CoreSysAttributes):
json=cast(
dict[str, Any],
BackendAuthRequest(
username=username, password=password, addon=app.slug
username=username, password=password, addon=addon.slug
),
),
) as req:

View File

@@ -3,7 +3,7 @@
import asyncio
from collections import defaultdict
from collections.abc import AsyncGenerator, Awaitable
from contextlib import asynccontextmanager, suppress
from contextlib import asynccontextmanager
from copy import deepcopy
from dataclasses import dataclass
from datetime import timedelta
@@ -28,10 +28,11 @@ from securetar import (
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..addons.manager import App
from ..addons.manager import Addon
from ..const import (
ATTR_ADDONS,
ATTR_COMPRESSED,
ATTR_CRYPTO,
ATTR_DATE,
ATTR_DOCKER,
ATTR_EXCLUDE_DATABASE,
@@ -47,12 +48,12 @@ from ..const import (
ATTR_SUPERVISOR_VERSION,
ATTR_TYPE,
ATTR_VERSION,
CRYPTO_AES128,
)
from ..coresys import CoreSys
from ..exceptions import (
AppsError,
AddonsError,
BackupError,
BackupFatalIOError,
BackupFileExistError,
BackupFileNotFoundError,
BackupInvalidError,
@@ -81,7 +82,7 @@ from .const import (
)
from .validate import SCHEMA_BACKUP
IGNORED_COMPARISON_FIELDS = {ATTR_PROTECTED, ATTR_DOCKER}
IGNORED_COMPARISON_FIELDS = {ATTR_PROTECTED, ATTR_CRYPTO, ATTR_DOCKER}
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -164,14 +165,14 @@ class Backup(JobGroup):
return self._data[ATTR_COMPRESSED]
@property
def apps(self) -> list[dict[str, Any]]:
"""Return the apps included in the backup."""
def addons(self) -> list[dict[str, Any]]:
"""Return backup date."""
return self._data[ATTR_ADDONS]
@property
def app_list(self) -> list[str]:
"""Return a list of apps slugs."""
return [app_data[ATTR_SLUG] for app_data in self.apps]
def addon_list(self) -> list[str]:
"""Return a list of add-ons slugs."""
return [addon_data[ATTR_SLUG] for addon_data in self.addons]
@property
def folders(self) -> list[str]:
@@ -180,12 +181,12 @@ class Backup(JobGroup):
@property
def repositories(self) -> list[str]:
"""Return app store repositories."""
"""Return add-on store repositories."""
return self._data[ATTR_REPOSITORIES]
@repositories.setter
def repositories(self, value: list[str]) -> None:
"""Set app store repositories."""
"""Set add-on store repositories."""
self._data[ATTR_REPOSITORIES] = value
@property
@@ -338,6 +339,7 @@ class Backup(JobGroup):
if password:
self._password = password
self._data[ATTR_PROTECTED] = True
self._data[ATTR_CRYPTO] = CRYPTO_AES128
self._locations[self.location].protected = True
if not compressed:
@@ -506,20 +508,10 @@ class Backup(JobGroup):
try:
yield
except Exception:
self._outer_secure_tarfile = None
# Close may fail (e.g. ENOSPC writing end-of-archive
# markers), but tarfile's finally ensures the file handle
# is released regardless. The file is unlinked by the caller.
with suppress(Exception):
await self.sys_run_in_executor(outer_secure_tarfile.close)
raise
try:
finally:
await self._create_finalize(outer_secure_tarfile)
size_bytes = await self.sys_run_in_executor(_close_outer_tarfile)
self._locations[self.location].size_bytes = size_bytes
finally:
self._outer_secure_tarfile = None
@asynccontextmanager
@@ -604,49 +596,45 @@ class Backup(JobGroup):
try:
await self.sys_run_in_executor(_add_backup_json)
except OSError as err:
raise BackupFatalIOError(
f"Can't write backup metadata: {err!s}", _LOGGER.error
) from err
except json.JSONDecodeError as err:
except (OSError, json.JSONDecodeError) as err:
self.sys_jobs.current.capture_error(BackupError("Can't write backup"))
_LOGGER.error("Can't write backup: %s", err)
@Job(name="backup_addon_save", cleanup=False)
async def _app_save(self, app: App) -> asyncio.Task | None:
"""Store an app into backup."""
self.sys_jobs.current.reference = slug = app.slug
async def _addon_save(self, addon: Addon) -> asyncio.Task | None:
"""Store an add-on into backup."""
self.sys_jobs.current.reference = slug = addon.slug
if not self._outer_secure_tarfile:
raise RuntimeError(
"Cannot backup components without initializing backup tar"
)
# Ensure it is still installed and get current data before proceeding
if not (curr_app := self.sys_apps.get_local_only(slug)):
if not (curr_addon := self.sys_addons.get_local_only(slug)):
_LOGGER.warning(
"Skipping backup of app %s because it has been uninstalled",
"Skipping backup of add-on %s because it has been uninstalled",
slug,
)
return None
tar_name = f"{slug}.tar{'.gz' if self.compressed else ''}"
app_file = self._outer_secure_tarfile.create_tar(
addon_file = self._outer_secure_tarfile.create_tar(
f"./{tar_name}",
gzip=self.compressed,
)
# Take backup
try:
start_task = await curr_app.backup(app_file)
except AppsError as err:
start_task = await curr_addon.backup(addon_file)
except AddonsError as err:
raise BackupError(str(err)) from err
# Store to config
self._data[ATTR_ADDONS].append(
{
ATTR_SLUG: slug,
ATTR_NAME: curr_app.name,
ATTR_VERSION: curr_app.version,
ATTR_NAME: curr_addon.name,
ATTR_VERSION: curr_addon.version,
# Bug - addon_file.size used to give us this information
# It always returns 0 in current securetar. Skipping until fixed
ATTR_SIZE: 0,
@@ -656,67 +644,63 @@ class Backup(JobGroup):
return start_task
@Job(name="backup_store_addons", cleanup=False)
async def store_apps(self, app_list: list[App]) -> list[asyncio.Task]:
"""Add a list of apps into backup.
async def store_addons(self, addon_list: list[Addon]) -> list[asyncio.Task]:
"""Add a list of add-ons into backup.
For each app that needs to be started after backup, returns a Task which
completes when that app has state 'started' (see app.start).
For each addon that needs to be started after backup, returns a Task which
completes when that addon has state 'started' (see addon.start).
"""
# Save Apps sequential avoid issue on slow IO
# Save Add-ons sequential avoid issue on slow IO
start_tasks: list[asyncio.Task] = []
for app in app_list:
for addon in addon_list:
try:
if start_task := await self._app_save(app):
if start_task := await self._addon_save(addon):
start_tasks.append(start_task)
except BackupFatalIOError:
raise
except BackupError as err:
self.sys_jobs.current.capture_error(err)
return start_tasks
@Job(name="backup_addon_restore", cleanup=False)
async def _app_restore(self, app_slug: str) -> asyncio.Task | None:
"""Restore an app from backup."""
self.sys_jobs.current.reference = app_slug
async def _addon_restore(self, addon_slug: str) -> asyncio.Task | None:
"""Restore an add-on from backup."""
self.sys_jobs.current.reference = addon_slug
if not self._tmp:
raise RuntimeError("Cannot restore components without opening backup tar")
tar_name = f"{app_slug}.tar{'.gz' if self.compressed else ''}"
tar_path = Path(self._tmp.name, tar_name)
# Verify the backup exists before trying to restore it
if not await self.sys_run_in_executor(tar_path.exists):
raise BackupError(f"Can't find backup {app_slug}", _LOGGER.error)
app_file = SecureTarFile(
tar_path,
tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}"
addon_file = SecureTarFile(
Path(self._tmp.name, tar_name),
gzip=self.compressed,
bufsize=BUF_SIZE,
password=self._password,
)
# If exists inside backup
if not await self.sys_run_in_executor(addon_file.path.exists):
raise BackupError(f"Can't find backup {addon_slug}", _LOGGER.error)
# Perform a restore
try:
return await self.sys_apps.restore(app_slug, app_file)
except AppsError as err:
return await self.sys_addons.restore(addon_slug, addon_file)
except AddonsError as err:
raise BackupError(
f"Can't restore backup {app_slug}", _LOGGER.error
f"Can't restore backup {addon_slug}", _LOGGER.error
) from err
@Job(name="backup_restore_addons", cleanup=False)
async def restore_apps(
self, app_list: list[str]
async def restore_addons(
self, addon_list: list[str]
) -> tuple[bool, list[asyncio.Task]]:
"""Restore a list app from backup."""
# Save Apps sequential avoid issue on slow IO
"""Restore a list add-on from backup."""
# Save Add-ons sequential avoid issue on slow IO
start_tasks: list[asyncio.Task] = []
success = True
for slug in app_list:
for slug in addon_list:
try:
start_task = await self._app_restore(slug)
start_task = await self._addon_restore(slug)
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't restore app %s: %s", slug, err)
_LOGGER.warning("Can't restore Add-on %s: %s", slug, err)
success = False
else:
if start_task:
@@ -725,20 +709,20 @@ class Backup(JobGroup):
return (success, start_tasks)
@Job(name="backup_remove_delta_addons", cleanup=False)
async def remove_delta_apps(self) -> bool:
"""Remove apps which are not in this backup."""
async def remove_delta_addons(self) -> bool:
"""Remove addons which are not in this backup."""
success = True
for app in self.sys_apps.installed:
if app.slug in self.app_list:
for addon in self.sys_addons.installed:
if addon.slug in self.addon_list:
continue
# Remove App because it's not a part of the new env
# Remove Add-on because it's not a part of the new env
# Do it sequential avoid issue on slow IO
try:
await self.sys_apps.uninstall(app.slug)
except AppsError as err:
await self.sys_addons.uninstall(addon.slug)
except AddonsError as err:
self.sys_jobs.current.capture_error(err)
_LOGGER.warning("Can't uninstall app %s: %s", app.slug, err)
_LOGGER.warning("Can't uninstall Add-on %s: %s", addon.slug, err)
success = False
return success
@@ -799,12 +783,8 @@ class Backup(JobGroup):
try:
if await self.sys_run_in_executor(_save):
self._data[ATTR_FOLDERS].append(name)
except OSError as err:
raise BackupFatalIOError(
f"Can't write tarfile: {err!s}", _LOGGER.error
) from err
except (tarfile.TarError, AddFileError) as err:
raise BackupError(f"Can't write tarfile: {err!s}") from err
except (tarfile.TarError, OSError, AddFileError) as err:
raise BackupError(f"Can't write tarfile: {str(err)}") from err
@Job(name="backup_store_folders", cleanup=False)
async def store_folders(self, folder_list: list[str]):
@@ -813,8 +793,6 @@ class Backup(JobGroup):
for folder in folder_list:
try:
await self._folder_save(folder)
except BackupFatalIOError:
raise
except BackupError as err:
err = BackupError(
f"Can't backup folder {folder}: {str(err)}", _LOGGER.error
@@ -1037,11 +1015,7 @@ class Backup(JobGroup):
try:
await self.sys_run_in_executor(_save)
except OSError as err:
raise BackupFatalIOError(
f"Can't write supervisor config tarfile: {err!s}", _LOGGER.error
) from err
except tarfile.TarError as err:
except (tarfile.TarError, OSError) as err:
raise BackupError(
f"Can't write supervisor config tarfile: {err!s}"
) from err

View File

@@ -10,7 +10,7 @@ from pathlib import Path
from shutil import copy
from typing import cast
from ..addons.addon import App
from ..addons.addon import Addon
from ..const import (
ATTR_DAYS_UNTIL_STALE,
FILE_HASSIO_BACKUPS,
@@ -502,7 +502,7 @@ class BackupManager(FileConfiguration, JobGroup):
async def _do_backup(
self,
backup: Backup,
app_list: list[App],
addon_list: list[Addon],
folder_list: list[str],
homeassistant: bool,
homeassistant_exclude_database: bool | None,
@@ -513,15 +513,11 @@ class BackupManager(FileConfiguration, JobGroup):
Must be called from an existing backup job. If the backup failed, the
backup file is being deleted and None is returned.
"""
app_start_tasks: list[Awaitable[None]] | None = None
addon_start_tasks: list[Awaitable[None]] | None = None
try:
await self.sys_core.set_state(CoreState.FREEZE)
# Any exception leaving create() means the backup is incomplete
# and will be discarded (file unlinked below). Individual
# app/folder errors are captured inside store_addons/
# store_folders and do not propagate.
async with backup.create():
# HomeAssistant Folder is for v1
if homeassistant:
@@ -532,10 +528,10 @@ class BackupManager(FileConfiguration, JobGroup):
else homeassistant_exclude_database
)
# Backup apps
if app_list:
# Backup add-ons
if addon_list:
self._change_stage(BackupJobStage.ADDONS, backup)
app_start_tasks = await backup.store_apps(app_list)
addon_start_tasks = await backup.store_addons(addon_list)
# Backup folders
if folder_list:
@@ -568,10 +564,10 @@ class BackupManager(FileConfiguration, JobGroup):
self._change_stage(BackupJobStage.COPY_ADDITONAL_LOCATIONS, backup)
await self._copy_to_additional_locations(backup, additional_locations)
if app_start_tasks:
if addon_start_tasks:
self._change_stage(BackupJobStage.AWAIT_ADDON_RESTARTS, backup)
# Ignore exceptions from waiting for app startup, app errors handled elsewhere
await asyncio.gather(*app_start_tasks, return_exceptions=True)
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*addon_start_tasks, return_exceptions=True)
return backup
finally:
@@ -619,7 +615,7 @@ class BackupManager(FileConfiguration, JobGroup):
_LOGGER.info("Creating new full backup with slug %s", new_backup.slug)
backup = await self._do_backup(
new_backup,
self.sys_apps.installed,
self.sys_addons.installed,
ALL_FOLDERS,
True,
homeassistant_exclude_database,
@@ -641,7 +637,7 @@ class BackupManager(FileConfiguration, JobGroup):
name: str = "",
filename: str | None = None,
*,
apps: list[str] | None = None,
addons: list[str] | None = None,
folders: list[str] | None = None,
password: str | None = None,
homeassistant: bool = False,
@@ -663,7 +659,7 @@ class BackupManager(FileConfiguration, JobGroup):
self, {JobCondition.FREE_SPACE}, "BackupManager.do_backup_partial"
)
apps = apps or []
addons = addons or []
folders = folders or []
# HomeAssistant Folder is for v1
@@ -671,7 +667,7 @@ class BackupManager(FileConfiguration, JobGroup):
folders.remove(FOLDER_HOMEASSISTANT)
homeassistant = True
if len(apps) == 0 and len(folders) == 0 and not homeassistant:
if len(addons) == 0 and len(folders) == 0 and not homeassistant:
_LOGGER.error("Nothing to create backup for")
new_backup = self._create_backup(
@@ -679,13 +675,13 @@ class BackupManager(FileConfiguration, JobGroup):
)
_LOGGER.info("Creating new partial backup with slug %s", new_backup.slug)
app_list = []
for app_slug in apps:
app = self.sys_apps.get(app_slug)
if app and app.is_installed:
app_list.append(cast(App, app))
addon_list = []
for addon_slug in addons:
addon = self.sys_addons.get(addon_slug)
if addon and addon.is_installed:
addon_list.append(cast(Addon, addon))
continue
_LOGGER.warning("App %s not found/installed", app_slug)
_LOGGER.warning("Add-on %s not found/installed", addon_slug)
# If being run in the background, notify caller that validation has completed
if validation_complete:
@@ -693,7 +689,7 @@ class BackupManager(FileConfiguration, JobGroup):
backup = await self._do_backup(
new_backup,
app_list,
addon_list,
folders,
homeassistant,
homeassistant_exclude_database,
@@ -706,7 +702,7 @@ class BackupManager(FileConfiguration, JobGroup):
async def _do_restore(
self,
backup: Backup,
app_list: list[str],
addon_list: list[str],
folder_list: list[str],
homeassistant: bool,
replace: bool,
@@ -716,7 +712,7 @@ class BackupManager(FileConfiguration, JobGroup):
Must be called from an existing restore job.
"""
app_start_tasks: list[Awaitable[None]] | None = None
addon_start_tasks: list[Awaitable[None]] | None = None
success = True
try:
@@ -732,18 +728,18 @@ class BackupManager(FileConfiguration, JobGroup):
self._change_stage(RestoreJobStage.HOME_ASSISTANT, backup)
task_hass = await backup.restore_homeassistant()
# Delete delta apps
# Delete delta add-ons
if replace:
self._change_stage(RestoreJobStage.REMOVE_DELTA_ADDONS, backup)
success = success and await backup.remove_delta_apps()
success = success and await backup.remove_delta_addons()
if app_list:
if addon_list:
self._change_stage(RestoreJobStage.ADDON_REPOSITORIES, backup)
await backup.restore_repositories(replace)
self._change_stage(RestoreJobStage.ADDONS, backup)
restore_success, app_start_tasks = await backup.restore_apps(
app_list
restore_success, addon_start_tasks = await backup.restore_addons(
addon_list
)
success = success and restore_success
@@ -767,10 +763,12 @@ class BackupManager(FileConfiguration, JobGroup):
f"Restore {backup.slug} error, see supervisor logs"
) from err
else:
if app_start_tasks:
if addon_start_tasks:
self._change_stage(RestoreJobStage.AWAIT_ADDON_RESTARTS, backup)
# Failure to resume apps post restore is still a restore failure
if any(await asyncio.gather(*app_start_tasks, return_exceptions=True)):
# Failure to resume addons post restore is still a restore failure
if any(
await asyncio.gather(*addon_start_tasks, return_exceptions=True)
):
return False
# Wait for mount activations (failures don't affect restore success
@@ -869,12 +867,12 @@ class BackupManager(FileConfiguration, JobGroup):
await self.sys_core.set_state(CoreState.FREEZE)
try:
# Stop Home-Assistant / Apps
# Stop Home-Assistant / Add-ons
await self.sys_core.shutdown(remove_homeassistant_container=True)
success = await self._do_restore(
backup,
backup.app_list,
backup.addon_list,
backup.folders,
homeassistant=True,
replace=True,
@@ -905,7 +903,7 @@ class BackupManager(FileConfiguration, JobGroup):
backup: Backup,
*,
homeassistant: bool = False,
apps: list[str] | None = None,
addons: list[str] | None = None,
folders: list[str] | None = None,
password: str | None = None,
location: str | None | type[DEFAULT] = DEFAULT,
@@ -915,7 +913,7 @@ class BackupManager(FileConfiguration, JobGroup):
# Add backup ID to job
self.sys_jobs.current.reference = backup.slug
app_list = apps or []
addon_list = addons or []
folder_list = folders or []
# Version 1
@@ -947,7 +945,7 @@ class BackupManager(FileConfiguration, JobGroup):
try:
success = await self._do_restore(
backup,
app_list,
addon_list,
folder_list,
homeassistant=homeassistant,
replace=False,
@@ -970,27 +968,27 @@ class BackupManager(FileConfiguration, JobGroup):
"""Freeze system to prepare for an external backup such as an image snapshot."""
await self.sys_core.set_state(CoreState.FREEZE)
# Determine running apps
installed = self.sys_apps.installed.copy()
# Determine running addons
installed = self.sys_addons.installed.copy()
is_running: list[bool] = await asyncio.gather(
*[app.is_running() for app in installed]
*[addon.is_running() for addon in installed]
)
running_apps = [
running_addons = [
installed[ind] for ind in range(len(installed)) if is_running[ind]
]
# Create thaw task first to ensure we eventually undo freezes even if the below fails
self._thaw_task = asyncio.shield(
self.sys_create_task(self._thaw_all(running_apps, timeout))
self.sys_create_task(self._thaw_all(running_addons, timeout))
)
# Tell Home Assistant to freeze for a backup
self._change_stage(BackupJobStage.HOME_ASSISTANT)
await self.sys_homeassistant.begin_backup()
# Run all pre-backup tasks for apps
# Run all pre-backup tasks for addons
self._change_stage(BackupJobStage.ADDONS)
await asyncio.gather(*[app.begin_backup() for app in running_apps])
await asyncio.gather(*[addon.begin_backup() for addon in running_addons])
@Job(
name="backup_manager_thaw_all",
@@ -998,7 +996,7 @@ class BackupManager(FileConfiguration, JobGroup):
on_condition=BackupJobError,
)
async def _thaw_all(
self, running_apps: list[App], timeout: float = DEFAULT_FREEZE_TIMEOUT
self, running_addons: list[Addon], timeout: float = DEFAULT_FREEZE_TIMEOUT
) -> None:
"""Thaw system after user signal or timeout."""
try:
@@ -1013,10 +1011,10 @@ class BackupManager(FileConfiguration, JobGroup):
await self.sys_homeassistant.end_backup()
self._change_stage(BackupJobStage.ADDONS)
app_start_tasks: list[asyncio.Task] = [
addon_start_tasks: list[asyncio.Task] = [
task
for task in await asyncio.gather(
*[app.end_backup() for app in running_apps]
*[addon.end_backup() for addon in running_addons]
)
if task
]
@@ -1025,9 +1023,9 @@ class BackupManager(FileConfiguration, JobGroup):
self._thaw_event.clear()
self._thaw_task = None
if app_start_tasks:
if addon_start_tasks:
self._change_stage(BackupJobStage.AWAIT_ADDON_RESTARTS)
await asyncio.gather(*app_start_tasks, return_exceptions=True)
await asyncio.gather(*addon_start_tasks, return_exceptions=True)
@Job(
name="backup_manager_signal_thaw",

View File

@@ -1,4 +1,4 @@
"""Util app functions."""
"""Util add-on functions."""
import hashlib
import re

View File

@@ -11,6 +11,7 @@ from ..backups.const import BackupType
from ..const import (
ATTR_ADDONS,
ATTR_COMPRESSED,
ATTR_CRYPTO,
ATTR_DATE,
ATTR_DAYS_UNTIL_STALE,
ATTR_EXCLUDE_DATABASE,
@@ -25,6 +26,7 @@ from ..const import (
ATTR_SUPERVISOR_VERSION,
ATTR_TYPE,
ATTR_VERSION,
CRYPTO_AES128,
FOLDER_ADDONS,
FOLDER_HOMEASSISTANT,
FOLDER_MEDIA,
@@ -42,13 +44,13 @@ ALL_FOLDERS = [
]
def unique_apps(apps_list):
"""Validate that an app is unique."""
single = {app[ATTR_SLUG] for app in apps_list}
def unique_addons(addons_list):
"""Validate that an add-on is unique."""
single = {addon[ATTR_SLUG] for addon in addons_list}
if len(single) != len(apps_list):
raise vol.Invalid("Invalid app list in backup!") from None
return apps_list
if len(single) != len(addons_list):
raise vol.Invalid("Invalid addon list in backup!") from None
return addons_list
def v1_homeassistant(
@@ -95,7 +97,7 @@ SCHEMA_BACKUP = vol.Schema(
vol.Optional(ATTR_PROTECTED, default=False): vol.All(
v1_protected, vol.Boolean()
),
vol.Remove("crypto"): vol.Maybe("aes128"),
vol.Optional(ATTR_CRYPTO, default=None): vol.Maybe(CRYPTO_AES128),
vol.Optional(ATTR_HOMEASSISTANT, default=None): vol.All(
v1_homeassistant,
vol.Maybe(
@@ -126,7 +128,7 @@ SCHEMA_BACKUP = vol.Schema(
extra=vol.REMOVE_EXTRA,
)
],
unique_apps,
unique_addons,
),
vol.Optional(ATTR_REPOSITORIES, default=list): repositories,
vol.Optional(ATTR_EXTRA, default=dict): dict,

View File

@@ -7,12 +7,11 @@ from importlib import import_module
import logging
import os
import signal
import threading
import warnings
from colorlog import ColoredFormatter
from .addons.manager import AppManager
from .addons.manager import AddonManager
from .api import RestAPI
from .arch import CpuArchManager
from .auth import Auth
@@ -78,7 +77,7 @@ async def initialize_coresys() -> CoreSys:
coresys.api = RestAPI(coresys)
coresys.supervisor = Supervisor(coresys)
coresys.homeassistant = await HomeAssistant(coresys).load_config()
coresys.apps = await AppManager(coresys).load_config()
coresys.addons = await AddonManager(coresys).load_config()
coresys.backups = await BackupManager(coresys).load_config()
coresys.host = await HostManager(coresys).post_init()
coresys.hardware = await HardwareManager.create(coresys)
@@ -130,26 +129,26 @@ def initialize_system(coresys: CoreSys) -> None:
_LOGGER.debug("Creating Supervisor SSL/TLS folder at '%s'", config.path_ssl)
config.path_ssl.mkdir()
# Supervisor app data folder
if not config.path_apps_data.is_dir():
# Supervisor addon data folder
if not config.path_addons_data.is_dir():
_LOGGER.debug(
"Creating Supervisor app data folder at '%s'", config.path_apps_data
"Creating Supervisor Add-on data folder at '%s'", config.path_addons_data
)
config.path_apps_data.mkdir(parents=True)
config.path_addons_data.mkdir(parents=True)
if not config.path_apps_local.is_dir():
if not config.path_addons_local.is_dir():
_LOGGER.debug(
"Creating Supervisor app local repository folder at '%s'",
config.path_apps_local,
"Creating Supervisor Add-on local repository folder at '%s'",
config.path_addons_local,
)
config.path_apps_local.mkdir(parents=True)
config.path_addons_local.mkdir(parents=True)
if not config.path_apps_git.is_dir():
if not config.path_addons_git.is_dir():
_LOGGER.debug(
"Creating Supervisor app git repositories folder at '%s'",
config.path_apps_git,
"Creating Supervisor Add-on git repositories folder at '%s'",
config.path_addons_git,
)
config.path_apps_git.mkdir(parents=True)
config.path_addons_git.mkdir(parents=True)
# Supervisor tmp folder
if not config.path_tmp.is_dir():
@@ -219,13 +218,13 @@ def initialize_system(coresys: CoreSys) -> None:
)
config.path_emergency.mkdir()
# App Configs folder
if not config.path_app_configs.is_dir():
# Addon Configs folder
if not config.path_addon_configs.is_dir():
_LOGGER.debug(
"Creating Supervisor app configs folder at '%s'",
config.path_app_configs,
"Creating Supervisor add-on configs folder at '%s'",
config.path_addon_configs,
)
config.path_app_configs.mkdir()
config.path_addon_configs.mkdir()
if not config.path_cid_files.is_dir():
_LOGGER.debug("Creating Docker cidfiles folder at '%s'", config.path_cid_files)
@@ -236,11 +235,6 @@ def warning_handler(message, category, filename, lineno, file=None, line=None):
"""Warning handler which logs warnings using the logging module."""
_LOGGER.warning("%s:%s: %s: %s", filename, lineno, category.__name__, message)
if isinstance(message, Exception):
# Don't capture warnings originating from Sentry SDK threads to
# avoid a feedback loop: sending an event can trigger urllib3
# warnings which would be captured and sent as new events.
if threading.current_thread().name.startswith("sentry-sdk."):
return
capture_exception(message)

View File

@@ -9,13 +9,12 @@ from pathlib import Path, PurePath
from awesomeversion import AwesomeVersion
from .const import (
ATTR_APPS_CUSTOM_LIST,
ATTR_ADDONS_CUSTOM_LIST,
ATTR_COUNTRY,
ATTR_DEBUG,
ATTR_DEBUG_BLOCK,
ATTR_DETECT_BLOCKING_IO,
ATTR_DIAGNOSTICS,
ATTR_FEATURE_FLAGS,
ATTR_IMAGE,
ATTR_LAST_BOOT,
ATTR_LOGGING,
@@ -25,7 +24,6 @@ from .const import (
ENV_SUPERVISOR_SHARE,
FILE_HASSIO_CONFIG,
SUPERVISOR_DATA,
FeatureFlag,
LogLevel,
)
from .utils.common import FileConfiguration
@@ -197,17 +195,6 @@ class CoreConfig(FileConfiguration):
lvl = getattr(logging, self.logging.value.upper())
logging.getLogger("supervisor").setLevel(lvl)
@property
def feature_flags(self) -> dict[FeatureFlag, bool]:
"""Return current state of explicitly configured experimental feature flags."""
return self._data.get(ATTR_FEATURE_FLAGS, {})
def set_feature_flag(self, feature: FeatureFlag, enabled: bool) -> None:
"""Enable or disable an experimental feature flag."""
if ATTR_FEATURE_FLAGS not in self._data:
self._data[ATTR_FEATURE_FLAGS] = {}
self._data[ATTR_FEATURE_FLAGS][feature] = enabled
@property
def last_boot(self) -> datetime:
"""Return last boot datetime."""
@@ -254,43 +241,43 @@ class CoreConfig(FileConfiguration):
return self.path_supervisor / HASSIO_SSL
@property
def path_apps_core(self) -> Path:
"""Return git path for core Apps."""
def path_addons_core(self) -> Path:
"""Return git path for core Add-ons."""
return self.path_supervisor / ADDONS_CORE
@property
def path_apps_git(self) -> Path:
"""Return path for Git App."""
def path_addons_git(self) -> Path:
"""Return path for Git Add-on."""
return self.path_supervisor / ADDONS_GIT
@property
def path_apps_local(self) -> Path:
"""Return path for custom Apps."""
def path_addons_local(self) -> Path:
"""Return path for custom Add-ons."""
return self.path_supervisor / ADDONS_LOCAL
@property
def path_extern_apps_local(self) -> PurePath:
"""Return path for custom Apps."""
def path_extern_addons_local(self) -> PurePath:
"""Return path for custom Add-ons."""
return PurePath(self.path_extern_supervisor, ADDONS_LOCAL)
@property
def path_apps_data(self) -> Path:
"""Return root App data folder."""
def path_addons_data(self) -> Path:
"""Return root Add-on data folder."""
return self.path_supervisor / ADDONS_DATA
@property
def path_extern_apps_data(self) -> PurePath:
"""Return root app data folder external for Docker."""
def path_extern_addons_data(self) -> PurePath:
"""Return root add-on data folder external for Docker."""
return PurePath(self.path_extern_supervisor, ADDONS_DATA)
@property
def path_app_configs(self) -> Path:
"""Return root App configs folder."""
def path_addon_configs(self) -> Path:
"""Return root Add-on configs folder."""
return self.path_supervisor / ADDON_CONFIGS
@property
def path_extern_app_configs(self) -> PurePath:
"""Return root App configs folder external for Docker."""
def path_extern_addon_configs(self) -> PurePath:
"""Return root Add-on configs folder external for Docker."""
return PurePath(self.path_extern_supervisor, ADDON_CONFIGS)
@property
@@ -424,23 +411,23 @@ class CoreConfig(FileConfiguration):
return PurePath(self.path_extern_supervisor, CID_FILES)
@property
def apps_repositories(self) -> list[str]:
"""Return list of custom App repositories."""
return self._data[ATTR_APPS_CUSTOM_LIST]
def addons_repositories(self) -> list[str]:
"""Return list of custom Add-on repositories."""
return self._data[ATTR_ADDONS_CUSTOM_LIST]
def add_app_repository(self, repo: str) -> None:
def add_addon_repository(self, repo: str) -> None:
"""Add a custom repository to list."""
if repo in self._data[ATTR_APPS_CUSTOM_LIST]:
if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
return
self._data[ATTR_APPS_CUSTOM_LIST].append(repo)
self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
def drop_app_repository(self, repo: str) -> None:
def drop_addon_repository(self, repo: str) -> None:
"""Remove a custom repository from list."""
if repo not in self._data[ATTR_APPS_CUSTOM_LIST]:
if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
return
self._data[ATTR_APPS_CUSTOM_LIST].remove(repo)
self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
def local_to_extern_path(self, path: PurePath) -> PurePath:
"""Translate a path relative to supervisor data in the container to its extern path."""

View File

@@ -39,10 +39,9 @@ FILE_HASSIO_SECURITY = Path(SUPERVISOR_DATA, "security.json")
FILE_SUFFIX_CONFIGURATION = [".yaml", ".yml", ".json"]
MACHINE_ID = Path("/etc/machine-id")
RUN_SUPERVISOR_STATE = Path("/run/supervisor")
SOCKET_CORE = Path("/run/os/core.sock")
SOCKET_DBUS = Path("/run/dbus/system_bus_socket")
SOCKET_DOCKER = Path("/run/docker.sock")
RUN_SUPERVISOR_STATE = Path("/run/supervisor")
SYSTEMD_JOURNAL_PERSISTENT = Path("/var/log/journal")
SYSTEMD_JOURNAL_VOLATILE = Path("/run/log/journal")
@@ -66,15 +65,11 @@ DOCKER_CPU_RUNTIME_ALLOCATION = int(DOCKER_CPU_RUNTIME_TOTAL / 5)
DNS_SUFFIX = "local.hass.io"
LABEL_ARCH = "io.hass.arch"
LABEL_DESCRIPTION = "io.hass.description"
LABEL_MACHINE = "io.hass.machine"
LABEL_NAME = "io.hass.name"
LABEL_TYPE = "io.hass.type"
LABEL_URL = "io.hass.url"
LABEL_VERSION = "io.hass.version"
META_ADDON = "addon" # legacy label for app
META_APP = "app"
META_ADDON = "addon"
META_HOMEASSISTANT = "homeassistant"
META_SUPERVISOR = "supervisor"
@@ -107,11 +102,10 @@ ATTR_ACCESS_TOKEN = "access_token"
ATTR_ACCESSPOINTS = "accesspoints"
ATTR_ACTIVE = "active"
ATTR_ACTIVITY_LED = "activity_led"
ATTR_ADDON = "addon"
ATTR_ADDONS = "addons"
ATTR_APP = "addon"
ATTR_APPS = "apps"
ATTR_APPS_CUSTOM_LIST = "addons_custom_list"
ATTR_APPS_REPOSITORIES = "addons_repositories"
ATTR_ADDONS_CUSTOM_LIST = "addons_custom_list"
ATTR_ADDONS_REPOSITORIES = "addons_repositories"
ATTR_ADDR_GEN_MODE = "addr_gen_mode"
ATTR_ADDRESS = "address"
ATTR_ADDRESS_DATA = "address-data"
@@ -159,6 +153,7 @@ ATTR_CONTENT_TRUST = "content_trust"
ATTR_COUNTRY = "country"
ATTR_CPE = "cpe"
ATTR_CPU_PERCENT = "cpu_percent"
ATTR_CRYPTO = "crypto"
ATTR_DATA = "data"
ATTR_DATE = "date"
ATTR_DAYS_UNTIL_STALE = "days_until_stale"
@@ -194,7 +189,6 @@ ATTR_ENVIRONMENT = "environment"
ATTR_EVENT = "event"
ATTR_EXCLUDE_DATABASE = "exclude_database"
ATTR_EXTRA = "extra"
ATTR_FEATURE_FLAGS = "feature_flags"
ATTR_FEATURES = "features"
ATTR_FIELDS = "fields"
ATTR_FILENAME = "filename"
@@ -418,6 +412,8 @@ FOLDER_ADDONS = "addons/local"
FOLDER_SSL = "ssl"
FOLDER_MEDIA = "media"
CRYPTO_AES128 = "aes128"
SECURITY_PROFILE = "profile"
SECURITY_DEFAULT = "default"
SECURITY_DISABLE = "disable"
@@ -436,16 +432,16 @@ OBSERVER_PORT = 4357
DEFAULT_CHUNK_SIZE = 2**16 # 64KiB
class AppBootConfig(StrEnum):
"""Boot mode config for the app."""
class AddonBootConfig(StrEnum):
"""Boot mode config for the add-on."""
AUTO = "auto"
MANUAL = "manual"
MANUAL_ONLY = "manual_only"
class AppBoot(StrEnum):
"""Boot mode for the app."""
class AddonBoot(StrEnum):
"""Boot mode for the add-on."""
AUTO = "auto"
MANUAL = "manual"
@@ -453,15 +449,15 @@ class AppBoot(StrEnum):
@classmethod
def _missing_(cls, value: object) -> Self | None:
"""Convert 'forced' config values to their counterpart."""
if value == AppBootConfig.MANUAL_ONLY:
if value == AddonBootConfig.MANUAL_ONLY:
for member in cls:
if member == AppBoot.MANUAL:
if member == AddonBoot.MANUAL:
return member
return None
class AppStartup(StrEnum):
"""Startup types of App."""
class AddonStartup(StrEnum):
"""Startup types of Add-on."""
INITIALIZE = "initialize"
SYSTEM = "system"
@@ -470,16 +466,16 @@ class AppStartup(StrEnum):
ONCE = "once"
class AppStage(StrEnum):
"""Stage types of app."""
class AddonStage(StrEnum):
"""Stage types of add-on."""
STABLE = "stable"
EXPERIMENTAL = "experimental"
DEPRECATED = "deprecated"
class AppState(StrEnum):
"""State of app."""
class AddonState(StrEnum):
"""State of add-on."""
STARTUP = "startup"
STARTED = "started"
@@ -551,13 +547,6 @@ class CpuArch(StrEnum):
AMD64 = "amd64"
class FeatureFlag(StrEnum):
"""Development features that can be toggled."""
SUPERVISOR_V2_API = "supervisor_v2_api"
UNIX_SOCKET_CORE_API = "unix_socket_core_api"
@dataclass
class HomeAssistantUser:
"""A Home Assistant Core user.

View File

@@ -11,12 +11,12 @@ from .const import (
ATTR_STARTUP,
RUN_SUPERVISOR_STATE,
STARTING_STATES,
AppStartup,
AddonStartup,
BusEvent,
CoreState,
)
from .coresys import CoreSys, CoreSysAttributes
from .dbus.const import StopUnitMode, UnitActiveState
from .dbus.const import StopUnitMode
from .exceptions import (
HassioError,
HomeAssistantCrashError,
@@ -169,8 +169,8 @@ class Core(CoreSysAttributes):
self.sys_arch.load(),
# Load Stores
self.sys_store.load(),
# Load Apps
self.sys_apps.load(),
# Load Add-ons
self.sys_addons.load(),
# load last available data
self.sys_backups.load(),
# load services
@@ -235,8 +235,8 @@ class Core(CoreSysAttributes):
return
try:
# Start app mark as initialize
await self.sys_apps.boot(AppStartup.INITIALIZE)
# Start addon mark as initialize
await self.sys_addons.boot(AddonStartup.INITIALIZE)
# HomeAssistant is already running, only Supervisor restarted
if await self.sys_hardware.helper.last_boot() == self.sys_config.last_boot:
@@ -246,11 +246,11 @@ class Core(CoreSysAttributes):
# reset register services / discovery
await self.sys_services.reset()
# start app mark as system
await self.sys_apps.boot(AppStartup.SYSTEM)
# start addon mark as system
await self.sys_addons.boot(AddonStartup.SYSTEM)
# start app mark as services
await self.sys_apps.boot(AppStartup.SERVICES)
# start addon mark as services
await self.sys_addons.boot(AddonStartup.SERVICES)
# run HomeAssistant
if (
@@ -279,8 +279,8 @@ class Core(CoreSysAttributes):
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
# start app mark as application
await self.sys_apps.boot(AppStartup.APPLICATION)
# start addon mark as application
await self.sys_addons.boot(AddonStartup.APPLICATION)
# store new last boot
await self._update_last_boot()
@@ -338,7 +338,6 @@ class Core(CoreSysAttributes):
self.sys_create_task(coro)
for coro in (
self.sys_websession.close(),
self.sys_homeassistant.api.close(),
self.sys_ingress.unload(),
self.sys_hardware.unload(),
self.sys_dbus.unload(),
@@ -358,8 +357,8 @@ class Core(CoreSysAttributes):
if self.state == CoreState.RUNNING:
await self.set_state(CoreState.SHUTDOWN)
# Shutdown Application Apps, using Home Assistant API
await self.sys_apps.shutdown(AppStartup.APPLICATION)
# Shutdown Application Add-ons, using Home Assistant API
await self.sys_addons.shutdown(AddonStartup.APPLICATION)
# Close Home Assistant
with suppress(HassioError):
@@ -367,10 +366,10 @@ class Core(CoreSysAttributes):
remove_container=remove_homeassistant_container
)
# Shutdown System Apps
await self.sys_apps.shutdown(AppStartup.SERVICES)
await self.sys_apps.shutdown(AppStartup.SYSTEM)
await self.sys_apps.shutdown(AppStartup.INITIALIZE)
# Shutdown System Add-ons
await self.sys_addons.shutdown(AddonStartup.SERVICES)
await self.sys_addons.shutdown(AddonStartup.SYSTEM)
await self.sys_addons.shutdown(AddonStartup.INITIALIZE)
# Shutdown all Plugins
if self.state in (CoreState.STOPPING, CoreState.SHUTDOWN):
@@ -433,31 +432,18 @@ class Core(CoreSysAttributes):
if self.sys_host.info.use_ntp:
# Stop timesyncd if NTP is enabled, as set_time is blocked while it runs.
# timedated rejects set_time while an NTP unit is active. We listen
# for the unit's ActiveState to become inactive before proceeding.
_LOGGER.info("Stopping systemd-timesyncd to allow manual time adjustment")
timesync_unit = await self.sys_dbus.systemd.get_unit(
"systemd-timesyncd.service"
await self.sys_dbus.systemd.stop_unit(
"systemd-timesyncd.service", StopUnitMode.REPLACE
)
try:
async with asyncio.timeout(10):
await self.sys_dbus.systemd.stop_unit(
"systemd-timesyncd.service", StopUnitMode.REPLACE
)
await timesync_unit.wait_for_active_state(
{UnitActiveState.INACTIVE}
)
except TimeoutError:
_LOGGER.warning(
"Timeout waiting for systemd-timesyncd to stop, "
"attempting time sync anyway"
)
# Create a repair issue so the user knows NTP was disabled
# Keep service disabled and create a repair issue
self.sys_resolution.create_issue(
IssueType.NTP_SYNC_FAILED,
ContextType.SYSTEM,
suggestions=[SuggestionType.ENABLE_NTP],
)
# We need to wait a bit for the service to stop.
await asyncio.sleep(1)
await self.sys_host.control.set_datetime(data.dt_utc)
await self.sys_supervisor.check_connectivity()
@@ -471,7 +457,7 @@ class Core(CoreSysAttributes):
await self.sys_plugins.repair()
# Restore core functionality
await self.sys_apps.repair()
await self.sys_addons.repair()
await self.sys_homeassistant.core.repair()
# Tag version for latest

View File

@@ -27,7 +27,7 @@ from .const import (
)
if TYPE_CHECKING:
from .addons.manager import AppManager
from .addons.manager import AddonManager
from .api import RestAPI
from .arch import CpuArchManager
from .auth import Auth
@@ -82,7 +82,7 @@ class CoreSys:
self._auth: Auth | None = None
self._homeassistant: HomeAssistant | None = None
self._supervisor: Supervisor | None = None
self._apps: AppManager | None = None
self._addons: AddonManager | None = None
self._api: RestAPI | None = None
self._updater: Updater | None = None
self._backups: BackupManager | None = None
@@ -350,18 +350,18 @@ class CoreSys:
self._updater = value
@property
def apps(self) -> AppManager:
"""Return AppManager object."""
if self._apps is None:
raise RuntimeError("AppManager not set!")
return self._apps
def addons(self) -> AddonManager:
"""Return AddonManager object."""
if self._addons is None:
raise RuntimeError("AddonManager not set!")
return self._addons
@apps.setter
def apps(self, value: AppManager) -> None:
"""Set a AppManager object."""
if self._apps:
raise RuntimeError("AppManager already set!")
self._apps = value
@addons.setter
def addons(self, value: AddonManager) -> None:
"""Set a AddonManager object."""
if self._addons:
raise RuntimeError("AddonManager already set!")
self._addons = value
@property
def store(self) -> StoreManager:
@@ -771,9 +771,9 @@ class CoreSysAttributes:
return self.coresys.updater
@property
def sys_apps(self) -> AppManager:
"""Return AppManager object."""
return self.coresys.apps
def sys_addons(self) -> AddonManager:
"""Return AddonManager object."""
return self.coresys.addons
@property
def sys_store(self) -> StoreManager:

View File

@@ -272,7 +272,7 @@ def get_connection_from_interface(
wireless = {
CONF_ATTR_802_WIRELESS_ASSIGNED_MAC: Variant("s", "preserve"),
CONF_ATTR_802_WIRELESS_MODE: Variant("s", "infrastructure"),
CONF_ATTR_802_WIRELESS_POWERSAVE: Variant("i", 0),
CONF_ATTR_802_WIRELESS_POWERSAVE: Variant("i", 1),
}
if interface.wifi and interface.wifi.ssid:
wireless[CONF_ATTR_802_WIRELESS_SSID] = Variant(

View File

@@ -16,7 +16,6 @@ from ..exceptions import (
)
from ..utils.dbus import DBusSignalWrapper
from .const import (
DBUS_ATTR_ACTIVE_STATE,
DBUS_ATTR_FINISH_TIMESTAMP,
DBUS_ATTR_FIRMWARE_TIMESTAMP_MONOTONIC,
DBUS_ATTR_KERNEL_TIMESTAMP_MONOTONIC,
@@ -25,7 +24,6 @@ from .const import (
DBUS_ATTR_VIRTUALIZATION,
DBUS_ERR_SYSTEMD_NO_SUCH_UNIT,
DBUS_IFACE_SYSTEMD_MANAGER,
DBUS_IFACE_SYSTEMD_UNIT,
DBUS_NAME_SYSTEMD,
DBUS_OBJECT_SYSTEMD,
DBUS_SIGNAL_PROPERTIES_CHANGED,
@@ -88,25 +86,6 @@ class SystemdUnit(DBusInterface):
"""Return signal wrapper for properties changed."""
return self.connected_dbus.signal(DBUS_SIGNAL_PROPERTIES_CHANGED)
@dbus_connected
async def wait_for_active_state(
self, target_states: set[UnitActiveState]
) -> UnitActiveState:
"""Wait for unit to reach one of the target active states.
Caller must handle TimeoutError if a timeout is desired.
"""
async with self.properties_changed() as signal:
state = await self.get_active_state()
while state not in target_states:
interface, changed, _ = await signal.wait_for_signal()
if (
interface == DBUS_IFACE_SYSTEMD_UNIT
and DBUS_ATTR_ACTIVE_STATE in changed
):
state = UnitActiveState(changed[DBUS_ATTR_ACTIVE_STATE].value)
return state
class Systemd(DBusInterfaceProxy):
"""Systemd function handler.

View File

@@ -15,7 +15,7 @@ from ..utils.common import FileConfiguration
from .validate import SCHEMA_DISCOVERY_CONFIG
if TYPE_CHECKING:
from ..addons.addon import App
from ..addons.addon import Addon
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -71,10 +71,10 @@ class Discovery(CoreSysAttributes, FileConfiguration):
"""Return list of available discovery messages."""
return list(self.message_obj.values())
async def send(self, app: App, service: str, config: dict[str, Any]) -> Message:
async def send(self, addon: Addon, service: str, config: dict[str, Any]) -> Message:
"""Send a discovery message to Home Assistant."""
# Create message
message = Message(app.slug, service, config)
message = Message(addon.slug, service, config)
# Already exists?
for exists_msg in self.list_messages:
@@ -84,12 +84,12 @@ class Discovery(CoreSysAttributes, FileConfiguration):
message = exists_msg
message.config = config
else:
_LOGGER.debug("Duplicate discovery message from %s", app.slug)
_LOGGER.debug("Duplicate discovery message from %s", addon.slug)
return exists_msg
break
_LOGGER.info(
"Sending discovery to Home Assistant %s from %s", service, app.slug
"Sending discovery to Home Assistant %s from %s", service, addon.slug
)
self.message_obj[message.uuid] = message
await self.save()

View File

@@ -2,7 +2,7 @@
import voluptuous as vol
from ..const import ATTR_APP, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_UUID
from ..const import ATTR_ADDON, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_UUID
from ..utils.validate import schema_or
from ..validate import uuid_match
@@ -11,7 +11,7 @@ SCHEMA_DISCOVERY = vol.Schema(
vol.Schema(
{
vol.Required(ATTR_UUID): uuid_match,
vol.Required(ATTR_APP): str,
vol.Required(ATTR_ADDON): str,
vol.Required(ATTR_SERVICE): str,
vol.Required(ATTR_CONFIG): vol.Maybe(dict),
},

View File

@@ -1,4 +1,4 @@
"""Init file for Supervisor app Docker object."""
"""Init file for Supervisor add-on Docker object."""
from __future__ import annotations
@@ -14,7 +14,7 @@ import aiodocker
from attr import evolve
from awesomeversion import AwesomeVersion
from ..addons.build import AppBuild
from ..addons.build import AddonBuild
from ..addons.const import MappingType
from ..bus import EventListener
from ..const import (
@@ -71,7 +71,7 @@ from .const import (
from .interface import DockerInterface
if TYPE_CHECKING:
from ..addons.addon import App
from ..addons.addon import Addon
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -79,12 +79,12 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
NO_ADDDRESS = IPv4Address("0.0.0.0")
class DockerApp(DockerInterface):
class DockerAddon(DockerInterface):
"""Docker Supervisor wrapper for Home Assistant."""
def __init__(self, coresys: CoreSys, app: App):
def __init__(self, coresys: CoreSys, addon: Addon):
"""Initialize Docker Home Assistant wrapper."""
self.app: App = app
self.addon: Addon = addon
super().__init__(coresys)
self._hw_listener: EventListener | None = None
@@ -97,12 +97,12 @@ class DockerApp(DockerInterface):
@property
def image(self) -> str | None:
"""Return name of Docker image."""
return self.app.image
return self.addon.image
@property
def ip_address(self) -> IPv4Address:
"""Return IP address of this container."""
if self.app.host_network:
if self.addon.host_network:
return self.sys_docker.network.gateway
if not self._meta:
return NO_ADDDRESS
@@ -112,49 +112,49 @@ class DockerApp(DockerInterface):
return IPv4Address(
self._meta["NetworkSettings"]["Networks"]["hassio"]["IPAddress"]
)
except KeyError, TypeError, ValueError:
except (KeyError, TypeError, ValueError):
return NO_ADDDRESS
@property
def timeout(self) -> int:
"""Return timeout for Docker actions."""
return self.app.timeout
return self.addon.timeout
@property
def version(self) -> AwesomeVersion:
"""Return version of Docker image."""
return self.app.version
return self.addon.version
@property
def arch(self) -> str | None:
"""Return arch of Docker image."""
if self.app.legacy:
if self.addon.legacy:
return str(self.sys_arch.default)
return super().arch
@property
def name(self) -> str:
"""Return name of Docker container."""
return DockerApp.slug_to_name(self.app.slug)
return DockerAddon.slug_to_name(self.addon.slug)
@property
def environment(self) -> dict[str, str | int | None]:
"""Return environment for Docker app."""
app_env = cast(dict[str, str | int | None], self.app.environment or {})
"""Return environment for Docker add-on."""
addon_env = cast(dict[str, str | int | None], self.addon.environment or {})
# Provide options for legacy apps
if self.app.legacy:
for key, value in self.app.options.items():
# Provide options for legacy add-ons
if self.addon.legacy:
for key, value in self.addon.options.items():
if isinstance(value, (int, str)):
app_env[key] = value
addon_env[key] = value
else:
_LOGGER.warning("Can not set nested option %s as Docker env", key)
return {
**app_env,
**addon_env,
ENV_TIME: self.sys_timezone,
ENV_TOKEN: self.app.supervisor_token,
ENV_TOKEN_OLD: self.app.supervisor_token,
ENV_TOKEN: self.addon.supervisor_token,
ENV_TOKEN_OLD: self.addon.supervisor_token,
}
@property
@@ -163,7 +163,7 @@ class DockerApp(DockerInterface):
rules = set()
# Attach correct cgroups for static devices
for device_path in self.app.static_devices:
for device_path in self.addon.static_devices:
try:
device = self.sys_hardware.get_by_path(device_path)
except HardwareNotFound:
@@ -173,42 +173,42 @@ class DockerApp(DockerInterface):
# Check access
if not self.sys_hardware.policy.allowed_for_access(device):
_LOGGER.error(
"App %s tried to access blocked device %s!",
self.app.name,
"Add-on %s try to access to blocked device %s!",
self.addon.name,
device.name,
)
continue
rules.add(self.sys_hardware.policy.get_cgroups_rule(device))
# Attach correct cgroups for devices
for device in self.app.devices:
for device in self.addon.devices:
if not self.sys_hardware.policy.allowed_for_access(device):
_LOGGER.error(
"App %s tried to access blocked device %s!",
self.app.name,
"Add-on %s try to access to blocked device %s!",
self.addon.name,
device.name,
)
continue
rules.add(self.sys_hardware.policy.get_cgroups_rule(device))
# Video
if self.app.with_video:
if self.addon.with_video:
rules.update(self.sys_hardware.policy.get_cgroups_rules(PolicyGroup.VIDEO))
# GPIO
if self.app.with_gpio:
if self.addon.with_gpio:
rules.update(self.sys_hardware.policy.get_cgroups_rules(PolicyGroup.GPIO))
# UART
if self.app.with_uart:
if self.addon.with_uart:
rules.update(self.sys_hardware.policy.get_cgroups_rules(PolicyGroup.UART))
# USB
if self.app.with_usb:
if self.addon.with_usb:
rules.update(self.sys_hardware.policy.get_cgroups_rules(PolicyGroup.USB))
# Full Access
if not self.app.protected and self.app.with_full_access:
if not self.addon.protected and self.addon.with_full_access:
return [self.sys_hardware.policy.get_full_access()]
# Return None if no rules is present
@@ -218,13 +218,13 @@ class DockerApp(DockerInterface):
@property
def ports(self) -> dict[str, str | int | None] | None:
"""Filter None from app ports."""
if self.app.host_network or not self.app.ports:
"""Filter None from add-on ports."""
if self.addon.host_network or not self.addon.ports:
return None
return {
container_port: host_port
for container_port, host_port in self.app.ports.items()
for container_port, host_port in self.addon.ports.items()
if host_port
}
@@ -236,23 +236,23 @@ class DockerApp(DockerInterface):
# AppArmor
if (
not self.sys_host.apparmor.available
or self.app.apparmor == SECURITY_DISABLE
or self.addon.apparmor == SECURITY_DISABLE
):
security.append("apparmor=unconfined")
elif self.app.apparmor == SECURITY_PROFILE:
security.append(f"apparmor={self.app.slug}")
elif self.addon.apparmor == SECURITY_PROFILE:
security.append(f"apparmor={self.addon.slug}")
return security
@property
def tmpfs(self) -> dict[str, str] | None:
"""Return tmpfs for Docker app."""
"""Return tmpfs for Docker add-on."""
tmpfs = {}
if self.app.with_tmpfs:
if self.addon.with_tmpfs:
tmpfs["/tmp"] = "" # noqa: S108
if not self.app.host_ipc:
if not self.addon.host_ipc:
tmpfs["/dev/shm"] = "" # noqa: S108
# Return None if no tmpfs is present
@@ -270,36 +270,36 @@ class DockerApp(DockerInterface):
@property
def network_mode(self) -> Literal["host"] | None:
"""Return network mode for app."""
if self.app.host_network:
"""Return network mode for add-on."""
if self.addon.host_network:
return "host"
return None
@property
def pid_mode(self) -> str | None:
"""Return PID mode for app."""
if not self.app.protected and self.app.host_pid:
"""Return PID mode for add-on."""
if not self.addon.protected and self.addon.host_pid:
return "host"
return None
@property
def uts_mode(self) -> str | None:
"""Return UTS mode for app."""
if self.app.host_uts:
"""Return UTS mode for add-on."""
if self.addon.host_uts:
return "host"
return None
@property
def capabilities(self) -> list[Capabilities] | None:
"""Generate needed capabilities."""
capabilities: set[Capabilities] = set(self.app.privileged)
capabilities: set[Capabilities] = set(self.addon.privileged)
# Need work with kernel modules
if self.app.with_kernel_modules:
if self.addon.with_kernel_modules:
capabilities.add(Capabilities.SYS_MODULE)
# Need schedule functions
if self.app.with_realtime:
if self.addon.with_realtime:
capabilities.add(Capabilities.SYS_NICE)
# Return None if no capabilities is present
@@ -309,19 +309,19 @@ class DockerApp(DockerInterface):
@property
def ulimits(self) -> list[Ulimit] | None:
"""Generate ulimits for app."""
"""Generate ulimits for add-on."""
limits: list[Ulimit] = []
# Need schedule functions
if self.app.with_realtime:
if self.addon.with_realtime:
limits.append(Ulimit(name="rtprio", soft=90, hard=99))
# Set available memory for memlock to 128MB
mem = 128 * 1024 * 1024
limits.append(Ulimit(name="memlock", soft=mem, hard=mem))
# Add configurable ulimits from app config
for name, config in self.app.ulimits.items():
# Add configurable ulimits from add-on config
for name, config in self.addon.ulimits.items():
if isinstance(config, int):
# Simple format: both soft and hard limits are the same
limits.append(Ulimit(name=name, soft=config, hard=config))
@@ -343,129 +343,131 @@ class DockerApp(DockerInterface):
return None
# If need CPU RT
if self.app.with_realtime:
if self.addon.with_realtime:
return DOCKER_CPU_RUNTIME_ALLOCATION
return None
@property
def mounts(self) -> list[DockerMount]:
"""Return mounts for container."""
app_mapping = self.app.map_volumes
addon_mapping = self.addon.map_volumes
target_data_path: str | None = None
if MappingType.DATA in app_mapping:
target_data_path = app_mapping[MappingType.DATA].path
if MappingType.DATA in addon_mapping:
target_data_path = addon_mapping[MappingType.DATA].path
mounts = [
MOUNT_DEV,
DockerMount(
type=MountType.BIND,
source=self.app.path_extern_data.as_posix(),
source=self.addon.path_extern_data.as_posix(),
target=target_data_path or PATH_PRIVATE_DATA.as_posix(),
read_only=False,
),
]
# setup config mappings
if MappingType.CONFIG in app_mapping:
if MappingType.CONFIG in addon_mapping:
mounts.append(
DockerMount(
type=MountType.BIND,
source=self.sys_config.path_extern_homeassistant.as_posix(),
target=app_mapping[MappingType.CONFIG].path
target=addon_mapping[MappingType.CONFIG].path
or PATH_HOMEASSISTANT_CONFIG_LEGACY.as_posix(),
read_only=app_mapping[MappingType.CONFIG].read_only,
read_only=addon_mapping[MappingType.CONFIG].read_only,
)
)
else:
# Map app's public config folder if not using deprecated config option
if self.app.app_config_used:
# Map addon's public config folder if not using deprecated config option
if self.addon.addon_config_used:
mounts.append(
DockerMount(
type=MountType.BIND,
source=self.app.path_extern_config.as_posix(),
target=app_mapping[MappingType.ADDON_CONFIG].path
source=self.addon.path_extern_config.as_posix(),
target=addon_mapping[MappingType.ADDON_CONFIG].path
or PATH_PUBLIC_CONFIG.as_posix(),
read_only=app_mapping[MappingType.ADDON_CONFIG].read_only,
read_only=addon_mapping[MappingType.ADDON_CONFIG].read_only,
)
)
# Map Home Assistant config in new way
if MappingType.HOMEASSISTANT_CONFIG in app_mapping:
if MappingType.HOMEASSISTANT_CONFIG in addon_mapping:
mounts.append(
DockerMount(
type=MountType.BIND,
source=self.sys_config.path_extern_homeassistant.as_posix(),
target=app_mapping[MappingType.HOMEASSISTANT_CONFIG].path
target=addon_mapping[MappingType.HOMEASSISTANT_CONFIG].path
or PATH_HOMEASSISTANT_CONFIG.as_posix(),
read_only=app_mapping[
read_only=addon_mapping[
MappingType.HOMEASSISTANT_CONFIG
].read_only,
)
)
if MappingType.ALL_ADDON_CONFIGS in app_mapping:
if MappingType.ALL_ADDON_CONFIGS in addon_mapping:
mounts.append(
DockerMount(
type=MountType.BIND,
source=self.sys_config.path_extern_app_configs.as_posix(),
target=app_mapping[MappingType.ALL_ADDON_CONFIGS].path
source=self.sys_config.path_extern_addon_configs.as_posix(),
target=addon_mapping[MappingType.ALL_ADDON_CONFIGS].path
or PATH_ALL_ADDON_CONFIGS.as_posix(),
read_only=app_mapping[MappingType.ALL_ADDON_CONFIGS].read_only,
read_only=addon_mapping[MappingType.ALL_ADDON_CONFIGS].read_only,
)
)
if MappingType.SSL in app_mapping:
if MappingType.SSL in addon_mapping:
mounts.append(
DockerMount(
type=MountType.BIND,
source=self.sys_config.path_extern_ssl.as_posix(),
target=app_mapping[MappingType.SSL].path or PATH_SSL.as_posix(),
read_only=app_mapping[MappingType.SSL].read_only,
target=addon_mapping[MappingType.SSL].path or PATH_SSL.as_posix(),
read_only=addon_mapping[MappingType.SSL].read_only,
)
)
if MappingType.ADDONS in app_mapping:
if MappingType.ADDONS in addon_mapping:
mounts.append(
DockerMount(
type=MountType.BIND,
source=self.sys_config.path_extern_apps_local.as_posix(),
target=app_mapping[MappingType.ADDONS].path
source=self.sys_config.path_extern_addons_local.as_posix(),
target=addon_mapping[MappingType.ADDONS].path
or PATH_LOCAL_ADDONS.as_posix(),
read_only=app_mapping[MappingType.ADDONS].read_only,
read_only=addon_mapping[MappingType.ADDONS].read_only,
)
)
if MappingType.BACKUP in app_mapping:
if MappingType.BACKUP in addon_mapping:
mounts.append(
DockerMount(
type=MountType.BIND,
source=self.sys_config.path_extern_backup.as_posix(),
target=app_mapping[MappingType.BACKUP].path
target=addon_mapping[MappingType.BACKUP].path
or PATH_BACKUP.as_posix(),
read_only=app_mapping[MappingType.BACKUP].read_only,
read_only=addon_mapping[MappingType.BACKUP].read_only,
)
)
if MappingType.SHARE in app_mapping:
if MappingType.SHARE in addon_mapping:
mounts.append(
DockerMount(
type=MountType.BIND,
source=self.sys_config.path_extern_share.as_posix(),
target=app_mapping[MappingType.SHARE].path or PATH_SHARE.as_posix(),
read_only=app_mapping[MappingType.SHARE].read_only,
target=addon_mapping[MappingType.SHARE].path
or PATH_SHARE.as_posix(),
read_only=addon_mapping[MappingType.SHARE].read_only,
bind_options=MountBindOptions(propagation=PropagationMode.RSLAVE),
)
)
if MappingType.MEDIA in app_mapping:
if MappingType.MEDIA in addon_mapping:
mounts.append(
DockerMount(
type=MountType.BIND,
source=self.sys_config.path_extern_media.as_posix(),
target=app_mapping[MappingType.MEDIA].path or PATH_MEDIA.as_posix(),
read_only=app_mapping[MappingType.MEDIA].read_only,
target=addon_mapping[MappingType.MEDIA].path
or PATH_MEDIA.as_posix(),
read_only=addon_mapping[MappingType.MEDIA].read_only,
bind_options=MountBindOptions(propagation=PropagationMode.RSLAVE),
)
)
@@ -473,7 +475,7 @@ class DockerApp(DockerInterface):
# Init other hardware mappings
# GPIO support
if self.app.with_gpio and self.sys_hardware.helper.support_gpio:
if self.addon.with_gpio and self.sys_hardware.helper.support_gpio:
for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
if not Path(gpio_path).exists():
continue
@@ -487,7 +489,7 @@ class DockerApp(DockerInterface):
)
# DeviceTree support
if self.app.with_devicetree:
if self.addon.with_devicetree:
mounts.append(
DockerMount(
type=MountType.BIND,
@@ -498,11 +500,11 @@ class DockerApp(DockerInterface):
)
# Host udev support
if self.app.with_udev:
if self.addon.with_udev:
mounts.append(MOUNT_UDEV)
# Kernel Modules support
if self.app.with_kernel_modules:
if self.addon.with_kernel_modules:
mounts.append(
DockerMount(
type=MountType.BIND,
@@ -513,19 +515,19 @@ class DockerApp(DockerInterface):
)
# Docker API support
if not self.app.protected and self.app.access_docker_api:
if not self.addon.protected and self.addon.access_docker_api:
mounts.append(MOUNT_DOCKER)
# Host D-Bus system
if self.app.host_dbus:
if self.addon.host_dbus:
mounts.append(MOUNT_DBUS)
# Configuration Audio
if self.app.with_audio:
if self.addon.with_audio:
mounts += [
DockerMount(
type=MountType.BIND,
source=self.app.path_extern_pulse.as_posix(),
source=self.addon.path_extern_pulse.as_posix(),
target="/etc/pulse/client.conf",
read_only=True,
),
@@ -544,7 +546,7 @@ class DockerApp(DockerInterface):
]
# System Journal access
if self.app.with_journald:
if self.addon.with_journald:
mounts += [
DockerMount(
type=MountType.BIND,
@@ -570,21 +572,21 @@ class DockerApp(DockerInterface):
async def run(self) -> None:
"""Run Docker image."""
# Security check
if not self.app.protected:
_LOGGER.warning("%s running with disabled protected mode!", self.app.name)
if not self.addon.protected:
_LOGGER.warning("%s running with disabled protected mode!", self.addon.name)
# Don't set a hostname if no separate UTS namespace is used
hostname = None if self.uts_mode else self.app.hostname
hostname = None if self.uts_mode else self.addon.hostname
# Create & Run container
try:
await self._run(
tag=str(self.app.version),
tag=str(self.addon.version),
name=self.name,
hostname=hostname,
detach=True,
init=self.app.default_init,
stdin_open=self.app.with_stdin,
init=self.addon.default_init,
stdin_open=self.addon.with_stdin,
network_mode=self.network_mode,
pid_mode=self.pid_mode,
uts_mode=self.uts_mode,
@@ -604,24 +606,26 @@ class DockerApp(DockerInterface):
self.sys_resolution.create_issue(
IssueType.MISSING_IMAGE,
ContextType.ADDON,
reference=self.app.slug,
reference=self.addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
raise
_LOGGER.info("Starting Docker app %s with version %s", self.image, self.version)
_LOGGER.info(
"Starting Docker add-on %s with version %s", self.image, self.version
)
# Write data to DNS server
try:
await self.sys_plugins.dns.add_host(
ipv4=self.ip_address, names=[self.app.hostname]
ipv4=self.ip_address, names=[self.addon.hostname]
)
except CoreDNSError as err:
_LOGGER.warning("Can't update DNS for %s", self.name)
await async_capture_exception(err)
# Hardware Access
if self.app.static_devices:
if self.addon.static_devices:
self._hw_listener = self.sys_bus.register_event(
BusEvent.HARDWARE_NEW_DEVICE, self._hardware_events
)
@@ -651,7 +655,7 @@ class DockerApp(DockerInterface):
image=image,
latest=latest,
arch=arch,
need_build=self.app.latest_need_build,
need_build=self.addon.latest_need_build,
)
@Job(
@@ -669,27 +673,32 @@ class DockerApp(DockerInterface):
need_build: bool | None = None,
) -> None:
"""Pull Docker image or build it."""
if need_build is None and self.app.need_build or need_build:
if need_build is None and self.addon.need_build or need_build:
await self._build(version, image)
else:
await super().install(version, image, latest, arch)
async def _build(self, version: AwesomeVersion, image: str | None = None) -> None:
"""Build a Docker container."""
build_env = await AppBuild.create(self.coresys, self.app)
build_env = await AddonBuild(self.coresys, self.addon).load_config()
# Check if the build environment is valid, raises if not
await build_env.is_valid()
_LOGGER.info("Starting build for %s:%s", self.image, version)
if build_env.squash:
_LOGGER.warning(
"Ignoring squash build option for %s as Docker BuildKit does not support it.",
self.addon.slug,
)
app_image_tag = f"{image or self.app.image}:{version!s}"
addon_image_tag = f"{image or self.addon.image}:{version!s}"
docker_version = self.sys_docker.info.version
builder_version_tag = (
f"{docker_version.major}.{docker_version.minor}.{docker_version.micro}-cli"
)
builder_name = f"addon_builder_{self.app.slug}"
builder_name = f"addon_builder_{self.addon.slug}"
# Remove dangling builder container if it exists by any chance
# E.g. because of an abrupt host shutdown/reboot during a build
@@ -730,7 +739,7 @@ class DockerApp(DockerInterface):
return (
temp_dir,
build_env.get_docker_args(
version, app_image_tag, docker_config_path
version, addon_image_tag, docker_config_path
),
)
@@ -751,10 +760,10 @@ class DockerApp(DockerInterface):
if temp_dir:
await self.sys_run_in_executor(temp_dir.cleanup)
logs = "".join(result.log)
logs = "\n".join(result.log)
if result.exit_code != 0:
raise DockerBuildError(
f"Docker build failed for {app_image_tag} (exit code {result.exit_code}). Build output:\n{logs}",
f"Docker build failed for {addon_image_tag} (exit code {result.exit_code}). Build output:\n{logs}",
_LOGGER.error,
)
@@ -762,23 +771,14 @@ class DockerApp(DockerInterface):
try:
# Update meta data
self._meta = await self.sys_docker.images.inspect(app_image_tag)
self._meta = await self.sys_docker.images.inspect(addon_image_tag)
except aiodocker.DockerError as err:
raise DockerBuildError(
f"Can't get image metadata for {app_image_tag} after build: {err!s}"
f"Can't get image metadata for {addon_image_tag} after build: {err!s}"
) from err
_LOGGER.info("Build %s:%s done", self.image, version)
# Clean up old add-on builder images from previous Docker versions.
# Done here after build because cleanup_old_images needs the current
# image to exist, and the builder image is only pulled on first build
# (in run_command) after a Docker engine update.
with suppress(DockerError):
await self.sys_docker.cleanup_old_images(
ADDON_BUILDER_IMAGE, AwesomeVersion(builder_version_tag)
)
async def export_image(self, tar_file: Path) -> None:
"""Export current images into a tar file."""
if not self.image:
@@ -817,11 +817,11 @@ class DockerApp(DockerInterface):
use_version,
{old_image} if old_image else None,
keep_images={
f"{app.image}:{app.version}"
for app in self.sys_apps.installed
if app.slug != self.app.slug
and app.image
and app.image in {old_image, use_image}
f"{addon.image}:{addon.version}"
for addon in self.sys_addons.installed
if addon.slug != self.addon.slug
and addon.image
and addon.image in {old_image, use_image}
},
)
@@ -831,7 +831,7 @@ class DockerApp(DockerInterface):
concurrency=JobConcurrency.GROUP_REJECT,
)
async def write_stdin(self, data: bytes) -> None:
"""Write to app stdin."""
"""Write to add-on stdin."""
try:
# Load needed docker objects
container = await self.sys_docker.containers.get(self.name)
@@ -861,7 +861,7 @@ class DockerApp(DockerInterface):
# DNS
if self.ip_address != NO_ADDDRESS:
try:
await self.sys_plugins.dns.delete_host(self.app.hostname)
await self.sys_plugins.dns.delete_host(self.addon.hostname)
except CoreDNSError as err:
_LOGGER.warning("Can't update DNS for %s", self.name)
await async_capture_exception(err)
@@ -876,7 +876,7 @@ class DockerApp(DockerInterface):
# If there is a device access issue and the container is removed, clear it
if remove_container and (
issue := self.sys_resolution.get_issue_if_present(
self.app.device_access_missing_issue
self.addon.device_access_missing_issue
)
):
self.sys_resolution.dismiss_issue(issue)
@@ -891,7 +891,7 @@ class DockerApp(DockerInterface):
"""Process Hardware events for adjust device access."""
if not any(
device_path in (device.path, device.sysfs)
for device_path in self.app.static_devices
for device_path in self.addon.static_devices
):
return
@@ -912,7 +912,7 @@ class DockerApp(DockerInterface):
and not self.sys_os.available
):
self.sys_resolution.add_issue(
evolve(self.app.device_access_missing_issue),
evolve(self.addon.device_access_missing_issue),
suggestions=[SuggestionType.EXECUTE_RESTART],
)
return

View File

@@ -23,9 +23,6 @@ DOCKER_HUB_API = "registry-1.docker.io"
# Legacy Docker Hub identifier for backward compatibility
DOCKER_HUB_LEGACY = "hub.docker.com"
# GitHub Container Registry identifier
GITHUB_CONTAINER_REGISTRY = "ghcr.io"
class Capabilities(StrEnum):
"""Linux Capabilities."""
@@ -143,7 +140,6 @@ class Ulimit:
}
ENV_CORE_API_SOCKET = "SUPERVISOR_CORE_API_SOCKET"
ENV_DUPLICATE_LOG_FILE = "HA_DUPLICATE_LOG_FILE"
ENV_TIME = "TZ"
ENV_TOKEN = "SUPERVISOR_TOKEN"
@@ -173,12 +169,6 @@ MOUNT_MACHINE_ID = DockerMount(
target=MACHINE_ID.as_posix(),
read_only=True,
)
MOUNT_CORE_RUN = DockerMount(
type=MountType.BIND,
source="/run/supervisor",
target="/run/supervisor",
read_only=False,
)
MOUNT_UDEV = DockerMount(
type=MountType.BIND, source="/run/udev", target="/run/udev", read_only=True
)
@@ -195,6 +185,4 @@ PATH_SHARE = PurePath("/share")
PATH_MEDIA = PurePath("/media")
# https://hub.docker.com/_/docker
# Use short name as Docker stores it this way; the canonical docker.io/library/docker
# does not match the reference filter used by cleanup_old_images.
ADDON_BUILDER_IMAGE = "docker"
ADDON_BUILDER_IMAGE = "docker.io/library/docker"

View File

@@ -13,12 +13,10 @@ from ..homeassistant.const import LANDINGPAGE
from ..jobs.const import JobConcurrency
from ..jobs.decorator import Job
from .const import (
ENV_CORE_API_SOCKET,
ENV_DUPLICATE_LOG_FILE,
ENV_TIME,
ENV_TOKEN,
ENV_TOKEN_OLD,
MOUNT_CORE_RUN,
MOUNT_DBUS,
MOUNT_DEV,
MOUNT_MACHINE_ID,
@@ -164,9 +162,6 @@ class DockerHomeAssistant(DockerInterface):
if self.sys_machine_id:
mounts.append(MOUNT_MACHINE_ID)
if self.sys_homeassistant.api.supports_unix_socket:
mounts.append(MOUNT_CORE_RUN)
return mounts
@Job(
@@ -185,8 +180,6 @@ class DockerHomeAssistant(DockerInterface):
}
if restore_job_id:
environment[ENV_RESTORE_JOB_ID] = restore_job_id
if self.sys_homeassistant.api.supports_unix_socket:
environment[ENV_CORE_API_SOCKET] = "/run/supervisor/core.sock"
if self.sys_homeassistant.duplicate_log_file:
environment[ENV_DUPLICATE_LOG_FILE] = "1"
await self._run(

View File

@@ -33,27 +33,17 @@ from ..exceptions import (
DockerHubRateLimitExceeded,
DockerJobError,
DockerNotFound,
DockerRegistryAuthError,
DockerRegistryRateLimitExceeded,
GithubContainerRegistryRateLimitExceeded,
)
from ..jobs.const import JOB_GROUP_DOCKER_INTERFACE, JobConcurrency
from ..jobs.decorator import Job
from ..jobs.job_group import JobGroup
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..utils.sentry import async_capture_exception
from .const import (
DOCKER_HUB,
DOCKER_HUB_LEGACY,
GITHUB_CONTAINER_REGISTRY,
ContainerState,
RestartPolicy,
)
from .const import DOCKER_HUB, DOCKER_HUB_LEGACY, ContainerState, RestartPolicy
from .manager import CommandReturn, ExecReturn, PullLogEntry
from .monitor import DockerContainerStateEvent
from .pull_progress import ImagePullProgress
from .stats import DockerStats
from .utils import get_registry_from_image
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -124,11 +114,6 @@ class DockerInterface(JobGroup, ABC):
def name(self) -> str:
"""Return name of Docker container."""
@property
def attached(self) -> bool:
"""Return True if container/image metadata has been loaded."""
return self._meta is not None
@property
def meta_config(self) -> dict[str, Any]:
"""Return meta data of configuration for container/image."""
@@ -231,29 +216,6 @@ class DockerInterface(JobGroup, ABC):
return credentials, qualified_image
def _registry_rate_limit_exception(
self, image: str
) -> DockerRegistryRateLimitExceeded:
"""Return typed rate-limit exception and maybe create a resolution issue.
The registry is derived from the image reference. Docker Hub gets a
DOCKER_RATELIMIT resolution issue with a registry-login suggestion
(actionable - logging in lifts the unauthenticated quota). GHCR and
unknown registries only produce a typed exception and a log entry;
no resolution issue since there's nothing actionable for the user.
"""
registry = get_registry_from_image(image)
if registry == GITHUB_CONTAINER_REGISTRY:
return GithubContainerRegistryRateLimitExceeded(_LOGGER.warning)
if registry is None or registry in (DOCKER_HUB, DOCKER_HUB_LEGACY):
self.sys_resolution.create_issue(
IssueType.DOCKER_RATELIMIT,
ContextType.SYSTEM,
suggestions=[SuggestionType.REGISTRY_LOGIN],
)
return DockerHubRateLimitExceeded(_LOGGER.warning)
return DockerRegistryRateLimitExceeded(_LOGGER.warning)
@Job(
name="docker_interface_install",
on_condition=DockerJobError,
@@ -358,29 +320,14 @@ class DockerInterface(JobGroup, ABC):
await self.sys_docker.images.tag(
docker_image["Id"], image, tag="latest"
)
except DockerRegistryRateLimitExceeded as err:
# Rate limit surfaced via the streaming pull protocol (no HTTP
# status to key off of). Refine into a registry-specific exception
# now that we know which image was being pulled.
raise self._registry_rate_limit_exception(image) from err
except aiodocker.DockerError as err:
# Pre-28.3.0 daemons wrap registry rate limits as HTTP 500
# instead of forwarding 429: api/server/httpstatus/status.go
# mapped cerrdefs.IsUnknown to 500. Fixed upstream by moby/moby
# commit 23fa0ae74a ("Cleanup http status error checks",
# first released in Docker 28.3.0). We still need to detect it
# for the large fleet on older daemons — match on the message
# body since the HTTP status is useless for that window.
message = str(err.message) if err.message else ""
if err.status == HTTPStatus.TOO_MANY_REQUESTS or (
err.status == HTTPStatus.INTERNAL_SERVER_ERROR
and "toomanyrequests" in message
):
raise self._registry_rate_limit_exception(image) from err
if err.status == HTTPStatus.UNAUTHORIZED and credentials:
raise DockerRegistryAuthError(
_LOGGER.error, registry=credentials[ATTR_REGISTRY]
) from err
if err.status == HTTPStatus.TOO_MANY_REQUESTS:
self.sys_resolution.create_issue(
IssueType.DOCKER_RATELIMIT,
ContextType.SYSTEM,
suggestions=[SuggestionType.REGISTRY_LOGIN],
)
raise DockerHubRateLimitExceeded(_LOGGER.error) from err
await async_capture_exception(err)
raise DockerError(
f"Can't install {image}:{version!s}: {err}", _LOGGER.error

View File

@@ -43,7 +43,6 @@ from ..exceptions import (
DockerError,
DockerNoSpaceOnDevice,
DockerNotFound,
DockerRegistryRateLimitExceeded,
)
from ..utils.common import FileConfiguration
from ..validate import SCHEMA_DOCKER_CONFIG
@@ -192,12 +191,6 @@ class PullLogEntry:
raise RuntimeError("No error to convert to exception!")
if self.error.endswith("no space left on device"):
return DockerNoSpaceOnDevice(_LOGGER.error)
if "toomanyrequests" in self.error:
# Registry rate limit. The streaming pull protocol doesn't carry
# HTTP status codes, so the error only surfaces as a text message
# here. Install() refines this into a Docker Hub / GHCR specific
# exception based on the image being pulled.
return DockerRegistryRateLimitExceeded(_LOGGER.warning)
return DockerError(self.error, _LOGGER.error)

View File

@@ -85,13 +85,10 @@ class DockerNetwork:
)
current_mtu = int(current_mtu_str) if current_mtu_str is not None else None
# Check if settings differ from what is set. Use default if not explicitly set.
# Check if we have explicitly provided settings that differ from what is set
changes = []
effective_ipv6 = (
enable_ipv6 if enable_ipv6 is not None else DOCKER_ENABLE_IPV6_DEFAULT
)
if current_ipv6 != effective_ipv6:
changes.append("IPv4/IPv6 Dual-Stack" if effective_ipv6 else "IPv4-Only")
if enable_ipv6 is not None and current_ipv6 != enable_ipv6:
changes.append("IPv4/IPv6 Dual-Stack" if enable_ipv6 else "IPv4-Only")
if mtu is not None and current_mtu != mtu:
changes.append(f"MTU {mtu}")

View File

@@ -5,6 +5,11 @@ from typing import Any
from .const import OBSERVER_PORT
MESSAGE_CHECK_SUPERVISOR_LOGS = (
"Check supervisor logs for details (check with '{logs_command}')"
)
EXTRA_FIELDS_LOGS_COMMAND = {"logs_command": "ha supervisor logs"}
class HassioError(Exception):
"""Root exception."""
@@ -97,8 +102,8 @@ class APIInternalServerError(APIError):
status = 500
class APIAppNotInstalled(APIError):
"""Not installed app requested at apps API."""
class APIAddonNotInstalled(APIError):
"""Not installed addon requested at addons API."""
class APIDBMigrationInProgress(APIError):
@@ -120,8 +125,9 @@ class APIUnknownSupervisorError(APIError):
) -> None:
"""Initialize exception."""
self.message_template = (
f"{self.message_template}. Check Supervisor logs for details"
f"{self.message_template}. {MESSAGE_CHECK_SUPERVISOR_LOGS}"
)
self.extra_fields = (self.extra_fields or {}) | EXTRA_FIELDS_LOGS_COMMAND
super().__init__(None, logger, job_id=job_id)
@@ -342,68 +348,70 @@ class AudioJobError(AudioError, PluginJobError):
"""Raise on job error with audio plugin."""
# Apps
# Addons
class AppsError(HassioError):
"""Apps exception."""
class AddonsError(HassioError):
"""Addons exception."""
class AppConfigurationError(AppsError):
"""Error with app configuration."""
class AddonConfigurationError(AddonsError):
"""Error with add-on configuration."""
class AppConfigurationInvalidError(AppConfigurationError, APIError):
"""Raise if invalid configuration provided for app."""
class AddonConfigurationInvalidError(AddonConfigurationError, APIError):
"""Raise if invalid configuration provided for addon."""
error_key = "addon_configuration_invalid_error"
message_template = "App {addon} has invalid options: {validation_error}"
message_template = "Add-on {addon} has invalid options: {validation_error}"
def __init__(
self,
logger: Callable[..., None] | None = None,
*,
app: str,
addon: str,
validation_error: str,
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": app, "validation_error": validation_error}
self.extra_fields = {"addon": addon, "validation_error": validation_error}
super().__init__(None, logger)
class AppBootConfigCannotChangeError(AppsError, APIError):
"""Raise if user attempts to change app boot config when it can't be changed."""
class AddonBootConfigCannotChangeError(AddonsError, APIError):
"""Raise if user attempts to change addon boot config when it can't be changed."""
error_key = "addon_boot_config_cannot_change_error"
message_template = (
"App {addon} boot option is set to {boot_config} so it cannot be changed"
"Addon {addon} boot option is set to {boot_config} so it cannot be changed"
)
def __init__(
self, logger: Callable[..., None] | None = None, *, app: str, boot_config: str
self, logger: Callable[..., None] | None = None, *, addon: str, boot_config: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": app, "boot_config": boot_config}
self.extra_fields = {"addon": addon, "boot_config": boot_config}
super().__init__(None, logger)
class AppNotRunningError(AppsError, APIError):
"""Raise when an app is not running."""
class AddonNotRunningError(AddonsError, APIError):
"""Raise when an addon is not running."""
error_key = "addon_not_running_error"
message_template = "App {addon} is not running"
message_template = "Add-on {addon} is not running"
def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None:
def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": app}
self.extra_fields = {"addon": addon}
super().__init__(None, logger)
class AppPortConflict(AppsError, APIError):
"""Raise if app cannot start due to a port conflict."""
class AddonPortConflict(AddonsError, APIError):
"""Raise if addon cannot start due to a port conflict."""
error_key = "addon_port_conflict"
message_template = "Cannot start app {name} because port {port} is already in use"
message_template = "Cannot start addon {name} because port {port} is already in use"
def __init__(
self, logger: Callable[..., None] | None = None, *, name: str, port: int
@@ -413,15 +421,15 @@ class AppPortConflict(AppsError, APIError):
super().__init__(None, logger)
class AppNotSupportedError(HassioNotSupportedError):
"""App doesn't support a function."""
class AddonNotSupportedError(HassioNotSupportedError):
"""Addon doesn't support a function."""
class AppNotSupportedArchitectureError(AppNotSupportedError):
"""App does not support system due to architecture."""
class AddonNotSupportedArchitectureError(AddonNotSupportedError):
"""Addon does not support system due to architecture."""
error_key = "addon_not_supported_architecture_error"
message_template = "App {slug} not supported on this platform, supported architectures: {architectures}"
message_template = "Add-on {slug} not supported on this platform, supported architectures: {architectures}"
def __init__(
self,
@@ -435,11 +443,11 @@ class AppNotSupportedArchitectureError(AppNotSupportedError):
super().__init__(None, logger)
class AppNotSupportedMachineTypeError(AppNotSupportedError):
"""App does not support system due to machine type."""
class AddonNotSupportedMachineTypeError(AddonNotSupportedError):
"""Addon does not support system due to machine type."""
error_key = "addon_not_supported_machine_type_error"
message_template = "App {slug} not supported on this machine, supported machine types: {machine_types}"
message_template = "Add-on {slug} not supported on this machine, supported machine types: {machine_types}"
def __init__(
self,
@@ -453,11 +461,11 @@ class AppNotSupportedMachineTypeError(AppNotSupportedError):
super().__init__(None, logger)
class AppNotSupportedHomeAssistantVersionError(AppNotSupportedError):
"""App does not support system due to Home Assistant version."""
class AddonNotSupportedHomeAssistantVersionError(AddonNotSupportedError):
"""Addon does not support system due to Home Assistant version."""
error_key = "addon_not_supported_home_assistant_version_error"
message_template = "App {slug} not supported on this system, requires Home Assistant version {version} or greater"
message_template = "Add-on {slug} not supported on this system, requires Home Assistant version {version} or greater"
def __init__(
self,
@@ -471,40 +479,44 @@ class AppNotSupportedHomeAssistantVersionError(AppNotSupportedError):
super().__init__(None, logger)
class AppNotSupportedWriteStdinError(AppNotSupportedError, APIError):
"""App does not support writing to stdin."""
class AddonNotSupportedWriteStdinError(AddonNotSupportedError, APIError):
"""Addon does not support writing to stdin."""
error_key = "addon_not_supported_write_stdin_error"
message_template = "App {addon} does not support writing to stdin"
message_template = "Add-on {addon} does not support writing to stdin"
def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None:
def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": app}
self.extra_fields = {"addon": addon}
super().__init__(None, logger)
class AppBuildDockerfileMissingError(AppNotSupportedError, APIError):
"""Raise when app build invalid because dockerfile is missing."""
class AddonBuildDockerfileMissingError(AddonNotSupportedError, APIError):
"""Raise when addon build invalid because dockerfile is missing."""
error_key = "addon_build_dockerfile_missing_error"
message_template = (
"Cannot build app '{addon}' because dockerfile is missing. A repair "
"Cannot build addon '{addon}' because dockerfile is missing. A repair "
"using '{repair_command}' will fix this if the cause is data "
"corruption. Otherwise please report this to the app developer."
"corruption. Otherwise please report this to the addon developer."
)
def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None:
def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": app, "repair_command": "ha supervisor repair"}
self.extra_fields = {"addon": addon, "repair_command": "ha supervisor repair"}
super().__init__(None, logger)
class AppBuildArchitectureNotSupportedError(AppNotSupportedError, APIError):
"""Raise when app cannot be built on system because it doesn't support its architecture."""
class AddonBuildArchitectureNotSupportedError(AddonNotSupportedError, APIError):
"""Raise when addon cannot be built on system because it doesn't support its architecture."""
error_key = "addon_build_architecture_not_supported_error"
message_template = (
"Cannot build app '{addon}' because its supported architectures "
"Cannot build addon '{addon}' because its supported architectures "
"({addon_arches}) do not match the system supported architectures ({system_arches})"
)
@@ -512,46 +524,50 @@ class AppBuildArchitectureNotSupportedError(AppNotSupportedError, APIError):
self,
logger: Callable[..., None] | None = None,
*,
app: str,
app_arch_list: list[str],
addon: str,
addon_arch_list: list[str],
system_arch_list: list[str],
) -> None:
"""Initialize exception."""
self.extra_fields = {
"addon": app,
"addon_arches": ", ".join(app_arch_list),
"addon": addon,
"addon_arches": ", ".join(addon_arch_list),
"system_arches": ", ".join(system_arch_list),
}
super().__init__(None, logger)
class AppUnknownError(AppsError, APIUnknownSupervisorError):
"""Raise when unknown error occurs taking an action for an app."""
class AddonUnknownError(AddonsError, APIUnknownSupervisorError):
"""Raise when unknown error occurs taking an action for an addon."""
error_key = "addon_unknown_error"
message_template = "An unknown error occurred with app {addon}"
message_template = "An unknown error occurred with addon {addon}"
def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None:
def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": app}
self.extra_fields = {"addon": addon}
super().__init__(logger)
class AppBuildFailedUnknownError(AppsError, APIUnknownSupervisorError):
"""Raise when the build failed for an app due to an unknown error."""
class AddonBuildFailedUnknownError(AddonsError, APIUnknownSupervisorError):
"""Raise when the build failed for an addon due to an unknown error."""
error_key = "addon_build_failed_unknown_error"
message_template = (
"An unknown error occurred while trying to build the image for app {addon}"
"An unknown error occurred while trying to build the image for addon {addon}"
)
def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None:
def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": app}
self.extra_fields = {"addon": addon}
super().__init__(logger)
class AppsJobError(AppsError, JobException):
class AddonsJobError(AddonsError, JobException):
"""Raise on job errors."""
@@ -878,36 +894,8 @@ class DockerContainerPortConflict(DockerError, APIError):
super().__init__(None, logger)
class DockerRegistryAuthError(DockerError, APIError):
"""Raise when Docker registry authentication fails."""
error_key = "docker_registry_auth_error"
message_template = (
"Docker registry authentication failed for {registry}. "
"Check your registry credentials"
)
def __init__(
self, logger: Callable[..., None] | None = None, *, registry: str
) -> None:
"""Raise & log."""
self.extra_fields = {"registry": registry}
super().__init__(None, logger=logger)
class DockerRegistryRateLimitExceeded(DockerError, APITooManyRequests):
"""Raise when a container registry rate limits requests."""
error_key = "container_registry_rate_limit_exceeded"
message_template = "Container registry rate limit exceeded"
def __init__(self, logger: Callable[..., None] | None = None) -> None:
"""Raise & log."""
super().__init__(None, logger=logger)
class DockerHubRateLimitExceeded(DockerRegistryRateLimitExceeded):
"""Raise for Docker Hub rate limit exceeded error."""
class DockerHubRateLimitExceeded(DockerError, APITooManyRequests):
"""Raise for docker hub rate limit exceeded error."""
error_key = "dockerhub_rate_limit_exceeded"
message_template = (
@@ -918,15 +906,9 @@ class DockerHubRateLimitExceeded(DockerRegistryRateLimitExceeded):
"dockerhub_rate_limit_url": "https://www.home-assistant.io/more-info/dockerhub-rate-limit"
}
class GithubContainerRegistryRateLimitExceeded(DockerRegistryRateLimitExceeded):
"""Raise for GitHub Container Registry rate limit exceeded error."""
error_key = "ghcr_rate_limit_exceeded"
message_template = (
"GitHub Container Registry rate limited the request. "
"This is typically transient; the update will be retried."
)
def __init__(self, logger: Callable[..., None] | None = None) -> None:
"""Raise & log."""
super().__init__(None, logger=logger)
class DockerJobError(DockerError, JobException):
@@ -1035,20 +1017,22 @@ class StoreNotFound(StoreError):
"""Raise if slug is not known."""
class StoreAppNotFoundError(StoreError, APINotFound):
"""Raise if a requested app is not in the store."""
class StoreAddonNotFoundError(StoreError, APINotFound):
"""Raise if a requested addon is not in the store."""
error_key = "store_addon_not_found_error"
message_template = "App {addon} does not exist in the store"
message_template = "Addon {addon} does not exist in the store"
def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None:
def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": app}
self.extra_fields = {"addon": addon}
super().__init__(None, logger)
class StoreRepositoryLocalCannotReset(StoreError, APIError):
"""Raise if user requests a reset on the local app repository."""
"""Raise if user requests a reset on the local addon repository."""
error_key = "store_repository_local_cannot_reset"
message_template = "Can't reset repository {local_repo} as it is not git based!"
@@ -1063,15 +1047,15 @@ class StoreJobError(StoreError, JobException):
"""Raise on job error with git."""
class StoreInvalidAppRepo(StoreError):
"""Raise on invalid app repo."""
class StoreInvalidAddonRepo(StoreError):
"""Raise on invalid addon repo."""
class StoreRepositoryUnknownError(StoreError, APIUnknownSupervisorError):
"""Raise when unknown error occurs taking an action for a store repository."""
error_key = "store_repository_unknown_error"
message_template = "An unknown error occurred with app repository {repo}"
message_template = "An unknown error occurred with addon repository {repo}"
def __init__(self, logger: Callable[..., None] | None = None, *, repo: str) -> None:
"""Initialize exception."""
@@ -1118,46 +1102,42 @@ class BackupFileExistError(BackupError):
"""Raise if the backup file already exists."""
class BackupFatalIOError(BackupError):
"""Raise on write-side I/O errors that leave the backup tar corrupt."""
class AppBackupMetadataInvalidError(BackupError, APIError):
"""Raise if invalid metadata file provided for app in backup."""
class AddonBackupMetadataInvalidError(BackupError, APIError):
"""Raise if invalid metadata file provided for addon in backup."""
error_key = "addon_backup_metadata_invalid_error"
message_template = (
"Metadata file for app {addon} in backup is invalid: {validation_error}"
"Metadata file for add-on {addon} in backup is invalid: {validation_error}"
)
def __init__(
self,
logger: Callable[..., None] | None = None,
*,
app: str,
addon: str,
validation_error: str,
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": app, "validation_error": validation_error}
self.extra_fields = {"addon": addon, "validation_error": validation_error}
super().__init__(None, logger)
class AppPrePostBackupCommandReturnedError(BackupError, APIError):
"""Raise when app's pre/post backup command returns an error."""
class AddonPrePostBackupCommandReturnedError(BackupError, APIError):
"""Raise when addon's pre/post backup command returns an error."""
error_key = "addon_pre_post_backup_command_returned_error"
message_template = (
"Pre-/Post backup command for app {addon} returned error code: "
"{exit_code}. Please report this to the app developer. Enable debug "
"Pre-/Post backup command for add-on {addon} returned error code: "
"{exit_code}. Please report this to the addon developer. Enable debug "
"logging to capture complete command output using {debug_logging_command}"
)
def __init__(
self, logger: Callable[..., None] | None = None, *, app: str, exit_code: int
self, logger: Callable[..., None] | None = None, *, addon: str, exit_code: int
) -> None:
"""Initialize exception."""
self.extra_fields = {
"addon": app,
"addon": addon,
"exit_code": exit_code,
"debug_logging_command": "ha supervisor options --logging debug",
}

View File

@@ -13,20 +13,13 @@ from aiohttp import hdrs
from awesomeversion import AwesomeVersion
from multidict import MultiMapping
from ..const import SOCKET_CORE, FeatureFlag
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.const import ENV_CORE_API_SOCKET, ContainerState
from ..docker.monitor import DockerContainerStateEvent
from ..exceptions import HomeAssistantAPIError, HomeAssistantAuthError
from ..utils import version_is_new_enough
from .const import LANDINGPAGE
from .websocket import WSClient
_LOGGER: logging.Logger = logging.getLogger(__name__)
CORE_UNIX_SOCKET_MIN_VERSION: AwesomeVersion = AwesomeVersion(
"2026.4.0.dev202603250907"
)
GET_CORE_STATE_MIN_VERSION: AwesomeVersion = AwesomeVersion("2023.8.0.dev20230720")
@@ -46,102 +39,11 @@ class HomeAssistantAPI(CoreSysAttributes):
self.coresys: CoreSys = coresys
# We don't persist access tokens. Instead we fetch new ones when needed
self._access_token: str | None = None
self.access_token: str | None = None
self._access_token_expires: datetime | None = None
self._token_lock: asyncio.Lock = asyncio.Lock()
self._unix_session: aiohttp.ClientSession | None = None
self._core_connected: bool = False
@property
def supports_unix_socket(self) -> bool:
    """Return True if the installed Core version supports Unix socket communication.

    Used to decide whether to configure the env var when starting Core.
    """
    # The feature flag gates the whole mechanism.
    if not self.sys_config.feature_flags.get(FeatureFlag.UNIX_SOCKET_CORE_API, False):
        return False

    version = self.sys_homeassistant.version
    # No version known yet, or the landingpage image — no socket support.
    if version is None or version == LANDINGPAGE:
        return False

    return version_is_new_enough(version, CORE_UNIX_SOCKET_MIN_VERSION)
@property
def use_unix_socket(self) -> bool:
    """Return True if the running Core container is configured for Unix socket.

    Checks both version support and that the container was actually started
    with the SUPERVISOR_CORE_API_SOCKET env var. This prevents failures
    during Supervisor upgrades where Core is still running with a container
    started by the old Supervisor.

    Requires container metadata to be available (via attach() or run()).
    Callers should ensure the container is running before using this.
    """
    if not self.supports_unix_socket:
        return False

    instance = self.sys_homeassistant.core.instance
    if not instance.attached:
        raise HomeAssistantAPIError(
            "Cannot determine Core connection mode: container metadata not available"
        )

    # The env var marks a container started in Unix socket mode.
    marker = f"{ENV_CORE_API_SOCKET}="
    for env in instance.meta_config.get("Env", []):
        if env.startswith(marker):
            return True
    return False
@property
def session(self) -> aiohttp.ClientSession:
    """Return session for Core communication.

    Uses a Unix socket session when the installed Core version supports it,
    otherwise falls back to the default TCP websession. If the socket does
    not exist yet (e.g. during Core startup), requests will fail with a
    connection error handled by the caller.
    """
    if not self.use_unix_socket:
        return self.sys_websession

    # Lazily (re)create the Unix socket session when missing or closed.
    current = self._unix_session
    if current is None or current.closed:
        current = aiohttp.ClientSession(
            connector=aiohttp.UnixConnector(path=str(SOCKET_CORE))
        )
        self._unix_session = current
    return current
@property
def api_url(self) -> str:
    """Return API base url for internal Supervisor to Core communication."""
    # Unix socket transport ignores the host part; "localhost" is a placeholder.
    return "http://localhost" if self.use_unix_socket else self.sys_homeassistant.api_url
@property
def ws_url(self) -> str:
    """Return WebSocket url for internal Supervisor to Core communication."""
    # Unix socket transport ignores the host part; "localhost" is a placeholder.
    return (
        "ws://localhost/api/websocket"
        if self.use_unix_socket
        else self.sys_homeassistant.ws_url
    )
async def container_state_changed(self, event: DockerContainerStateEvent) -> None:
    """Process Core container state changes.

    Registered on the DOCKER_CONTAINER_STATE_CHANGE bus event. When the
    Core container reaches a terminal state, drop the cached Unix socket
    session so the next request opens a fresh one against the new container.
    """
    # Ignore events for containers other than Home Assistant Core.
    if event.name != self.sys_homeassistant.core.instance.name:
        return
    # Only terminal states invalidate the existing connection.
    if event.state not in (ContainerState.STOPPED, ContainerState.FAILED):
        return

    self._core_connected = False
    if self._unix_session and not self._unix_session.closed:
        await self._unix_session.close()
    # NOTE(review): original indentation is mangled in this dump — assumed the
    # reset is unconditional (standard pattern); confirm against upstream.
    self._unix_session = None
async def close(self) -> None:
    """Close the Unix socket session."""
    # Close only if a live session exists; always clear the reference so a
    # later access recreates it.
    if self._unix_session and not self._unix_session.closed:
        await self._unix_session.close()
    self._unix_session = None
async def _ensure_access_token(self) -> None:
async def ensure_access_token(self) -> None:
"""Ensure there is a valid access token.
Raises:
@@ -153,7 +55,7 @@ class HomeAssistantAPI(CoreSysAttributes):
# Fast path check without lock (avoid unnecessary locking
# for the majority of calls).
if (
self._access_token
self.access_token
and self._access_token_expires
and self._access_token_expires > datetime.now(tz=UTC)
):
@@ -162,7 +64,7 @@ class HomeAssistantAPI(CoreSysAttributes):
async with self._token_lock:
# Double-check after acquiring lock (avoid race condition)
if (
self._access_token
self.access_token
and self._access_token_expires
and self._access_token_expires > datetime.now(tz=UTC)
):
@@ -184,43 +86,11 @@ class HomeAssistantAPI(CoreSysAttributes):
_LOGGER.info("Updated Home Assistant API token")
tokens = await resp.json()
self._access_token = tokens["access_token"]
self.access_token = tokens["access_token"]
self._access_token_expires = datetime.now(tz=UTC) + timedelta(
seconds=tokens["expires_in"]
)
async def connect_websocket(self) -> WSClient:
    """Connect a WebSocket to Core, handling auth as appropriate.

    For Unix socket connections, no authentication is needed.
    For TCP connections, handles token management with one retry
    on auth failure.

    Raises:
        HomeAssistantAPIError: On connection or auth failure.

    """
    if not await self.sys_homeassistant.core.instance.is_running():
        raise HomeAssistantAPIError("Core container is not running", _LOGGER.debug)

    # Unix socket peers are authenticated by the connection itself.
    if self.use_unix_socket:
        return await WSClient.connect(self.session, self.ws_url)

    # TCP: first attempt uses the cached token; on auth failure the token is
    # dropped so the second attempt fetches a fresh one.
    for attempt in (1, 2):
        try:
            await self._ensure_access_token()
            assert self._access_token
            return await WSClient.connect_with_auth(
                self.session, self.ws_url, self._access_token
            )
        except HomeAssistantAuthError:
            # Cached token rejected — invalidate and retry once.
            self._access_token = None
            if attempt == 2:
                raise

    # Unreachable, but satisfies type checker
    raise RuntimeError("Unreachable")
@asynccontextmanager
async def make_request(
self,
@@ -233,16 +103,15 @@ class HomeAssistantAPI(CoreSysAttributes):
params: MultiMapping[str] | None = None,
headers: dict[str, str] | None = None,
) -> AsyncIterator[aiohttp.ClientResponse]:
"""Async context manager to make requests to Home Assistant Core API.
"""Async context manager to make authenticated requests to Home Assistant API.
This context manager handles transport and authentication automatically.
For Unix socket connections, requests are made directly without auth.
For TCP connections, it manages access tokens and retries once on 401.
It yields the HTTP response for the caller to handle.
This context manager handles authentication token management automatically,
including token refresh on 401 responses. It yields the HTTP response
for the caller to handle.
Error Handling:
- HTTP error status codes (4xx, 5xx) are preserved in the response
- Authentication is handled transparently (TCP only)
- Authentication is handled transparently with one retry on 401
- Network/connection failures raise HomeAssistantAPIError
- No logging is performed - callers should handle logging as needed
@@ -264,22 +133,19 @@ class HomeAssistantAPI(CoreSysAttributes):
network errors, timeouts, or connection failures
"""
if not await self.sys_homeassistant.core.instance.is_running():
raise HomeAssistantAPIError("Core container is not running", _LOGGER.debug)
url = f"{self.api_url}/{path}"
url = f"{self.sys_homeassistant.api_url}/{path}"
headers = headers or {}
client_timeout = aiohttp.ClientTimeout(total=timeout)
# Passthrough content type
if content_type is not None:
headers[hdrs.CONTENT_TYPE] = content_type
for _ in (1, 2):
try:
if not self.use_unix_socket:
await self._ensure_access_token()
headers[hdrs.AUTHORIZATION] = f"Bearer {self._access_token}"
async with self.session.request(
await self.ensure_access_token()
headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}"
async with self.sys_websession.request(
method,
url,
data=data,
@@ -289,8 +155,9 @@ class HomeAssistantAPI(CoreSysAttributes):
params=params,
ssl=False,
) as resp:
if resp.status == 401 and not self.use_unix_socket:
self._access_token = None
# Access token expired
if resp.status == 401:
self.access_token = None
continue
yield resp
return
@@ -317,10 +184,7 @@ class HomeAssistantAPI(CoreSysAttributes):
async def get_core_state(self) -> dict[str, Any]:
"""Return Home Assistant core state."""
state = await self._get_json("api/core/state")
if state is None or not isinstance(state, dict):
raise HomeAssistantAPIError("No state received from Home Assistant API")
return state
return await self._get_json("api/core/state")
async def get_api_state(self) -> APIState | None:
"""Return state of Home Assistant Core or None."""
@@ -342,22 +206,14 @@ class HomeAssistantAPI(CoreSysAttributes):
data = await self.get_core_state()
else:
data = await self.get_config()
if not self._core_connected:
self._core_connected = True
transport = (
f"Unix socket {SOCKET_CORE}"
if self.use_unix_socket
else f"TCP {self.sys_homeassistant.api_url}"
)
_LOGGER.info("Connected to Core via %s", transport)
state = data.get("state", "RUNNING")
# Recorder state was added in HA Core 2024.8
recorder_state = data.get("recorder_state", {})
migrating = recorder_state.get("migration_in_progress", False)
live_migration = recorder_state.get("migration_is_live", False)
return APIState(state, migrating and not live_migration)
# Older versions of Home Assistant do not expose the state
if data:
state = data.get("state", "RUNNING")
# Recorder state was added in HA Core 2024.8
recorder_state = data.get("recorder_state", {})
migrating = recorder_state.get("migration_in_progress", False)
live_migration = recorder_state.get("migration_is_live", False)
return APIState(state, migrating and not live_migration)
except HomeAssistantAPIError as err:
_LOGGER.debug("Can't connect to Home Assistant API: %s", err)

View File

@@ -321,6 +321,8 @@ class HomeAssistantCore(JobGroup):
# Successful - last step
await self.sys_homeassistant.save_data()
with suppress(DockerError):
await self.instance.cleanup(old_image=old_image)
# Update Home Assistant
with suppress(HomeAssistantError):
@@ -330,24 +332,22 @@ class HomeAssistantCore(JobGroup):
try:
data = await self.sys_homeassistant.api.get_config()
except HomeAssistantError:
# The API stopped responding between the update and now
# The API stopped responding between the up checks and now
self._error_state = True
return
# Verify that the frontend is loaded
if "frontend" not in data.get("components", []):
_LOGGER.error("API responds but frontend is not loaded")
self._error_state = True
# Check that the frontend is actually accessible
elif not await self.sys_homeassistant.api.check_frontend_available():
_LOGGER.error(
"Frontend component loaded but frontend is not accessible"
)
self._error_state = True
else:
# Verify that the frontend is loaded
if "frontend" not in data.get("components", []):
_LOGGER.error("API responds but frontend is not loaded")
self._error_state = True
# Check that the frontend is actually accessible
elif not await self.sys_homeassistant.api.check_frontend_available():
_LOGGER.error(
"Frontend component loaded but frontend is not accessible"
)
self._error_state = True
else:
# Health checks passed, clean up old image
with suppress(DockerError):
await self.instance.cleanup(old_image=old_image)
return
return
# Update going wrong, revert it
if self.error_state and rollback:
@@ -506,7 +506,7 @@ class HomeAssistantCore(JobGroup):
raise HomeAssistantError("Fatal error on config check!", _LOGGER.error)
# Convert output
log = remove_colors("".join(result.log))
log = remove_colors("\n".join(result.log))
_LOGGER.debug("Result config check: %s", log.strip())
# Parse output

View File

@@ -318,10 +318,6 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes):
)
# Register for events
self.sys_bus.register_event(
BusEvent.DOCKER_CONTAINER_STATE_CHANGE,
self._api.container_state_changed,
)
self.sys_bus.register_event(BusEvent.HARDWARE_NEW_DEVICE, self._hardware_events)
self.sys_bus.register_event(
BusEvent.HARDWARE_REMOVE_DEVICE, self._hardware_events

View File

@@ -1,4 +1,4 @@
"""Handle Home Assistant secrets to apps."""
"""Handle Home Assistant secrets to add-ons."""
from datetime import timedelta
import logging

View File

@@ -3,8 +3,9 @@
from __future__ import annotations
import asyncio
from contextlib import suppress
import logging
from typing import Any, TypeVar
from typing import Any, TypeVar, cast
import aiohttp
from aiohttp.http_websocket import WSMsgType
@@ -33,11 +34,6 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
T = TypeVar("T")
# Maximum message size for WebSocket messages from Core. Matches the cap used
# by the ingress proxy; Supervisor's own control channel never gets close to
# this but shares the setting for simplicity. See issue #4392.
MAX_MESSAGE_SIZE_FROM_CORE = 64 * 1024 * 1024
class WSClient:
"""Home Assistant Websocket client."""
@@ -49,14 +45,14 @@ class WSClient:
):
"""Initialise the WS client."""
self.ha_version = ha_version
self.client = client
self._client = client
self._message_id: int = 0
self._futures: dict[int, asyncio.Future[T]] = {} # type: ignore
@property
def connected(self) -> bool:
"""Return if we're currently connected."""
return self.client is not None and not self.client.closed
return self._client is not None and not self._client.closed
async def close(self) -> None:
"""Close down the client."""
@@ -66,8 +62,8 @@ class WSClient:
HomeAssistantWSConnectionError("Connection was closed")
)
if not self.client.closed:
await self.client.close()
if not self._client.closed:
await self._client.close()
async def async_send_command(self, message: dict[str, Any]) -> T:
"""Send a websocket message, and return the response."""
@@ -76,7 +72,7 @@ class WSClient:
self._futures[message["id"]] = asyncio.get_running_loop().create_future()
_LOGGER.debug("Sending: %s", message)
try:
await self.client.send_json(message, dumps=json_dumps)
await self._client.send_json(message, dumps=json_dumps)
except ConnectionError as err:
raise HomeAssistantWSConnectionError(str(err)) from err
@@ -101,7 +97,7 @@ class WSClient:
async def _receive_json(self) -> None:
"""Receive json."""
msg = await self.client.receive()
msg = await self._client.receive()
_LOGGER.debug("Received: %s", msg)
if msg.type == WSMsgType.CLOSE:
@@ -143,101 +139,27 @@ class WSClient:
)
@classmethod
async def _ws_connect(
cls,
session: aiohttp.ClientSession,
url: str,
) -> aiohttp.ClientWebSocketResponse:
"""Open a raw WebSocket connection to Core."""
async def connect_with_auth(
cls, session: aiohttp.ClientSession, url: str, token: str
) -> WSClient:
"""Create an authenticated websocket client."""
try:
return await session.ws_connect(
url, ssl=False, max_msg_size=MAX_MESSAGE_SIZE_FROM_CORE
)
client = await session.ws_connect(url, ssl=False)
except aiohttp.client_exceptions.ClientConnectorError:
raise HomeAssistantWSConnectionError("Can't connect") from None
@classmethod
async def connect(
cls,
session: aiohttp.ClientSession,
url: str,
) -> WSClient:
"""Connect via Unix socket (no auth exchange).
hello_message = await client.receive_json()
Core authenticates the peer by the socket connection itself
and sends auth_ok immediately.
"""
client = await cls._ws_connect(session, url)
try:
first_message = await client.receive_json()
await client.send_json(
{ATTR_TYPE: WSType.AUTH, ATTR_ACCESS_TOKEN: token}, dumps=json_dumps
)
if first_message[ATTR_TYPE] != "auth_ok":
raise HomeAssistantAPIError(
f"Expected auth_ok on Unix socket, got {first_message[ATTR_TYPE]}"
)
auth_ok_message = await client.receive_json()
return cls(AwesomeVersion(first_message["ha_version"]), client)
except HomeAssistantAPIError:
await client.close()
raise
except (
KeyError,
ValueError,
TypeError,
aiohttp.ClientError,
TimeoutError,
) as err:
await client.close()
raise HomeAssistantWSConnectionError(
f"Unexpected error during WebSocket handshake: {err}"
) from err
if auth_ok_message[ATTR_TYPE] != "auth_ok":
raise HomeAssistantAPIError("AUTH NOT OK")
@classmethod
async def connect_with_auth(
    cls,
    session: aiohttp.ClientSession,
    url: str,
    token: str,
) -> WSClient:
    """Connect via TCP with token authentication.

    Expects auth_required from Core, sends the token, then expects auth_ok.
    The auth_required message also carries ha_version.

    Raises:
        HomeAssistantAPIError: If the handshake messages are not the expected types.
        HomeAssistantWSConnectionError: On connection or malformed-payload errors.

    """
    client = await cls._ws_connect(session, url)
    try:
        # auth_required message also carries ha_version
        first_message = await client.receive_json()
        if first_message[ATTR_TYPE] != "auth_required":
            raise HomeAssistantAPIError(
                f"Expected auth_required, got {first_message[ATTR_TYPE]}"
            )

        await client.send_json(
            {ATTR_TYPE: WSType.AUTH, ATTR_ACCESS_TOKEN: token}, dumps=json_dumps
        )

        auth_ok_message = await client.receive_json()
        if auth_ok_message[ATTR_TYPE] != "auth_ok":
            raise HomeAssistantAPIError("AUTH NOT OK")

        return cls(AwesomeVersion(first_message["ha_version"]), client)
    except HomeAssistantAPIError:
        # Protocol/auth failure: close the raw socket before propagating.
        await client.close()
        raise
    except (
        KeyError,
        ValueError,
        TypeError,
        aiohttp.ClientError,
        TimeoutError,
    ) as err:
        # Malformed handshake payload or transport failure — wrap so callers
        # only have to handle the WS connection error type.
        await client.close()
        raise HomeAssistantWSConnectionError(
            f"Unexpected error during WebSocket handshake: {err}"
        ) from err
return cls(AwesomeVersion(hello_message["ha_version"]), client)
class HomeAssistantWebSocket(CoreSysAttributes):
@@ -246,7 +168,7 @@ class HomeAssistantWebSocket(CoreSysAttributes):
def __init__(self, coresys: CoreSys):
"""Initialize Home Assistant object."""
self.coresys: CoreSys = coresys
self.client: WSClient | None = None
self._client: WSClient | None = None
self._lock: asyncio.Lock = asyncio.Lock()
self._queue: list[dict[str, Any]] = []
@@ -261,10 +183,16 @@ class HomeAssistantWebSocket(CoreSysAttributes):
async def _get_ws_client(self) -> WSClient:
"""Return a websocket client."""
async with self._lock:
if self.client is not None and self.client.connected:
return self.client
if self._client is not None and self._client.connected:
return self._client
client = await self.sys_homeassistant.api.connect_websocket()
with suppress(asyncio.TimeoutError, aiohttp.ClientError):
await self.sys_homeassistant.api.ensure_access_token()
client = await WSClient.connect_with_auth(
self.sys_websession,
self.sys_homeassistant.ws_url,
cast(str, self.sys_homeassistant.api.access_token),
)
self.sys_create_task(client.start_listener())
return client
@@ -280,7 +208,7 @@ class HomeAssistantWebSocket(CoreSysAttributes):
"WebSocket not available, system is shutting down"
)
connected = self.client and self.client.connected
connected = self._client and self._client.connected
# If we are already connected, we can avoid the check_api_state call
# since it makes a new socket connection and we already have one.
if not connected and not await self.sys_homeassistant.api.check_api_state():
@@ -288,8 +216,8 @@ class HomeAssistantWebSocket(CoreSysAttributes):
"Can't connect to Home Assistant Core WebSocket, the API is not reachable"
)
if not self.client or not self.client.connected:
self.client = await self._get_ws_client()
if not self._client or not self._client.connected:
self._client = await self._get_ws_client()
async def load(self) -> None:
"""Set up queue processor after startup completes."""
@@ -310,19 +238,19 @@ class HomeAssistantWebSocket(CoreSysAttributes):
try:
await self._ensure_connected()
except HomeAssistantWSError as err:
_LOGGER.warning("Can't send WebSocket command: %s", err)
_LOGGER.debug("Can't send WebSocket command: %s", err)
return
# _ensure_connected guarantees self.client is set
assert self.client
# _ensure_connected guarantees self._client is set
assert self._client
try:
await self.client.async_send_command(message)
await self._client.async_send_command(message)
except HomeAssistantWSConnectionError as err:
_LOGGER.debug("Fire-and-forget WebSocket command failed: %s", err)
if self.client:
await self.client.close()
self.client = None
if self._client:
await self._client.close()
self._client = None
async def async_send_command(self, message: dict[str, Any]) -> T:
"""Send a command and return the response.
@@ -330,14 +258,14 @@ class HomeAssistantWebSocket(CoreSysAttributes):
Raises HomeAssistantWSError on WebSocket connection or communication failure.
"""
await self._ensure_connected()
# _ensure_connected guarantees self.client is set
assert self.client
# _ensure_connected guarantees self._client is set
assert self._client
try:
return await self.client.async_send_command(message)
return await self._client.async_send_command(message)
except HomeAssistantWSConnectionError:
if self.client:
await self.client.close()
self.client = None
if self._client:
await self._client.close()
self._client = None
raise
def send_command(self, message: dict[str, Any]) -> None:

View File

@@ -8,7 +8,12 @@ from dbus_fast import Variant
from ..const import DOCKER_IPV4_NETWORK_MASK, DOCKER_IPV6_NETWORK_MASK, DOCKER_NETWORK
from ..coresys import CoreSys, CoreSysAttributes
from ..dbus.const import StartUnitMode, UnitActiveState
from ..dbus.const import (
DBUS_ATTR_ACTIVE_STATE,
DBUS_IFACE_SYSTEMD_UNIT,
StartUnitMode,
UnitActiveState,
)
from ..dbus.systemd import ExecStartEntry
from ..exceptions import DBusError
from ..resolution.const import UnhealthyReason
@@ -120,8 +125,18 @@ class FirewallManager(CoreSysAttributes):
# Wait for the oneshot unit to finish and verify it succeeded
try:
unit = await self.sys_dbus.systemd.get_unit(FIREWALL_SERVICE)
async with asyncio.timeout(FIREWALL_UNIT_TIMEOUT):
state = await unit.wait_for_active_state(TERMINAL_STATES)
async with (
asyncio.timeout(FIREWALL_UNIT_TIMEOUT),
unit.properties_changed() as signal,
):
state = await unit.get_active_state()
while state not in TERMINAL_STATES:
props = await signal.wait_for_signal()
if (
props[0] == DBUS_IFACE_SYSTEMD_UNIT
and DBUS_ATTR_ACTIVE_STATE in props[1]
):
state = UnitActiveState(props[1][DBUS_ATTR_ACTIVE_STATE].value)
except (DBusError, TimeoutError) as err:
_LOGGER.error(
"Failed waiting for gateway firewall unit to complete: %s", err
@@ -138,6 +153,10 @@ class FirewallManager(CoreSysAttributes):
async def apply_gateway_firewall_rules(self) -> None:
"""Apply gateway firewall rules, marking unsupported on failure."""
if self.sys_dev:
_LOGGER.info("Skipping gateway firewall rules in development mode")
return
if await self._apply_gateway_firewall_rules():
_LOGGER.info("Gateway firewall rules applied")
else:

View File

@@ -37,7 +37,7 @@ class AudioApplication:
stream_type: StreamType
volume: float
mute: bool
app: str
addon: str
@dataclass(slots=True, frozen=True)

View File

@@ -5,7 +5,7 @@ import logging
import random
import secrets
from .addons.addon import App
from .addons.addon import Addon
from .const import (
ATTR_PORTS,
ATTR_SESSION,
@@ -33,11 +33,11 @@ class Ingress(FileConfiguration, CoreSysAttributes):
self.coresys: CoreSys = coresys
self.tokens: dict[str, str] = {}
def get(self, token: str) -> App | None:
"""Return app they have this ingress token."""
def get(self, token: str) -> Addon | None:
"""Return addon they have this ingress token."""
if token not in self.tokens:
return None
return self.sys_apps.get_local_only(self.tokens[token])
return self.sys_addons.get_local_only(self.tokens[token])
def get_session_data(self, session_id: str) -> IngressSessionData | None:
"""Return complementary data of current session or None."""
@@ -61,14 +61,14 @@ class Ingress(FileConfiguration, CoreSysAttributes):
return self._data[ATTR_PORTS]
@property
def apps(self) -> list[App]:
"""Return list of ingress Apps."""
apps = []
for app in self.sys_apps.installed:
if not app.with_ingress:
def addons(self) -> list[Addon]:
"""Return list of ingress Add-ons."""
addons = []
for addon in self.sys_addons.installed:
if not addon.with_ingress:
continue
apps.append(app)
return apps
addons.append(addon)
return addons
async def load(self) -> None:
"""Update internal data."""
@@ -115,13 +115,13 @@ class Ingress(FileConfiguration, CoreSysAttributes):
self.sessions_data.update(sessions_data)
def _update_token_list(self) -> None:
"""Regenerate token <-> App map."""
"""Regenerate token <-> Add-on map."""
self.tokens.clear()
# Read all ingress token and build a map
for app in self.apps:
if app.ingress_token:
self.tokens[app.ingress_token] = app.slug
for addon in self.addons:
if addon.ingress_token:
self.tokens[addon.ingress_token] = addon.slug
def create_session(self, data: IngressSessionData | None = None) -> str:
"""Create new session."""
@@ -158,10 +158,10 @@ class Ingress(FileConfiguration, CoreSysAttributes):
return True
async def get_dynamic_port(self, app_slug: str) -> int:
async def get_dynamic_port(self, addon_slug: str) -> int:
"""Get/Create a dynamic port from range."""
if app_slug in self.ports:
return self.ports[app_slug]
if addon_slug in self.ports:
return self.ports[addon_slug]
port = None
while (
@@ -172,32 +172,37 @@ class Ingress(FileConfiguration, CoreSysAttributes):
port = random.randint(62000, 65500)
# Save port for next time
self.ports[app_slug] = port
self.ports[addon_slug] = port
await self.save_data()
return port
async def del_dynamic_port(self, app_slug: str) -> None:
async def del_dynamic_port(self, addon_slug: str) -> None:
"""Remove a previously assigned dynamic port."""
if app_slug not in self.ports:
if addon_slug not in self.ports:
return
del self.ports[app_slug]
del self.ports[addon_slug]
await self.save_data()
async def update_hass_panel(self, app: App):
"""Update the ingress panel registration in Home Assistant."""
method = "post" if app.ingress_panel else "delete"
async def update_hass_panel(self, addon: Addon):
"""Return True if Home Assistant up and running."""
if not await self.sys_homeassistant.core.is_running():
_LOGGER.debug("Ignoring panel update on Core")
return
# Update UI
method = "post" if addon.ingress_panel else "delete"
try:
async with self.sys_homeassistant.api.make_request(
method, f"api/hassio_push/panel/{app.slug}"
method, f"api/hassio_push/panel/{addon.slug}"
) as resp:
if resp.status in (200, 201):
_LOGGER.info("Update Ingress as panel for %s", app.slug)
_LOGGER.info("Update Ingress as panel for %s", addon.slug)
else:
_LOGGER.warning(
"Failed to update the Ingress panel for %s with %i",
app.slug,
addon.slug,
resp.status,
)
except HomeAssistantAPIError as err:
_LOGGER.error("Panel update request failed for %s: %s", app.slug, err)
_LOGGER.error("Panel update request failed for %s: %s", addon.slug, err)

View File

@@ -98,7 +98,9 @@ class SupervisorJobError:
"""Representation of an error occurring during a supervisor job."""
type_: type[HassioError] = HassioError
message: str = "Unknown error, see Supervisor logs"
message: str = (
"Unknown error, see Supervisor logs (check with 'ha supervisor logs')"
)
stage: str | None = None
error_key: str | None = None
extra_fields: dict[str, Any] | None = None

View File

@@ -1,7 +1,6 @@
"""Filter tools."""
import ipaddress
import logging
import os
import re
from typing import cast
@@ -12,9 +11,7 @@ from sentry_sdk.types import Event, Hint
from ..const import DOCKER_IPV4_NETWORK_MASK, HEADER_TOKEN, HEADER_TOKEN_OLD, CoreState
from ..coresys import CoreSys
from ..exceptions import APITooManyRequests, AppConfigurationError
_LOGGER: logging.Logger = logging.getLogger(__name__)
from ..exceptions import AddonConfigurationError
RE_URL: re.Pattern = re.compile(r"(\w+:\/\/)(.*\.\w+)(.*)")
@@ -46,21 +43,11 @@ def sanitize_url(url: str) -> str:
def filter_data(coresys: CoreSys, event: Event, hint: Hint) -> Event | None:
"""Filter event data before sending to sentry."""
# Ignore some exceptions. Walk the __cause__ chain because rate-limit
# errors are often wrapped (e.g. DockerHubRateLimitExceeded wrapped in
# SupervisorUpdateError by supervisor.update()).
# Ignore some exceptions
if "exc_info" in hint:
_, exc_value, _ = hint["exc_info"]
err: BaseException | None = exc_value
while err is not None:
if isinstance(err, (AppConfigurationError, APITooManyRequests)):
_LOGGER.debug(
"Skipping Sentry event for %s: %s",
type(err).__name__,
exc_value,
)
return None
err = err.__cause__
if isinstance(exc_value, (AddonConfigurationError)):
return None
# Ignore issue if system is not supported or diagnostics is disabled
if not coresys.config.diagnostics or not coresys.core.supported or coresys.dev:
@@ -95,10 +82,10 @@ def filter_data(coresys: CoreSys, event: Event, hint: Hint) -> Event | None:
)
return event
# List installed apps
installed_apps = [
{"slug": app.slug, "repository": app.repository, "name": app.name}
for app in coresys.apps.installed
# List installed addons
installed_addons = [
{"slug": addon.slug, "repository": addon.repository, "name": addon.name}
for addon in coresys.addons.installed
]
# Update information
@@ -106,7 +93,7 @@ def filter_data(coresys: CoreSys, event: Event, hint: Hint) -> Event | None:
{
"supervisor": {
"channel": coresys.updater.channel,
"installed_addons": installed_apps,
"installed_addons": installed_addons,
},
"host": {
"arch": str(coresys.arch.default),
@@ -136,7 +123,7 @@ def filter_data(coresys: CoreSys, event: Event, hint: Hint) -> Event | None:
attr.asdict(suggestion)
for suggestion in coresys.resolution.suggestions
],
"unhealthy": sorted(coresys.resolution.unhealthy),
"unhealthy": coresys.resolution.unhealthy,
},
"store": {
"repositories": coresys.store.repository_urls,

View File

@@ -5,12 +5,12 @@ from datetime import datetime, timedelta
import logging
from typing import cast
from ..addons.const import APP_UPDATE_CONDITIONS
from ..addons.const import ADDON_UPDATE_CONDITIONS
from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
from ..const import ATTR_TYPE, AppState
from ..const import ATTR_TYPE, AddonState
from ..coresys import CoreSysAttributes
from ..exceptions import (
AppsError,
AddonsError,
BackupFileNotFoundError,
HomeAssistantError,
HomeAssistantWSError,
@@ -31,6 +31,7 @@ HASS_WATCHDOG_REANIMATE_FAILURES = "HASS_WATCHDOG_REANIMATE_FAILURES"
HASS_WATCHDOG_MAX_API_ATTEMPTS = 2
HASS_WATCHDOG_MAX_REANIMATE_ATTEMPTS = 5
RUN_UPDATE_SUPERVISOR = 86400 # 24h
RUN_UPDATE_ADDONS = 57600
RUN_UPDATE_CLI = 43200 # 12h, staggered +2min per plugin
RUN_UPDATE_DNS = 43320
@@ -41,7 +42,7 @@ RUN_UPDATE_OBSERVER = 43680
RUN_RELOAD_ADDONS = 10800
RUN_RELOAD_BACKUPS = 72000
RUN_RELOAD_HOST = 7600
RUN_RELOAD_UPDATER = 86400 # 24h
RUN_RELOAD_UPDATER = 27100
RUN_RELOAD_INGRESS = 930
RUN_RELOAD_MOUNTS = 900
@@ -71,7 +72,8 @@ class Tasks(CoreSysAttributes):
async def load(self):
"""Add Tasks to scheduler."""
# Update
self.sys_scheduler.register_task(self._update_apps, RUN_UPDATE_ADDONS)
self.sys_scheduler.register_task(self._update_addons, RUN_UPDATE_ADDONS)
self.sys_scheduler.register_task(self._update_supervisor, RUN_UPDATE_SUPERVISOR)
self.sys_scheduler.register_task(self._update_cli, RUN_UPDATE_CLI)
self.sys_scheduler.register_task(self._update_dns, RUN_UPDATE_DNS)
self.sys_scheduler.register_task(self._update_audio, RUN_UPDATE_AUDIO)
@@ -94,7 +96,7 @@ class Tasks(CoreSysAttributes):
self._watchdog_observer_application, RUN_WATCHDOG_OBSERVER_APPLICATION
)
self.sys_scheduler.register_task(
self._watchdog_app_application, RUN_WATCHDOG_ADDON_APPLICATON
self._watchdog_addon_application, RUN_WATCHDOG_ADDON_APPLICATON
)
# Cleanup
@@ -106,60 +108,90 @@ class Tasks(CoreSysAttributes):
@Job(
name="tasks_update_addons",
conditions=APP_UPDATE_CONDITIONS + [JobCondition.RUNNING],
conditions=ADDON_UPDATE_CONDITIONS + [JobCondition.RUNNING],
)
async def _update_apps(self):
"""Check if an update is available for an App and update it."""
for app in self.sys_apps.all:
if not app.is_installed or not app.auto_update:
async def _update_addons(self):
"""Check if an update is available for an Add-on and update it."""
for addon in self.sys_addons.all:
if not addon.is_installed or not addon.auto_update:
continue
# Evaluate available updates
if not app.need_update:
if not addon.need_update:
continue
if not app.auto_update_available:
if not addon.auto_update_available:
_LOGGER.debug(
"Not updating app %s from %s to %s as that would cross a known breaking version",
app.slug,
app.version,
app.latest_version,
"Not updating add-on %s from %s to %s as that would cross a known breaking version",
addon.slug,
addon.version,
addon.latest_version,
)
continue
# Delay auto-updates for a day in case of issues
if utcnow() < app.latest_version_timestamp + timedelta(days=1):
if utcnow() < addon.latest_version_timestamp + timedelta(days=1):
_LOGGER.debug(
"Not updating app %s from %s to %s as the latest version is less than a day old",
app.slug,
app.version,
app.latest_version,
"Not updating add-on %s from %s to %s as the latest version is less than a day old",
addon.slug,
addon.version,
addon.latest_version,
)
continue
if not app.test_update_schema():
_LOGGER.warning("App %s will be ignored, schema tests failed", app.slug)
if not addon.test_update_schema():
_LOGGER.warning(
"Add-on %s will be ignored, schema tests failed", addon.slug
)
continue
_LOGGER.info("App auto update process %s", app.slug)
# Call Home Assistant Core to update app to make sure that backups
_LOGGER.info("Add-on auto update process %s", addon.slug)
# Call Home Assistant Core to update add-on to make sure that backups
# get created through the Home Assistant Core API (categorized correctly).
# Ultimately auto updates should be handled by Home Assistant Core itself
# through a update entity feature.
message = {
ATTR_TYPE: WSType.HASSIO_UPDATE_ADDON,
"addon": app.slug,
"addon": addon.slug,
"backup": True,
}
_LOGGER.debug(
"Sending update app WebSocket command to Home Assistant Core: %s",
"Sending update add-on WebSocket command to Home Assistant Core: %s",
message,
)
try:
await self.sys_homeassistant.websocket.async_send_command(message)
except HomeAssistantWSError as err:
_LOGGER.warning(
"Could not send app update command to Home Assistant Core: %s",
"Could not send add-on update command to Home Assistant Core: %s",
err,
)
@Job(
name="tasks_update_supervisor",
conditions=[
JobCondition.AUTO_UPDATE,
JobCondition.FREE_SPACE,
JobCondition.HEALTHY,
JobCondition.INTERNET_HOST,
JobCondition.OS_SUPPORTED,
JobCondition.RUNNING,
JobCondition.ARCHITECTURE_SUPPORTED,
],
concurrency=JobConcurrency.REJECT,
)
async def _update_supervisor(self):
"""Check and run update of Supervisor Supervisor."""
if not self.sys_supervisor.need_update:
return
_LOGGER.info(
"Found new Supervisor version %s, updating",
self.sys_supervisor.latest_version,
)
# Errors are logged by the exceptions, we can't really do something
# if an update fails here.
with suppress(SupervisorUpdateError):
await self.sys_supervisor.update()
async def _watchdog_homeassistant_api(self):
"""Create scheduler task for monitoring running state of API.
@@ -309,37 +341,37 @@ class Tasks(CoreSysAttributes):
except ObserverError:
_LOGGER.error("Observer watchdog reanimation failed!")
async def _watchdog_app_application(self):
async def _watchdog_addon_application(self):
"""Check running state of the application and start if they is hangs."""
for app in self.sys_apps.installed:
for addon in self.sys_addons.installed:
# if watchdog need looking for
if not app.watchdog or app.state != AppState.STARTED:
if not addon.watchdog or addon.state != AddonState.STARTED:
continue
# Init cache data
retry_scan = self._cache.get(app.slug, 0)
retry_scan = self._cache.get(addon.slug, 0)
# if App have running actions / Application work
if app.in_progress or await app.watchdog_application():
# if Addon have running actions / Application work
if addon.in_progress or await addon.watchdog_application():
continue
# Look like we run into a problem
retry_scan += 1
if retry_scan == 1:
self._cache[app.slug] = retry_scan
self._cache[addon.slug] = retry_scan
_LOGGER.warning(
"Watchdog missing application response from %s", app.slug
"Watchdog missing application response from %s", addon.slug
)
return
_LOGGER.warning("Watchdog found a problem with %s application!", app.slug)
_LOGGER.warning("Watchdog found a problem with %s application!", addon.slug)
try:
await (await app.restart())
except AppsError as err:
_LOGGER.error("%s watchdog reanimation failed with %s", app.slug, err)
await (await addon.restart())
except AddonsError as err:
_LOGGER.error("%s watchdog reanimation failed with %s", addon.slug, err)
await async_capture_exception(err)
finally:
self._cache[app.slug] = 0
self._cache[addon.slug] = 0
@Job(
name="tasks_reload_store",
@@ -350,7 +382,7 @@ class Tasks(CoreSysAttributes):
],
)
async def _reload_store(self) -> None:
"""Reload store and check for app updates."""
"""Reload store and check for addon updates."""
await self.sys_store.reload()
@Job(name="tasks_reload_updater")
@@ -358,34 +390,9 @@ class Tasks(CoreSysAttributes):
"""Check for new versions of Home Assistant, Supervisor, OS, etc."""
await self.sys_updater.reload()
# If there's a new version of supervisor, update immediately
# If there's a new version of supervisor, start update immediately
if self.sys_supervisor.need_update:
await self._auto_update_supervisor()
@Job(
name="tasks_update_supervisor",
conditions=[
JobCondition.AUTO_UPDATE,
JobCondition.FREE_SPACE,
JobCondition.HEALTHY,
JobCondition.INTERNET_HOST,
JobCondition.OS_SUPPORTED,
JobCondition.RUNNING,
JobCondition.ARCHITECTURE_SUPPORTED,
],
concurrency=JobConcurrency.REJECT,
)
async def _auto_update_supervisor(self):
"""Auto update Supervisor if enabled."""
if not self.sys_supervisor.need_update:
return
_LOGGER.info(
"Found new Supervisor version %s, updating",
self.sys_supervisor.latest_version,
)
with suppress(SupervisorUpdateError):
await self.sys_supervisor.update()
await self._update_supervisor()
@Job(name="tasks_core_backup_cleanup", conditions=[JobCondition.HEALTHY])
async def _core_backup_cleanup(self) -> None:

View File

@@ -12,10 +12,12 @@ from voluptuous import Coerce
from ..coresys import CoreSys, CoreSysAttributes
from ..dbus.const import (
DBUS_ATTR_ACTIVE_STATE,
DBUS_ATTR_DESCRIPTION,
DBUS_ATTR_OPTIONS,
DBUS_ATTR_TYPE,
DBUS_ATTR_WHAT,
DBUS_IFACE_SYSTEMD_UNIT,
StartUnitMode,
StopUnitMode,
UnitActiveState,
@@ -142,7 +144,7 @@ class Mount(CoreSysAttributes, ABC):
@property
def container_where(self) -> PurePath | None:
"""Return where this is made available in managed containers (core, apps, etc.).
"""Return where this is made available in managed containers (core, addons, etc.).
This returns none if it is not made available in managed containers.
"""
@@ -177,7 +179,7 @@ class Mount(CoreSysAttributes, ABC):
await self.mount()
return
await self._update_state_await(unit)
await self._update_state_await(unit, not_state=UnitActiveState.ACTIVATING)
# If mount is not available, try to reload it
if not await self.is_mounted():
@@ -223,20 +225,29 @@ class Mount(CoreSysAttributes, ABC):
async def _update_state_await(
self,
unit: SystemdUnit,
expected_states: set[UnitActiveState] | None = None,
expected_states: list[UnitActiveState] | None = None,
not_state: UnitActiveState = UnitActiveState.ACTIVATING,
) -> None:
"""Update state info about mount from dbus. Wait for one of expected_states to appear."""
if expected_states is None:
expected_states = {
UnitActiveState.ACTIVE,
UnitActiveState.FAILED,
UnitActiveState.INACTIVE,
}
"""Update state info about mount from dbus. Wait for one of expected_states to appear or state to change from not_state."""
try:
async with asyncio.timeout(30):
self._state = await unit.wait_for_active_state(expected_states)
async with asyncio.timeout(30), unit.properties_changed() as signal:
await self._update_state(unit)
while (
expected_states
and self.state not in expected_states
or not expected_states
and self.state == not_state
):
prop_change_signal = await signal.wait_for_signal()
if (
prop_change_signal[0] == DBUS_IFACE_SYSTEMD_UNIT
and DBUS_ATTR_ACTIVE_STATE in prop_change_signal[1]
):
self._state = prop_change_signal[1][
DBUS_ATTR_ACTIVE_STATE
].value
except TimeoutError:
await self._update_state(unit)
_LOGGER.warning(
"Mount %s still in state %s after waiting for 30 seconds to complete",
self.name,
@@ -289,7 +300,7 @@ class Mount(CoreSysAttributes, ABC):
) from err
if unit := await self._update_unit():
await self._update_state_await(unit)
await self._update_state_await(unit, not_state=UnitActiveState.ACTIVATING)
if not await self.is_mounted():
raise MountActivationError(
@@ -309,7 +320,7 @@ class Mount(CoreSysAttributes, ABC):
await self.sys_dbus.systemd.stop_unit(self.unit_name, StopUnitMode.FAIL)
await self._update_state_await(
unit, {UnitActiveState.INACTIVE, UnitActiveState.FAILED}
unit, [UnitActiveState.INACTIVE, UnitActiveState.FAILED]
)
if self.state == UnitActiveState.FAILED:
@@ -338,7 +349,9 @@ class Mount(CoreSysAttributes, ABC):
await self._restart()
else:
if unit := await self._update_unit():
await self._update_state_await(unit)
await self._update_state_await(
unit, not_state=UnitActiveState.ACTIVATING
)
if not await self.is_mounted():
_LOGGER.info(
@@ -367,7 +380,7 @@ class Mount(CoreSysAttributes, ABC):
) from err
if unit := await self._update_unit():
await self._update_state_await(unit)
await self._update_state_await(unit, not_state=UnitActiveState.ACTIVATING)
if not await self.is_mounted():
raise MountActivationError(

View File

@@ -22,6 +22,7 @@ from ..exceptions import (
)
from ..jobs.const import JobConcurrency, JobCondition
from ..jobs.decorator import Job
from ..utils.sentry import async_capture_exception
from .data_disk import DataDisk
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -362,6 +363,7 @@ class OSManager(CoreSysAttributes):
RaucState.ACTIVE, self.get_slot_name(boot_name)
)
except DBusError as err:
await async_capture_exception(err)
raise HassOSSlotUpdateError(
f"Can't mark {boot_name} as active!", _LOGGER.error
) from err

View File

@@ -336,7 +336,7 @@ class PluginDns(PluginBase):
# Reset loop protection
self._loop = False
await self.sys_apps.sync_dns()
await self.sys_addons.sync_dns()
async def watchdog_container(self, event: DockerContainerStateEvent) -> None:
"""Check for loop on failure before processing state change event."""

View File

@@ -3,7 +3,7 @@
from datetime import timedelta
import logging
from ...const import AppState, CoreState
from ...const import AddonState, CoreState
from ...coresys import CoreSys
from ...exceptions import PwnedConnectivityError, PwnedError, PwnedSecret
from ...jobs.const import JobCondition, JobThrottle
@@ -16,11 +16,11 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
def setup(coresys: CoreSys) -> CheckBase:
"""Check setup function."""
return CheckAppPwned(coresys)
return CheckAddonPwned(coresys)
class CheckAppPwned(CheckBase):
"""CheckAppPwned class for check."""
class CheckAddonPwned(CheckBase):
"""CheckAddonPwned class for check."""
@Job(
name="check_addon_pwned_run",
@@ -35,8 +35,8 @@ class CheckAppPwned(CheckBase):
return
await self.sys_homeassistant.secrets.reload()
for app in self.sys_apps.installed:
secrets = app.pwned
for addon in self.sys_addons.installed:
secrets = addon.pwned
if not secrets:
continue
@@ -49,7 +49,7 @@ class CheckAppPwned(CheckBase):
return
except PwnedSecret:
# Check possible suggestion
if app.state == AppState.STARTED:
if addon.state == AddonState.STARTED:
suggestions = [SuggestionType.EXECUTE_STOP]
else:
suggestions = None
@@ -57,7 +57,7 @@ class CheckAppPwned(CheckBase):
self.sys_resolution.create_issue(
IssueType.PWNED,
ContextType.ADDON,
reference=app.slug,
reference=addon.slug,
suggestions=suggestions,
)
break
@@ -71,11 +71,11 @@ class CheckAppPwned(CheckBase):
return False
# Uninstalled
if not (app := self.sys_apps.get_local_only(reference)):
if not (addon := self.sys_addons.get_local_only(reference)):
return False
# Not in use anymore
secrets = app.pwned
secrets = addon.pwned
if not secrets:
return False

View File

@@ -1,6 +1,6 @@
"""Helpers to check for deprecated apps."""
"""Helpers to check for deprecated addons."""
from ...const import AppStage, CoreState
from ...const import AddonStage, CoreState
from ...coresys import CoreSys
from ..const import ContextType, IssueType, SuggestionType
from .base import CheckBase
@@ -8,20 +8,20 @@ from .base import CheckBase
def setup(coresys: CoreSys) -> CheckBase:
"""Check setup function."""
return CheckDeprecatedApp(coresys)
return CheckDeprecatedAddon(coresys)
class CheckDeprecatedApp(CheckBase):
"""CheckDeprecatedApp class for check."""
class CheckDeprecatedAddon(CheckBase):
"""CheckDeprecatedAddon class for check."""
async def run_check(self) -> None:
"""Run check if not affected by issue."""
for app in self.sys_apps.installed:
if app.stage == AppStage.DEPRECATED:
for addon in self.sys_addons.installed:
if addon.stage == AddonStage.DEPRECATED:
self.sys_resolution.create_issue(
IssueType.DEPRECATED_ADDON,
ContextType.ADDON,
reference=app.slug,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REMOVE],
)
@@ -30,8 +30,8 @@ class CheckDeprecatedApp(CheckBase):
if not reference:
return False
app = self.sys_apps.get_local_only(reference)
return app is not None and app.stage == AppStage.DEPRECATED
addon = self.sys_addons.get_local_only(reference)
return addon is not None and addon.stage == AddonStage.DEPRECATED
@property
def issue(self) -> IssueType:

View File

@@ -1,6 +1,6 @@
"""Helpers to check for apps using deprecated compatibility entries."""
"""Helpers to check for add-ons using deprecated compatibility entries."""
from ...const import AppStage, CoreState
from ...const import AddonStage, CoreState
from ...coresys import CoreSys
from ..const import ContextType, IssueType, SuggestionType
from .base import CheckBase
@@ -8,25 +8,25 @@ from .base import CheckBase
def setup(coresys: CoreSys) -> CheckBase:
"""Check setup function."""
return CheckDeprecatedArchApp(coresys)
return CheckDeprecatedArchAddon(coresys)
class CheckDeprecatedArchApp(CheckBase):
"""CheckDeprecatedArchApp class for check."""
class CheckDeprecatedArchAddon(CheckBase):
"""CheckDeprecatedArchAddon class for check."""
async def run_check(self) -> None:
"""Run check if not affected by issue."""
for app in self.sys_apps.installed:
if app.stage == AppStage.DEPRECATED:
for addon in self.sys_addons.installed:
if addon.stage == AddonStage.DEPRECATED:
continue
if (app.has_deprecated_arch and not app.has_supported_arch) or (
app.has_deprecated_machine and not app.has_supported_machine
if (addon.has_deprecated_arch and not addon.has_supported_arch) or (
addon.has_deprecated_machine and not addon.has_supported_machine
):
self.sys_resolution.create_issue(
IssueType.DEPRECATED_ARCH_ADDON,
ContextType.ADDON,
reference=app.slug,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REMOVE],
)
@@ -35,13 +35,13 @@ class CheckDeprecatedArchApp(CheckBase):
if not reference:
return False
app = self.sys_apps.get_local_only(reference)
addon = self.sys_addons.get_local_only(reference)
return (
app is not None
and app.stage != AppStage.DEPRECATED
addon is not None
and addon.stage != AddonStage.DEPRECATED
and (
(app.has_deprecated_arch and not app.has_supported_arch)
or (app.has_deprecated_machine and not app.has_supported_machine)
(addon.has_deprecated_arch and not addon.has_supported_arch)
or (addon.has_deprecated_machine and not addon.has_supported_machine)
)
)

View File

@@ -1,4 +1,4 @@
"""Helpers to check for detached apps due to repo misisng."""
"""Helpers to check for detached addons due to repo misisng."""
from ...const import CoreState
from ...coresys import CoreSys
@@ -8,20 +8,23 @@ from .base import CheckBase
def setup(coresys: CoreSys) -> CheckBase:
"""Check setup function."""
return CheckDetachedAppMissing(coresys)
return CheckDetachedAddonMissing(coresys)
class CheckDetachedAppMissing(CheckBase):
"""CheckDetachedAppMissing class for check."""
class CheckDetachedAddonMissing(CheckBase):
"""CheckDetachedAddonMissing class for check."""
async def run_check(self) -> None:
"""Run check if not affected by issue."""
for app in self.sys_apps.installed:
if app.is_detached and app.repository not in self.sys_store.repositories:
for addon in self.sys_addons.installed:
if (
addon.is_detached
and addon.repository not in self.sys_store.repositories
):
self.sys_resolution.create_issue(
IssueType.DETACHED_ADDON_MISSING,
ContextType.ADDON,
reference=app.slug,
reference=addon.slug,
)
async def approve_check(self, reference: str | None = None) -> bool:
@@ -29,8 +32,8 @@ class CheckDetachedAppMissing(CheckBase):
if not reference:
return False
app = self.sys_apps.get_local_only(reference)
return app is not None and app.is_detached
addon = self.sys_addons.get_local_only(reference)
return addon is not None and addon.is_detached
@property
def issue(self) -> IssueType:

View File

@@ -1,4 +1,4 @@
"""Helpers to check for detached apps due to removal from repo."""
"""Helpers to check for detached addons due to removal from repo."""
from ...const import CoreState
from ...coresys import CoreSys
@@ -8,20 +8,20 @@ from .base import CheckBase
def setup(coresys: CoreSys) -> CheckBase:
"""Check setup function."""
return CheckDetachedAppRemoved(coresys)
return CheckDetachedAddonRemoved(coresys)
class CheckDetachedAppRemoved(CheckBase):
"""CheckDetachedAppRemoved class for check."""
class CheckDetachedAddonRemoved(CheckBase):
"""CheckDetachedAddonRemoved class for check."""
async def run_check(self) -> None:
"""Run check if not affected by issue."""
for app in self.sys_apps.installed:
if app.is_detached and app.repository in self.sys_store.repositories:
for addon in self.sys_addons.installed:
if addon.is_detached and addon.repository in self.sys_store.repositories:
self.sys_resolution.create_issue(
IssueType.DETACHED_ADDON_REMOVED,
ContextType.ADDON,
reference=app.slug,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REMOVE],
)
@@ -30,8 +30,8 @@ class CheckDetachedAppRemoved(CheckBase):
if not reference:
return False
app = self.sys_apps.get_local_only(reference)
return app is not None and app.is_detached
addon = self.sys_addons.get_local_only(reference)
return addon is not None and addon.is_detached
@property
def issue(self) -> IssueType:

View File

@@ -10,24 +10,24 @@ from ..data import Issue
from .base import CheckBase
def _check_container(container: DockerInterface, app=None) -> bool:
def _check_container(container: DockerInterface, addon=None) -> bool:
"""Check if container has mount propagation issues requiring recreate.
For apps, only validates mounts explicitly configured (not Docker VOLUMEs).
For add-ons, only validates mounts explicitly configured (not Docker VOLUMEs).
For Core/plugins, validates all /media and /share mounts.
"""
# For apps, check mounts against their actual configured targets
if app is not None:
app_mapping = app.map_volumes
# For add-ons, check mounts against their actual configured targets
if addon is not None:
addon_mapping = addon.map_volumes
configured_targets = set()
# Get actual target paths from app configuration
if MappingType.MEDIA in app_mapping:
target = app_mapping[MappingType.MEDIA].path or PATH_MEDIA.as_posix()
# Get actual target paths from add-on configuration
if MappingType.MEDIA in addon_mapping:
target = addon_mapping[MappingType.MEDIA].path or PATH_MEDIA.as_posix()
configured_targets.add(target)
if MappingType.SHARE in app_mapping:
target = app_mapping[MappingType.SHARE].path or PATH_SHARE.as_posix()
if MappingType.SHARE in addon_mapping:
target = addon_mapping[MappingType.SHARE].path or PATH_SHARE.as_posix()
configured_targets.add(target)
if not configured_targets:
@@ -82,11 +82,11 @@ class CheckDockerConfig(CheckBase):
if _check_container(self.sys_homeassistant.core.instance):
new_issues.add(Issue(IssueType.DOCKER_CONFIG, ContextType.CORE))
for app in self.sys_apps.installed:
if _check_container(app.instance, app):
for addon in self.sys_addons.installed:
if _check_container(addon.instance, addon):
new_issues.add(
Issue(
IssueType.DOCKER_CONFIG, ContextType.ADDON, reference=app.slug
IssueType.DOCKER_CONFIG, ContextType.ADDON, reference=addon.slug
)
)

View File

@@ -63,7 +63,7 @@ class EvaluateContainer(EvaluateBase):
self.sys_homeassistant.image,
self.sys_supervisor.image or self.sys_supervisor.default_image,
*(plugin.image for plugin in self.sys_plugins.all_plugins if plugin.image),
*(app.image for app in self.sys_apps.installed if app.image),
*(addon.image for addon in self.sys_addons.installed if addon.image),
ADDON_BUILDER_IMAGE,
}

View File

@@ -48,7 +48,7 @@ class EvaluateRestartPolicy(EvaluateBase):
for plug in self.sys_plugins.all_plugins
if plug != self.sys_plugins.observer
},
*{app.instance for app in self.sys_apps.installed},
*{addon.instance for addon in self.sys_addons.installed},
}
@property

View File

@@ -1,8 +1,8 @@
"""Helpers to fix app by disabling boot."""
"""Helpers to fix addon by disabling boot."""
import logging
from ...const import AppBoot
from ...const import AddonBoot
from ...coresys import CoreSys
from ..const import ContextType, IssueType, SuggestionType
from .base import FixupBase
@@ -12,10 +12,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
def setup(coresys: CoreSys) -> FixupBase:
"""Check setup function."""
return FixupAppDisableBoot(coresys)
return FixupAddonDisableBoot(coresys)
class FixupAppDisableBoot(FixupBase):
class FixupAddonDisableBoot(FixupBase):
"""Storage class for fixup."""
async def process_fixup(self, reference: str | None = None) -> None:
@@ -23,12 +23,12 @@ class FixupAppDisableBoot(FixupBase):
if not reference:
return
if not (app := self.sys_apps.get_local_only(reference)):
_LOGGER.info("Cannot change app %s as it does not exist", reference)
if not (addon := self.sys_addons.get_local_only(reference)):
_LOGGER.info("Cannot change addon %s as it does not exist", reference)
return
# Disable boot on app
app.boot = AppBoot.MANUAL
# Disable boot on addon
addon.boot = AddonBoot.MANUAL
@property
def suggestion(self) -> SuggestionType:

View File

@@ -1,4 +1,4 @@
"""Helper to fix an issue with an app by rebuilding its container."""
"""Helper to fix an issue with an addon by rebuilding its container."""
import logging
@@ -12,39 +12,39 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
def setup(coresys: CoreSys) -> FixupBase:
"""Check setup function."""
return FixupAppExecuteRebuild(coresys)
return FixupAddonExecuteRebuild(coresys)
class FixupAppExecuteRebuild(FixupBase):
class FixupAddonExecuteRebuild(FixupBase):
"""Storage class for fixup."""
async def process_fixup(self, reference: str | None = None) -> None:
"""Rebuild the app's container."""
"""Rebuild the addon's container."""
if not reference:
return
app = self.sys_apps.get_local_only(reference)
if not app:
addon = self.sys_addons.get_local_only(reference)
if not addon:
_LOGGER.info(
"Cannot rebuild app %s as it is not installed, dismissing suggestion",
"Cannot rebuild addon %s as it is not installed, dismissing suggestion",
reference,
)
return
state = await app.instance.current_state()
state = await addon.instance.current_state()
if state == ContainerState.UNKNOWN:
_LOGGER.info(
"Container for app %s does not exist, it will be rebuilt when started next",
"Container for addon %s does not exist, it will be rebuilt when started next",
reference,
)
elif state == ContainerState.STOPPED:
_LOGGER.info(
"App %s is stopped, removing its container so it rebuilds when started next",
"Addon %s is stopped, removing its container so it rebuilds when started next",
reference,
)
await app.stop()
await addon.stop()
else:
await (await app.restart())
await (await addon.restart())
@property
def suggestion(self) -> SuggestionType:

View File

@@ -1,9 +1,9 @@
"""Helpers to fix app issue by removing it."""
"""Helpers to fix addon issue by removing it."""
import logging
from ...coresys import CoreSys
from ...exceptions import AppsError, ResolutionFixupError
from ...exceptions import AddonsError, ResolutionFixupError
from ..const import ContextType, IssueType, SuggestionType
from .base import FixupBase
@@ -12,10 +12,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
def setup(coresys: CoreSys) -> FixupBase:
"""Check setup function."""
return FixupAppExecuteRemove(coresys)
return FixupAddonExecuteRemove(coresys)
class FixupAppExecuteRemove(FixupBase):
class FixupAddonExecuteRemove(FixupBase):
"""Storage class for fixup."""
async def process_fixup(self, reference: str | None = None) -> None:
@@ -23,15 +23,15 @@ class FixupAppExecuteRemove(FixupBase):
if not reference:
return
if not (app := self.sys_apps.get_local_only(reference)):
_LOGGER.info("App %s already removed", reference)
if not (addon := self.sys_addons.get_local_only(reference)):
_LOGGER.info("Addon %s already removed", reference)
return
# Remove app
_LOGGER.info("Remove app: %s", reference)
# Remove addon
_LOGGER.info("Remove addon: %s", reference)
try:
await app.uninstall(remove_config=False)
except AppsError as err:
await addon.uninstall(remove_config=False)
except AddonsError as err:
_LOGGER.error("Could not remove %s due to %s", reference, err)
raise ResolutionFixupError() from None

View File

@@ -1,4 +1,4 @@
"""Helper to fix missing image for app."""
"""Helper to fix missing image for addon."""
import logging
@@ -12,39 +12,39 @@ MAX_AUTO_ATTEMPTS = 5
def setup(coresys: CoreSys) -> FixupBase:
"""Check setup function."""
return FixupAppExecuteRepair(coresys)
return FixupAddonExecuteRepair(coresys)
class FixupAppExecuteRepair(FixupBase):
class FixupAddonExecuteRepair(FixupBase):
"""Storage class for fixup."""
def __init__(self, coresys: CoreSys) -> None:
"""Initialize the app execute repair fixup class."""
"""Initialize the add-on execute repair fixup class."""
super().__init__(coresys)
self.attempts = 0
async def process_fixup(self, reference: str | None = None) -> None:
"""Pull the apps image."""
"""Pull the addons image."""
if not reference:
return
app = self.sys_apps.get_local_only(reference)
if not app:
addon = self.sys_addons.get_local_only(reference)
if not addon:
_LOGGER.info(
"Cannot repair app %s as it is not installed, dismissing suggestion",
"Cannot repair addon %s as it is not installed, dismissing suggestion",
reference,
)
return
if await app.instance.exists():
if await addon.instance.exists():
_LOGGER.info(
"App %s does not need repair, dismissing suggestion", reference
"Addon %s does not need repair, dismissing suggestion", reference
)
return
_LOGGER.info("Installing image for app %s", reference)
_LOGGER.info("Installing image for addon %s", reference)
self.attempts += 1
await app.instance.install(app.version)
await addon.instance.install(addon.version)
@property
def suggestion(self) -> SuggestionType:

View File

@@ -1,9 +1,9 @@
"""Helpers to fix app by restarting it."""
"""Helpers to fix addon by restarting it."""
import logging
from ...coresys import CoreSys
from ...exceptions import AppsError, ResolutionFixupError
from ...exceptions import AddonsError, ResolutionFixupError
from ..const import ContextType, IssueType, SuggestionType
from .base import FixupBase
@@ -12,10 +12,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
def setup(coresys: CoreSys) -> FixupBase:
"""Check setup function."""
return FixupAppExecuteRestart(coresys)
return FixupAddonExecuteRestart(coresys)
class FixupAppExecuteRestart(FixupBase):
class FixupAddonExecuteRestart(FixupBase):
"""Storage class for fixup."""
async def process_fixup(self, reference: str | None = None) -> None:
@@ -23,23 +23,23 @@ class FixupAppExecuteRestart(FixupBase):
if not reference:
return
if not (app := self.sys_apps.get_local_only(reference)):
_LOGGER.info("Cannot restart app %s as it does not exist", reference)
if not (addon := self.sys_addons.get_local_only(reference)):
_LOGGER.info("Cannot restart addon %s as it does not exist", reference)
return
# Stop app
# Stop addon
try:
await app.stop()
except AppsError as err:
await addon.stop()
except AddonsError as err:
_LOGGER.error("Could not stop %s due to %s", reference, err)
raise ResolutionFixupError() from None
# Start app
# Start addon
# Removing the container has already fixed the issue and dismissed it
# So any errors on startup are just logged. We won't wait on the startup task either
try:
await app.start()
except AppsError as err:
await addon.start()
except AddonsError as err:
_LOGGER.error("Could not restart %s due to %s", reference, err)
@property

View File

@@ -1,10 +1,10 @@
"""Helpers to fix app by starting it."""
"""Helpers to fix addon by starting it."""
import logging
from ...const import AppState
from ...const import AddonState
from ...coresys import CoreSys
from ...exceptions import AppsError, ResolutionFixupError
from ...exceptions import AddonsError, ResolutionFixupError
from ..const import ContextType, IssueType, SuggestionType
from .base import FixupBase
@@ -13,10 +13,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
def setup(coresys: CoreSys) -> FixupBase:
"""Check setup function."""
return FixupAppExecuteStart(coresys)
return FixupAddonExecuteStart(coresys)
class FixupAppExecuteStart(FixupBase):
class FixupAddonExecuteStart(FixupBase):
"""Storage class for fixup."""
async def process_fixup(self, reference: str | None = None) -> None:
@@ -24,21 +24,21 @@ class FixupAppExecuteStart(FixupBase):
if not reference:
return
if not (app := self.sys_apps.get_local_only(reference)):
_LOGGER.info("Cannot start app %s as it does not exist", reference)
if not (addon := self.sys_addons.get_local_only(reference)):
_LOGGER.info("Cannot start addon %s as it does not exist", reference)
return
# Start app
# Start addon
try:
start_task = await app.start()
except AppsError as err:
start_task = await addon.start()
except AddonsError as err:
_LOGGER.error("Could not start %s due to %s", reference, err)
raise ResolutionFixupError() from None
# Wait for app start. If it ends up in error or unknown state it's not fixed
# Wait for addon start. If it ends up in error or unknown state it's not fixed
await start_task
if app.state in {AppState.ERROR, AppState.UNKNOWN}:
_LOGGER.error("App %s could not start successfully", reference)
if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}:
_LOGGER.error("Addon %s could not start successfully", reference)
raise ResolutionFixupError()
@property

View File

@@ -47,8 +47,8 @@ class ResolutionManager(FileConfiguration, CoreSysAttributes):
self._suggestions: list[Suggestion] = []
self._issues: list[Issue] = []
self._unsupported: set[UnsupportedReason] = set()
self._unhealthy: set[UnhealthyReason] = set()
self._unsupported: list[UnsupportedReason] = []
self._unhealthy: list[UnhealthyReason] = []
# Map suggestion UUID to event listeners (list)
self._suggestion_listeners: dict[str, list[EventListener]] = {}
@@ -127,34 +127,32 @@ class ResolutionManager(FileConfiguration, CoreSysAttributes):
)
@property
def unsupported(self) -> set[UnsupportedReason]:
"""Return a set of unsupported reasons."""
def unsupported(self) -> list[UnsupportedReason]:
"""Return a list of unsupported reasons."""
return self._unsupported
def add_unsupported_reason(self, reason: UnsupportedReason) -> None:
"""Add a reason for unsupported."""
if reason in self._unsupported:
return
self._unsupported.add(reason)
self.sys_homeassistant.websocket.supervisor_event(
WSEvent.SUPPORTED_CHANGED,
attr.asdict(SupportedChanged(False, sorted(self.unsupported))),
)
if reason not in self._unsupported:
self._unsupported.append(reason)
self.sys_homeassistant.websocket.supervisor_event(
WSEvent.SUPPORTED_CHANGED,
attr.asdict(SupportedChanged(False, self.unsupported)),
)
@property
def unhealthy(self) -> set[UnhealthyReason]:
"""Return a set of unhealthy reasons."""
def unhealthy(self) -> list[UnhealthyReason]:
"""Return a list of unhealthy reasons."""
return self._unhealthy
def add_unhealthy_reason(self, reason: UnhealthyReason) -> None:
"""Add a reason for unhealthy."""
if reason in self._unhealthy:
return
self._unhealthy.add(reason)
self.sys_homeassistant.websocket.supervisor_event(
WSEvent.HEALTH_CHANGED,
attr.asdict(HealthChanged(False, sorted(self.unhealthy))),
)
if reason not in self._unhealthy:
self._unhealthy.append(reason)
self.sys_homeassistant.websocket.supervisor_event(
WSEvent.HEALTH_CHANGED,
attr.asdict(HealthChanged(False, self.unhealthy)),
)
_OSERROR_UNHEALTHY_REASONS: dict[int, UnhealthyReason] = {
errno.EBADMSG: UnhealthyReason.OSERROR_BAD_MESSAGE,
@@ -303,9 +301,7 @@ class ResolutionManager(FileConfiguration, CoreSysAttributes):
self.sys_homeassistant.websocket.supervisor_event(
WSEvent.SUPPORTED_CHANGED,
attr.asdict(
SupportedChanged(
self.sys_core.supported, sorted(self.unsupported) or None
)
SupportedChanged(self.sys_core.supported, self.unsupported or None)
),
)

View File

@@ -1,6 +1,6 @@
"""Service API static data."""
ATTR_APP = "addon"
ATTR_ADDON = "addon"
ATTR_HOST = "host"
ATTR_PASSWORD = "password"
ATTR_PORT = "port"

View File

@@ -5,7 +5,7 @@ from typing import Any
import voluptuous as vol
from ..addons.addon import App
from ..addons.addon import Addon
from ..const import PROVIDE_SERVICE
from ..coresys import CoreSys, CoreSysAttributes
@@ -34,17 +34,17 @@ class ServiceInterface(CoreSysAttributes, ABC):
@property
def providers(self) -> list[str]:
"""Return name of service providers app."""
apps = []
for app in self.sys_apps.installed:
if app.services_role.get(self.slug) == PROVIDE_SERVICE:
apps.append(app.slug)
return apps
"""Return name of service providers addon."""
addons = []
for addon in self.sys_addons.installed:
if addon.services_role.get(self.slug) == PROVIDE_SERVICE:
addons.append(addon.slug)
return addons
@property
@abstractmethod
def active(self) -> list[str]:
"""Return list of app slug they have enable that."""
"""Return list of addon slug they have enable that."""
@property
def enabled(self) -> bool:
@@ -62,9 +62,9 @@ class ServiceInterface(CoreSysAttributes, ABC):
return None
@abstractmethod
async def set_service_data(self, app: App, data: dict[str, Any]) -> None:
async def set_service_data(self, addon: Addon, data: dict[str, Any]) -> None:
"""Write the data into service object."""
@abstractmethod
async def del_service_data(self, app: App) -> None:
async def del_service_data(self, addon: Addon) -> None:
"""Remove the data from service object."""

View File

@@ -5,11 +5,11 @@ from typing import Any
import voluptuous as vol
from ...addons.addon import App
from ...addons.addon import Addon
from ...exceptions import ServicesError
from ...validate import network_port
from ..const import (
ATTR_APP,
ATTR_ADDON,
ATTR_HOST,
ATTR_PASSWORD,
ATTR_PORT,
@@ -37,7 +37,7 @@ SCHEMA_SERVICE_MQTT = vol.Schema(
}
)
SCHEMA_CONFIG_MQTT = SCHEMA_SERVICE_MQTT.extend({vol.Required(ATTR_APP): str})
SCHEMA_CONFIG_MQTT = SCHEMA_SERVICE_MQTT.extend({vol.Required(ATTR_ADDON): str})
class MQTTService(ServiceInterface):
@@ -60,26 +60,26 @@ class MQTTService(ServiceInterface):
@property
def active(self) -> list[str]:
"""Return list of app slug they have enable that."""
"""Return list of addon slug they have enable that."""
if not self.enabled:
return []
return [self._data[ATTR_APP]]
return [self._data[ATTR_ADDON]]
async def set_service_data(self, app: App, data: dict[str, Any]) -> None:
async def set_service_data(self, addon: Addon, data: dict[str, Any]) -> None:
"""Write the data into service object."""
if self.enabled:
raise ServicesError(
f"There is already a MQTT service in use from {self._data[ATTR_APP]}",
f"There is already a MQTT service in use from {self._data[ATTR_ADDON]}",
_LOGGER.error,
)
self._data.update(data)
self._data[ATTR_APP] = app.slug
self._data[ATTR_ADDON] = addon.slug
_LOGGER.info("Set %s as service provider for mqtt", app.slug)
_LOGGER.info("Set %s as service provider for mqtt", addon.slug)
await self.save()
async def del_service_data(self, app: App) -> None:
async def del_service_data(self, addon: Addon) -> None:
"""Remove the data from service object."""
if not self.enabled:
raise ServicesError(

View File

@@ -5,11 +5,11 @@ from typing import Any
import voluptuous as vol
from ...addons.addon import App
from ...addons.addon import Addon
from ...exceptions import ServicesError
from ...validate import network_port
from ..const import (
ATTR_APP,
ATTR_ADDON,
ATTR_HOST,
ATTR_PASSWORD,
ATTR_PORT,
@@ -31,7 +31,7 @@ SCHEMA_SERVICE_MYSQL = vol.Schema(
}
)
SCHEMA_CONFIG_MYSQL = SCHEMA_SERVICE_MYSQL.extend({vol.Required(ATTR_APP): str})
SCHEMA_CONFIG_MYSQL = SCHEMA_SERVICE_MYSQL.extend({vol.Required(ATTR_ADDON): str})
class MySQLService(ServiceInterface):
@@ -54,26 +54,26 @@ class MySQLService(ServiceInterface):
@property
def active(self) -> list[str]:
"""Return list of app slug they have enable that."""
"""Return list of addon slug they have enable that."""
if not self.enabled:
return []
return [self._data[ATTR_APP]]
return [self._data[ATTR_ADDON]]
async def set_service_data(self, app: App, data: dict[str, Any]) -> None:
async def set_service_data(self, addon: Addon, data: dict[str, Any]) -> None:
"""Write the data into service object."""
if self.enabled:
raise ServicesError(
f"There is already a MySQL service in use from {self._data[ATTR_APP]}",
f"There is already a MySQL service in use from {self._data[ATTR_ADDON]}",
_LOGGER.error,
)
self._data.update(data)
self._data[ATTR_APP] = app.slug
self._data[ATTR_ADDON] = addon.slug
_LOGGER.info("Set %s as service provider for MySQL", app.slug)
_LOGGER.info("Set %s as service provider for MySQL", addon.slug)
await self.save()
async def del_service_data(self, app: App) -> None:
async def del_service_data(self, addon: Addon) -> None:
"""Remove the data from service object."""
if not self.enabled:
raise ServicesError("Can't remove not exists services", _LOGGER.warning)

View File

@@ -1,4 +1,4 @@
"""App Store handler."""
"""Add-on Store handler."""
import asyncio
from collections.abc import Awaitable
@@ -10,14 +10,14 @@ from ..exceptions import (
StoreError,
StoreGitCloneError,
StoreGitError,
StoreInvalidAppRepo,
StoreInvalidAddonRepo,
StoreJobError,
StoreNotFound,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..utils.common import FileConfiguration
from .addon import AppStore
from .addon import AddonStore
from .const import FILE_HASSIO_STORE, BuiltinRepository
from .data import StoreData
from .repository import Repository
@@ -27,7 +27,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
class StoreManager(CoreSysAttributes, FileConfiguration):
"""Manage apps inside Supervisor."""
"""Manage add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
@@ -38,7 +38,7 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
@property
def all(self) -> list[Repository]:
"""Return list of app repositories."""
"""Return list of add-on repositories."""
return list(self.repositories.values())
@property
@@ -63,7 +63,7 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
return self.repositories[slug]
async def load(self) -> None:
"""Start up app store management."""
"""Start up add-on store management."""
# Make sure the built-in repositories are all present
# This is especially important when adding new built-in repositories
# to make sure existing installations have them.
@@ -82,7 +82,7 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
on_condition=StoreJobError,
)
async def reload(self, repository: Repository | None = None) -> None:
"""Update apps from repository and reload list."""
"""Update add-ons from repository and reload list."""
# Make a copy to prevent race with other tasks
repositories = [repository] if repository else self.all.copy()
results: list[bool | BaseException] = await asyncio.gather(
@@ -101,19 +101,19 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
result,
)
# Update path cache for all apps in updated repos
# Update path cache for all addons in updated repos
if updated_repos:
await asyncio.gather(
*[
app.refresh_path_cache()
for app in self.sys_apps.store.values()
if app.repository in updated_repos
addon.refresh_path_cache()
for addon in self.sys_addons.store.values()
if addon.repository in updated_repos
]
)
# read data from repositories
await self.data.update()
await self._read_apps()
await self._read_addons()
@Job(
name="store_manager_add_repository",
@@ -186,7 +186,7 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
else:
if not await repository.validate():
if issue_on_error:
_LOGGER.error("%s is not a valid app repository", url)
_LOGGER.error("%s is not a valid add-on repository", url)
self.sys_resolution.create_issue(
IssueType.CORRUPT_REPOSITORY,
ContextType.STORE,
@@ -195,8 +195,8 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
)
else:
await repository.remove()
raise StoreInvalidAppRepo(
f"{url} is not a valid app repository", logger=_LOGGER.error
raise StoreInvalidAddonRepo(
f"{url} is not a valid add-on repository", logger=_LOGGER.error
)
# Add Repository to list
@@ -210,18 +210,18 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
# Persist changes
if persist:
await self.data.update()
await self._read_apps()
await self._read_addons()
async def remove_repository(self, repository: Repository, *, persist: bool = True):
"""Remove a repository."""
if repository.is_builtin:
raise StoreInvalidAppRepo(
raise StoreInvalidAddonRepo(
"Can't remove built-in repositories!", logger=_LOGGER.error
)
if repository.slug in (app.repository for app in self.sys_apps.installed):
if repository.slug in (addon.repository for addon in self.sys_addons.installed):
raise StoreError(
f"Can't remove '{repository.source}'. It's used by installed apps",
f"Can't remove '{repository.source}'. It's used by installed add-ons",
logger=_LOGGER.error,
)
await self.repositories.pop(repository.slug).remove()
@@ -230,7 +230,7 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
if persist:
await self.data.update()
await self._read_apps()
await self._read_addons()
@Job(name="store_manager_update_repositories")
async def update_repositories(
@@ -280,37 +280,37 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
# Always update data, even if there are errors, some changes may have succeeded
await self.data.update()
await self._read_apps()
await self._read_addons()
# Raise the first error we found (if any)
for error in add_errors + remove_errors:
if error:
raise error
async def _read_apps(self) -> None:
"""Reload apps inside store."""
all_apps = set(self.data.apps)
async def _read_addons(self) -> None:
"""Reload add-ons inside store."""
all_addons = set(self.data.addons)
# calc diff
add_apps = all_apps - set(self.sys_apps.store)
del_apps = set(self.sys_apps.store) - all_apps
add_addons = all_addons - set(self.sys_addons.store)
del_addons = set(self.sys_addons.store) - all_addons
_LOGGER.info(
"Loading apps from store: %d all - %d new - %d remove",
len(all_apps),
len(add_apps),
len(del_apps),
"Loading add-ons from store: %d all - %d new - %d remove",
len(all_addons),
len(add_addons),
len(del_addons),
)
# new apps
if add_apps:
# new addons
if add_addons:
cache_updates: list[Awaitable[None]] = []
for slug in add_apps:
self.sys_apps.store[slug] = AppStore(self.coresys, slug)
cache_updates.append(self.sys_apps.store[slug].refresh_path_cache())
for slug in add_addons:
self.sys_addons.store[slug] = AddonStore(self.coresys, slug)
cache_updates.append(self.sys_addons.store[slug].refresh_path_cache())
await asyncio.gather(*cache_updates)
# remove
for slug in del_apps:
self.sys_apps.store.pop(slug)
for slug in del_addons:
self.sys_addons.store.pop(slug)

View File

@@ -1,17 +1,17 @@
"""Init file for Supervisor apps."""
"""Init file for Supervisor add-ons."""
from copy import deepcopy
import logging
from typing import Self
from ..addons.model import AppModel, Data
from ..addons.model import AddonModel, Data
from ..coresys import CoreSys
_LOGGER: logging.Logger = logging.getLogger(__name__)
class AppStore(AppModel):
"""Hold data for app inside Supervisor."""
class AddonStore(AddonModel):
"""Hold data for add-on inside Supervisor."""
def __init__(self, coresys: CoreSys, slug: str, data: Data | None = None):
"""Initialize object."""
@@ -24,17 +24,17 @@ class AppStore(AppModel):
@property
def data(self) -> Data:
"""Return app data/config."""
return self._data or self.sys_store.data.apps[self.slug]
"""Return add-on data/config."""
return self._data or self.sys_store.data.addons[self.slug]
@property
def is_installed(self) -> bool:
"""Return True if an app is installed."""
return self.sys_apps.get_local_only(self.slug) is not None
"""Return True if an add-on is installed."""
return self.sys_addons.get_local_only(self.slug) is not None
@property
def is_detached(self) -> bool:
"""Return True if app is detached."""
"""Return True if add-on is detached."""
return False
def clone(self) -> Self:

View File

@@ -1,4 +1,4 @@
"""Constants for the app store."""
"""Constants for the add-on store."""
from enum import StrEnum
from pathlib import Path

View File

@@ -1,4 +1,4 @@
"""Init file for Supervisor app data."""
"""Init file for Supervisor add-on data."""
from dataclasses import dataclass
import errno
@@ -40,12 +40,12 @@ class ProcessedRepository:
config: dict[str, Any]
def _read_app_translations(app_path: Path) -> dict:
"""Read translations from apps folder.
def _read_addon_translations(addon_path: Path) -> dict:
"""Read translations from add-ons folder.
Should be run in the executor.
"""
translations_dir = app_path / "translations"
translations_dir = addon_path / "translations"
translations: dict[str, Any] = {}
if not translations_dir.exists():
@@ -101,25 +101,25 @@ def _read_git_repository(path: Path) -> ProcessedRepository | None:
class StoreData(CoreSysAttributes):
"""Hold data for Apps inside Supervisor."""
"""Hold data for Add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize data holder."""
self.coresys: CoreSys = coresys
self.repositories: dict[str, Any] = {}
self.apps: dict[str, dict[str, Any]] = {}
self.addons: dict[str, dict[str, Any]] = {}
async def update(self) -> None:
"""Read data from app repository."""
"""Read data from add-on repository."""
# read core repository
apps = await self._read_apps_folder(
self.sys_config.path_apps_core, REPOSITORY_CORE
addons = await self._read_addons_folder(
self.sys_config.path_addons_core, REPOSITORY_CORE
)
# read local repository
apps.update(
await self._read_apps_folder(
self.sys_config.path_apps_local, REPOSITORY_LOCAL
addons.update(
await self._read_addons_folder(
self.sys_config.path_addons_local, REPOSITORY_LOCAL
)
)
@@ -130,36 +130,38 @@ class StoreData(CoreSysAttributes):
def _read_git_repositories() -> list[ProcessedRepository]:
return [
repo
for repository_element in self.sys_config.path_apps_git.iterdir()
for repository_element in self.sys_config.path_addons_git.iterdir()
if repository_element.is_dir()
and (repo := _read_git_repository(repository_element))
]
for repo in await self.sys_run_in_executor(_read_git_repositories):
repositories[repo.slug] = repo.config
apps.update(await self._read_apps_folder(repo.path, repo.slug))
addons.update(await self._read_addons_folder(repo.path, repo.slug))
self.repositories = repositories
self.apps = apps
self.addons = addons
async def _find_app_configs(self, path: Path, repository: str) -> list[Path] | None:
"""Find apps in the path."""
async def _find_addon_configs(
self, path: Path, repository: str
) -> list[Path] | None:
"""Find add-ons in the path."""
def _get_apps_list() -> list[Path]:
def _get_addons_list() -> list[Path]:
# Generate a list without artefact, safe for corruptions
return [
app
for app in path.glob("**/config.*")
addon
for addon in path.glob("**/config.*")
if not [
part
for part in app.parts
for part in addon.parts
if part.startswith(".") or part == "rootfs"
]
and app.suffix in FILE_SUFFIX_CONFIGURATION
and addon.suffix in FILE_SUFFIX_CONFIGURATION
]
try:
app_list = await self.sys_run_in_executor(_get_apps_list)
addon_list = await self.sys_run_in_executor(_get_addons_list)
except OSError as err:
suggestion = None
self.sys_resolution.check_oserror(err)
@@ -175,48 +177,48 @@ class StoreData(CoreSysAttributes):
"Can't process %s because of Filesystem issues: %s", repository, err
)
return None
return app_list
return addon_list
async def _read_apps_folder(
async def _read_addons_folder(
self, path: Path, repository: str
) -> dict[str, dict[str, Any]]:
"""Read data from apps folder."""
if not (app_config_list := await self._find_app_configs(path, repository)):
"""Read data from add-ons folder."""
if not (addon_config_list := await self._find_addon_configs(path, repository)):
return {}
def _process_apps_config() -> dict[str, dict[str, Any]]:
apps: dict[str, dict[str, Any]] = {}
for app_config in app_config_list:
def _process_addons_config() -> dict[str, dict[str, Any]]:
addons: dict[str, dict[str, Any]] = {}
for addon_config in addon_config_list:
try:
app = read_json_or_yaml_file(app_config)
addon = read_json_or_yaml_file(addon_config)
except ConfigurationFileError:
_LOGGER.warning(
"Can't read %s from repository %s", app_config, repository
"Can't read %s from repository %s", addon_config, repository
)
continue
# validate
try:
app = SCHEMA_ADDON_CONFIG(app)
addon = SCHEMA_ADDON_CONFIG(addon)
except vol.Invalid as ex:
_LOGGER.warning(
"Can't read %s: %s", app_config, humanize_error(app, ex)
"Can't read %s: %s", addon_config, humanize_error(addon, ex)
)
continue
# Generate slug
app_slug = f"{repository}_{app[ATTR_SLUG]}"
addon_slug = f"{repository}_{addon[ATTR_SLUG]}"
# store
app[ATTR_REPOSITORY] = repository
app[ATTR_LOCATION] = str(app_config.parent)
app[ATTR_TRANSLATIONS] = _read_app_translations(app_config.parent)
app[ATTR_VERSION_TIMESTAMP] = app_config.stat().st_mtime
apps[app_slug] = app
addon[ATTR_REPOSITORY] = repository
addon[ATTR_LOCATION] = str(addon_config.parent)
addon[ATTR_TRANSLATIONS] = _read_addon_translations(addon_config.parent)
addon[ATTR_VERSION_TIMESTAMP] = addon_config.stat().st_mtime
addons[addon_slug] = addon
return apps
return addons
return await self.sys_run_in_executor(_process_apps_config)
return await self.sys_run_in_executor(_process_addons_config)
def _get_builtin_repositories(self) -> dict[str, dict[str, str]]:
"""Get local built-in repositories into dataset.

View File

@@ -1,4 +1,4 @@
"""Init file for Supervisor app Git."""
"""Init file for Supervisor add-on Git."""
import asyncio
import functools as ft
@@ -20,7 +20,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
class GitRepo(CoreSysAttributes):
"""Manage App Git repository."""
"""Manage Add-on Git repository."""
def __init__(self, coresys: CoreSys, path: Path, url: str):
"""Initialize Git base wrapper."""
@@ -48,7 +48,7 @@ class GitRepo(CoreSysAttributes):
return self.data[ATTR_BRANCH]
async def load(self) -> None:
"""Init Git app repository."""
"""Init Git add-on repository."""
if await self.sys_run_in_executor(directory_missing_or_empty, self.path):
await self.clone()
return
@@ -56,7 +56,7 @@ class GitRepo(CoreSysAttributes):
# Load repository
async with self.lock:
try:
_LOGGER.info("Loading app %s repository", self.path)
_LOGGER.info("Loading add-on %s repository", self.path)
self.repo = await self.sys_run_in_executor(git.Repo, str(self.path))
except (
@@ -71,7 +71,7 @@ class GitRepo(CoreSysAttributes):
# Fix possible corruption
async with self.lock:
try:
_LOGGER.debug("Integrity check app %s repository", self.path)
_LOGGER.debug("Integrity check add-on %s repository", self.path)
await self.sys_run_in_executor(self.repo.git.execute, ["git", "fsck"])
except git.CommandError as err:
_LOGGER.error("Integrity check on %s failed: %s.", self.path, err)
@@ -83,7 +83,7 @@ class GitRepo(CoreSysAttributes):
on_condition=StoreJobError,
)
async def clone(self) -> None:
"""Clone git app repository."""
"""Clone git add-on repository."""
async with self.lock:
await self._clone()
@@ -121,7 +121,7 @@ class GitRepo(CoreSysAttributes):
await self.sys_run_in_executor(temp_dir.cleanup)
async def _clone(self, path: Path | None = None) -> None:
"""Clone git app repository to location."""
"""Clone git add-on repository to location."""
path = path or self.path
git_args = {
attribute: value
@@ -135,7 +135,7 @@ class GitRepo(CoreSysAttributes):
}
try:
_LOGGER.info("Cloning app %s repository from %s", path, self.url)
_LOGGER.info("Cloning add-on %s repository from %s", path, self.url)
self.repo = await self.sys_run_in_executor(
ft.partial(
git.Repo.clone_from,
@@ -160,7 +160,7 @@ class GitRepo(CoreSysAttributes):
on_condition=StoreJobError,
)
async def pull(self) -> bool:
"""Pull Git app repo."""
"""Pull Git add-on repo."""
if self.lock.locked():
_LOGGER.warning("There is already a task in progress")
return False
@@ -169,7 +169,7 @@ class GitRepo(CoreSysAttributes):
return False
async with self.lock:
_LOGGER.info("Update app %s repository from %s", self.path, self.url)
_LOGGER.info("Update add-on %s repository from %s", self.path, self.url)
try:
git_cmd = git.Git()
@@ -239,12 +239,12 @@ class GitRepo(CoreSysAttributes):
"""Remove a repository."""
if self.lock.locked():
_LOGGER.warning(
"Cannot remove app repository %s, there is already a task in progress",
"Cannot remove add-on repository %s, there is already a task in progress",
self.url,
)
return
_LOGGER.info("Removing custom app repository %s", self.url)
_LOGGER.info("Removing custom add-on repository %s", self.url)
def _remove_git_dir(path: Path) -> None:
if not path.is_dir():

View File

@@ -36,10 +36,10 @@ UNKNOWN = "unknown"
class Repository(CoreSysAttributes, ABC):
"""App store repository in Supervisor."""
"""Add-on store repository in Supervisor."""
def __init__(self, coresys: CoreSys, repository: str, local_path: Path, slug: str):
"""Initialize app store repository object."""
"""Initialize add-on store repository object."""
self._slug: str = slug
self._local_path: Path = local_path
self.coresys: CoreSys = coresys
@@ -58,15 +58,15 @@ class Repository(CoreSysAttributes, ABC):
"""Create builtin repository."""
if builtin == BuiltinRepository.LOCAL:
slug = REPOSITORY_LOCAL
local_path = coresys.config.path_apps_local
local_path = coresys.config.path_addons_local
return RepositoryLocal(coresys, local_path, slug)
elif builtin == BuiltinRepository.CORE:
slug = REPOSITORY_CORE
local_path = coresys.config.path_apps_core
local_path = coresys.config.path_addons_core
else:
# For other builtin repositories (URL-based)
slug = get_hash_from_repository(builtin.value)
local_path = coresys.config.path_apps_git / slug
local_path = coresys.config.path_addons_git / slug
return RepositoryGitBuiltin(
coresys, builtin.value, local_path, slug, builtin.git_url
)
@@ -75,7 +75,7 @@ class Repository(CoreSysAttributes, ABC):
def _create_custom(coresys: CoreSys, repository: str) -> RepositoryCustom:
"""Create custom repository."""
slug = get_hash_from_repository(repository)
local_path = coresys.config.path_apps_git / slug
local_path = coresys.config.path_addons_git / slug
return RepositoryCustom(coresys, repository, local_path, slug)
def __repr__(self) -> str:
@@ -123,26 +123,26 @@ class Repository(CoreSysAttributes, ABC):
@abstractmethod
async def load(self) -> None:
"""Load app repository."""
"""Load addon repository."""
@abstractmethod
async def update(self) -> bool:
"""Update app repository.
"""Update add-on repository.
Returns True if the repository was updated.
"""
@abstractmethod
async def remove(self) -> None:
"""Remove app repository."""
"""Remove add-on repository."""
@abstractmethod
async def reset(self) -> None:
"""Reset app repository to fix corruption issue with files."""
"""Reset add-on repository to fix corruption issue with files."""
class RepositoryBuiltin(Repository, ABC):
"""A built-in app repository."""
"""A built-in add-on repository."""
@property
def is_builtin(self) -> bool:
@@ -159,16 +159,16 @@ class RepositoryBuiltin(Repository, ABC):
class RepositoryGit(Repository, ABC):
"""A git based app repository."""
"""A git based add-on repository."""
_git: GitRepo
async def load(self) -> None:
"""Load app repository."""
"""Load addon repository."""
await self._git.load()
async def update(self) -> bool:
"""Update app repository.
"""Update add-on repository.
Returns True if the repository was updated.
"""
@@ -202,7 +202,7 @@ class RepositoryGit(Repository, ABC):
return await self.sys_run_in_executor(validate_file)
async def reset(self) -> None:
"""Reset app repository to fix corruption issue with files."""
"""Reset add-on repository to fix corruption issue with files."""
try:
await self._git.reset()
await self.load()
@@ -212,7 +212,7 @@ class RepositoryGit(Repository, ABC):
class RepositoryLocal(RepositoryBuiltin):
"""A local app repository."""
"""A local add-on repository."""
def __init__(self, coresys: CoreSys, local_path: Path, slug: str) -> None:
"""Initialize object."""
@@ -229,11 +229,11 @@ class RepositoryLocal(RepositoryBuiltin):
raise StoreRepositoryUnknownError(repo=self.slug) from err
async def load(self) -> None:
"""Load app repository."""
"""Load addon repository."""
self._latest_mtime, _ = await self._get_latest_mtime()
async def update(self) -> bool:
"""Update app repository.
"""Update add-on repository.
Returns True if the repository was updated.
"""
@@ -257,7 +257,7 @@ class RepositoryLocal(RepositoryBuiltin):
class RepositoryGitBuiltin(RepositoryBuiltin, RepositoryGit):
"""A built-in app repository based on git."""
"""A built-in add-on repository based on git."""
def __init__(
self, coresys: CoreSys, repository: str, local_path: Path, slug: str, url: str
@@ -268,7 +268,7 @@ class RepositoryGitBuiltin(RepositoryBuiltin, RepositoryGit):
class RepositoryCustom(RepositoryGit):
"""A custom app repository."""
"""A custom add-on repository."""
def __init__(self, coresys: CoreSys, url: str, local_path: Path, slug: str) -> None:
"""Initialize object."""
@@ -281,5 +281,5 @@ class RepositoryCustom(RepositoryGit):
return False
async def remove(self) -> None:
"""Remove app repository."""
"""Remove add-on repository."""
await self._git.remove()

View File

@@ -1,4 +1,4 @@
"""Util apps functions."""
"""Util add-ons functions."""
import hashlib
import logging

Some files were not shown because too many files have changed in this diff Show More