Compare commits


1 Commit

Author: Mike Degatano
SHA1: b7c53d9e40
Message: No update available if update cannot be installed on system
Date: 2024-06-12 15:58:30 -04:00
360 changed files with 1358 additions and 3581 deletions

View File

@@ -4,12 +4,8 @@
"containerEnv": {
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
},
"remoteEnv": {
"PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
},
"appPort": ["9123:8123", "7357:4357"],
"postCreateCommand": "bash devcontainer_setup",
"postStartCommand": "bash devcontainer_bootstrap",
"postCreateCommand": "bash devcontainer_bootstrap",
"runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
"customizations": {
"vscode": {
@@ -23,21 +19,17 @@
"GitHub.vscode-pull-request-github"
],
"settings": {
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
"python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
},
"terminal.integrated.defaultProfile.linux": "zsh",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"python.pythonPath": "/usr/local/bin/python3",
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
}

View File

@@ -38,7 +38,6 @@
- This PR is related to issue:
- Link to documentation pull request:
- Link to cli pull request:
- Link to client library pull request:
## Checklist
@@ -56,11 +55,9 @@
- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
- [ ] Tests have been added to verify that the new code works.
If API endpoints or add-on configuration are added/changed:
If API endpoints of add-on configuration are added/changed:
- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
- [ ] [CLI][cli-repository] updated (if necessary)
- [ ] [Client library][client-library-repository] updated (if necessary)
<!--
Thank you for contributing <3
@@ -70,5 +67,3 @@ If API endpoints or add-on configuration are added/changed:
[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[docs-repository]: https://github.com/home-assistant/developers.home-assistant
[cli-repository]: https://github.com/home-assistant/cli
[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/

View File

@@ -53,7 +53,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
with:
fetch-depth: 0
@@ -92,7 +92,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
with:
fetch-depth: 0
@@ -106,7 +106,7 @@ jobs:
- name: Build wheels
if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.01.0
with:
abi: cp312
tag: musllinux_1_2
@@ -125,15 +125,15 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Install Cosign
if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.7.0
uses: sigstore/cosign-installer@v3.5.0
with:
cosign-release: "v2.4.0"
cosign-release: "v2.2.3"
- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
@@ -149,7 +149,7 @@ jobs:
- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.2.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.03.5
with:
args: |
$BUILD_ARGS \
@@ -178,7 +178,7 @@ jobs:
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Initialize git
if: needs.init.outputs.publish == 'true'
@@ -203,11 +203,11 @@ jobs:
timeout-minutes: 60
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.03.5
with:
args: |
--test \

View File

@@ -25,15 +25,15 @@ jobs:
name: Prepare Python dependencies
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Set up Python
id: python
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -47,7 +47,7 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -67,15 +67,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -87,7 +87,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -110,15 +110,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -130,7 +130,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -153,7 +153,7 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,15 +168,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -188,7 +188,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -212,15 +212,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -232,7 +232,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -256,15 +256,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -288,19 +288,19 @@ jobs:
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign
uses: sigstore/cosign-installer@v3.7.0
uses: sigstore/cosign-installer@v3.5.0
with:
cosign-release: "v2.4.0"
cosign-release: "v2.2.3"
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -313,7 +313,7 @@ jobs:
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
@@ -335,11 +335,10 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v4.4.3
uses: actions/upload-artifact@v4.3.3
with:
name: coverage-${{ matrix.python-version }}
path: .coverage
include-hidden-files: true
coverage:
name: Process test coverage
@@ -347,15 +346,15 @@ jobs:
needs: ["pytest", "prepare"]
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v4.0.2
with:
path: venv
key: |
@@ -366,7 +365,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v4.1.7
- name: Combine coverage results
run: |
. venv/bin/activate
@@ -374,4 +373,4 @@ jobs:
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4.6.0
uses: codecov/codecov-action@v4.4.1

View File

@@ -11,7 +11,7 @@ jobs:
name: Release Drafter
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
with:
fetch-depth: 0

View File

@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.1.6
- name: Sentry Release
uses: getsentry/action-release@v1.7.0
env:

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.7
rev: v0.2.1
hooks:
- id: ruff
args:

View File

@@ -4,8 +4,7 @@ FROM ${BUILD_FROM}
ENV \
S6_SERVICES_GRACETIME=10000 \
SUPERVISOR_API=http://localhost \
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
UV_SYSTEM_PYTHON=true
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1
ARG \
COSIGN_VERSION \
@@ -27,17 +26,14 @@ RUN \
yaml \
\
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign \
&& pip3 install uv==0.2.21
&& chmod a+x /usr/bin/cosign
# Install requirements
COPY requirements.txt .
RUN \
if [ "${BUILD_ARCH}" = "i386" ]; then \
linux32 uv pip install --no-build -r requirements.txt; \
else \
uv pip install --no-build -r requirements.txt; \
fi \
export MAKEFLAGS="-j$(nproc)" \
&& pip3 install --only-binary=:all: \
-r ./requirements.txt \
&& rm -f requirements.txt
# Install Home Assistant Supervisor

View File

@@ -30,5 +30,3 @@ Releases are done in 3 stages (channels) with this structure:
[development]: https://developers.home-assistant.io/docs/supervisor/development
[stable]: https://github.com/home-assistant/version/blob/master/stable.json
[![Home Assistant - A project from the Open Home Foundation](https://www.openhomefoundation.org/badges/home-assistant.png)](https://www.openhomefoundation.org/)

View File

@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
build_from:
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.20
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.20
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.20
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.20
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.20
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.19
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.19
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.19
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.19
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.19
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@@ -12,7 +12,7 @@ cosign:
base_identity: https://github.com/home-assistant/docker-base/.*
identity: https://github.com/home-assistant/supervisor/.*
args:
COSIGN_VERSION: 2.4.0
COSIGN_VERSION: 2.2.3
labels:
io.hass.type: supervisor
org.opencontainers.image.title: Home Assistant Supervisor

View File

@@ -31,7 +31,7 @@ include-package-data = true
include = ["supervisor*"]
[tool.pylint.MAIN]
py-version = "3.12"
py-version = "3.11"
# Use a conservative default here; 2 should speed up most setups and not hurt
# any too bad. Override on command line as appropriate.
jobs = 2
@@ -215,9 +215,6 @@ expected-line-ending-format = "LF"
[tool.pylint.EXCEPTIONS]
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
[tool.pylint.DESIGN]
max-positional-arguments = 10
[tool.pytest.ini_options]
testpaths = ["tests"]
norecursedirs = [".git"]
@@ -231,13 +228,12 @@ filterwarnings = [
]
[tool.ruff]
lint.select = [
select = [
"B002", # Python does not support the unary prefix increment
"B007", # Loop control variable {name} not used within loop body
"B014", # Exception handler with duplicate exception
"B023", # Function definition does not bind loop variable {name}
"B026", # Star-arg unpacking after a keyword argument is strongly discouraged
"B904", # Use raise from to specify exception cause
"C", # complexity
"COM818", # Trailing comma on bare tuple prohibited
"D", # docstrings
@@ -251,6 +247,7 @@ lint.select = [
"N804", # First argument of a class method should be named cls
"N805", # First argument of a method should be named self
"N815", # Variable {name} in class scope should not be mixedCase
"PGH001", # No builtin eval() allowed
"PGH004", # Use specific rule codes when using noqa
"PLC0414", # Useless import alias. Import alias does not rename original package.
"PLC", # pylint
@@ -289,12 +286,13 @@ lint.select = [
"T20", # flake8-print
"TID251", # Banned imports
"TRY004", # Prefer TypeError exception for invalid type
"TRY200", # Use raise from to specify exception cause
"TRY302", # Remove exception handler; error is immediately re-raised
"UP", # pyupgrade
"W", # pycodestyle
]
lint.ignore = [
ignore = [
"D202", # No blank lines allowed after function docstring
"D203", # 1 blank line required before class docstring
"D213", # Multi-line docstring summary should start at the second line
@@ -341,16 +339,16 @@ lint.ignore = [
"PLE0605",
]
[tool.ruff.lint.flake8-import-conventions.extend-aliases]
[tool.ruff.flake8-import-conventions.extend-aliases]
voluptuous = "vol"
[tool.ruff.lint.flake8-pytest-style]
[tool.ruff.flake8-pytest-style]
fixture-parentheses = false
[tool.ruff.lint.flake8-tidy-imports.banned-api]
[tool.ruff.flake8-tidy-imports.banned-api]
"pytz".msg = "use zoneinfo instead"
[tool.ruff.lint.isort]
[tool.ruff.isort]
force-sort-within-sections = true
section-order = [
"future",
@@ -364,10 +362,10 @@ known-first-party = ["supervisor", "tests"]
combine-as-imports = true
split-on-trailing-comma = false
[tool.ruff.lint.per-file-ignores]
[tool.ruff.per-file-ignores]
# DBus Service Mocks must use typing and names understood by dbus-fast
"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
[tool.ruff.lint.mccabe]
[tool.ruff.mccabe]
max-complexity = 25

View File

@@ -1,29 +1,30 @@
aiodns==3.2.0
aiohttp==3.10.10
aiohttp==3.9.5
aiohttp-fast-url-dispatcher==0.3.0
atomicwrites-homeassistant==1.4.1
attrs==24.2.0
awesomeversion==24.6.0
attrs==23.2.0
awesomeversion==24.2.0
brotli==1.1.0
ciso8601==2.3.1
colorlog==6.8.2
cpe==1.3.1
cryptography==43.0.1
debugpy==1.8.7
deepmerge==2.0
dirhash==0.5.0
cpe==1.2.1
cryptography==42.0.8
debugpy==1.8.1
deepmerge==1.1.1
dirhash==0.4.0
docker==7.1.0
faust-cchardet==2.1.19
gitpython==3.1.43
jinja2==3.1.4
orjson==3.10.7
pulsectl==24.8.0
orjson==3.9.15
pulsectl==24.4.0
pyudev==0.24.3
PyYAML==6.0.2
PyYAML==6.0.1
requests==2.32.3
securetar==2024.2.1
sentry-sdk==2.16.0
setuptools==75.1.0
voluptuous==0.15.2
dbus-fast==2.24.3
sentry-sdk==2.5.1
setuptools==70.0.0
voluptuous==0.14.2
dbus-fast==2.21.3
typing_extensions==4.12.2
zlib-fast==0.2.0

View File

@@ -1,12 +1,12 @@
coverage==7.6.3
pre-commit==4.0.1
pylint==3.3.1
coverage==7.5.3
pre-commit==3.7.1
pylint==3.2.3
pytest-aiohttp==1.0.5
pytest-asyncio==0.23.6
pytest-cov==5.0.0
pytest-timeout==2.3.1
pytest==8.3.3
ruff==0.6.9
time-machine==2.16.0
pytest==8.2.2
ruff==0.4.8
time-machine==2.14.1
typing_extensions==4.12.2
urllib3==2.2.3
urllib3==2.2.1

View File

@@ -1,5 +1,4 @@
"""Home Assistant Supervisor setup."""
from pathlib import Path
import re

View File

@@ -1,5 +1,4 @@
"""Main file for Supervisor."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor add-ons."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
@@ -47,8 +46,6 @@ from ..const import (
ATTR_SLUG,
ATTR_STATE,
ATTR_SYSTEM,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TYPE,
ATTR_USER,
ATTR_UUID,
@@ -57,7 +54,6 @@ from ..const import (
ATTR_WATCHDOG,
DNS_SUFFIX,
AddonBoot,
AddonBootConfig,
AddonStartup,
AddonState,
BusEvent,
@@ -289,9 +285,13 @@ class Addon(AddonModel):
@property
def need_update(self) -> bool:
"""Return True if an update is available."""
if self.is_detached:
if self.is_detached or self.version == self.latest_version:
return False
return self.version != self.latest_version
with suppress(AddonsNotSupportedError):
self._validate_availability(self.data_store)
return True
return False
@property
def dns(self) -> list[str]:
@@ -312,9 +312,7 @@ class Addon(AddonModel):
@property
def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced."""
if self.boot_config == AddonBootConfig.MANUAL_ONLY:
return super().boot
"""Return boot config with prio local settings."""
return self.persist.get(ATTR_BOOT, super().boot)
@boot.setter
@@ -369,37 +367,6 @@ class Addon(AddonModel):
else:
self.persist[ATTR_WATCHDOG] = value
@property
def system_managed(self) -> bool:
"""Return True if addon is managed by Home Assistant."""
return self.persist[ATTR_SYSTEM_MANAGED]
@system_managed.setter
def system_managed(self, value: bool) -> None:
"""Set system managed enable/disable."""
if not value and self.system_managed_config_entry:
self.system_managed_config_entry = None
self.persist[ATTR_SYSTEM_MANAGED] = value
@property
def system_managed_config_entry(self) -> str | None:
"""Return id of config entry managing this addon (if any)."""
if not self.system_managed:
return None
return self.persist.get(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY)
@system_managed_config_entry.setter
def system_managed_config_entry(self, value: str | None) -> None:
"""Set ID of config entry managing this addon."""
if not self.system_managed:
_LOGGER.warning(
"Ignoring system managed config entry for %s because it is not system managed",
self.slug,
)
else:
self.persist[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] = value
@property
def uuid(self) -> str:
"""Return an API token for this add-on."""
@@ -766,12 +733,10 @@ class Addon(AddonModel):
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=AddonsJobError,
)
async def uninstall(
self, *, remove_config: bool, remove_image: bool = True
) -> None:
async def uninstall(self, *, remove_config: bool) -> None:
"""Uninstall and cleanup this addon."""
try:
await self.instance.remove(remove_image=remove_image)
await self.instance.remove()
except DockerError as err:
raise AddonsError() from err
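
The behavioral change named in the commit message lives in the need_update hunk earlier in this file's diff. Below is a minimal, standalone sketch (hypothetical class and helper names, not the repository's exact code) of the variant that only reports an update when the newer version can actually be installed on the system:

from contextlib import suppress


class AddonsNotSupportedError(Exception):
    """Raised when an add-on version cannot be installed on this system."""


class AddonSketch:
    """Hypothetical stand-in for the Addon model, reduced to the update check."""

    def __init__(self, version: str, latest_version: str, *, detached: bool = False, supported: bool = True) -> None:
        self.version = version
        self.latest_version = latest_version
        self.is_detached = detached
        self._supported = supported

    def _validate_availability(self, config: dict) -> None:
        # Placeholder for the real architecture/machine/Home Assistant version checks.
        if not self._supported:
            raise AddonsNotSupportedError()

    @property
    def need_update(self) -> bool:
        """Report an update only if one exists and it can be installed here."""
        if self.is_detached or self.version == self.latest_version:
            return False
        with suppress(AddonsNotSupportedError):
            self._validate_availability({})
            return True
        return False


# An add-on with a newer version that is unsupported on this system reports no update.
assert AddonSketch("1.0", "2.0", supported=False).need_update is False
assert AddonSketch("1.0", "2.0", supported=True).need_update is True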

View File

@@ -1,5 +1,4 @@
"""Supervisor add-on build environment."""
from __future__ import annotations
from functools import cached_property

View File

@@ -1,5 +1,4 @@
"""Add-on static data."""
from datetime import timedelta
from enum import StrEnum

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor add-on data."""
from copy import deepcopy
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Supervisor add-on manager."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
@@ -185,15 +184,7 @@ class AddonManager(CoreSysAttributes):
_LOGGER.warning("Add-on %s is not installed", slug)
return
shared_image = any(
self.local[slug].image == addon.image
and self.local[slug].version == addon.version
for addon in self.installed
if addon.slug != slug
)
await self.local[slug].uninstall(
remove_config=remove_config, remove_image=not shared_image
)
await self.local[slug].uninstall(remove_config=remove_config)
_LOGGER.info("Add-on '%s' successfully removed", slug)

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor add-ons."""
from abc import ABC, abstractmethod
from collections import defaultdict
from collections.abc import Awaitable, Callable
@@ -83,7 +82,6 @@ from ..const import (
SECURITY_DISABLE,
SECURITY_PROFILE,
AddonBoot,
AddonBootConfig,
AddonStage,
AddonStartup,
)
@@ -150,15 +148,10 @@ class AddonModel(JobGroup, ABC):
"""Return options with local changes."""
return self.data[ATTR_OPTIONS]
@property
def boot_config(self) -> AddonBootConfig:
"""Return boot config."""
return self.data[ATTR_BOOT]
@property
def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced."""
return AddonBoot(self.data[ATTR_BOOT])
"""Return boot config with prio local settings."""
return self.data[ATTR_BOOT]
@property
def auto_update(self) -> bool | None:

View File

@@ -1,5 +1,4 @@
"""Add-on Options / UI rendering."""
import hashlib
import logging
from pathlib import Path

View File

@@ -1,5 +1,4 @@
"""Util add-ons functions."""
from __future__ import annotations
import asyncio

View File

@@ -1,5 +1,4 @@
"""Validate add-ons options schema."""
import logging
import re
import secrets
@@ -79,8 +78,6 @@ from ..const import (
ATTR_STATE,
ATTR_STDIN,
ATTR_SYSTEM,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_TRANSLATIONS,
@@ -98,7 +95,6 @@ from ..const import (
ROLE_ALL,
ROLE_DEFAULT,
AddonBoot,
AddonBootConfig,
AddonStage,
AddonStartup,
AddonState,
@@ -322,9 +318,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
AddonStartup
),
vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce(
AddonBootConfig
),
vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@@ -473,8 +467,6 @@ SCHEMA_ADDON_USER = vol.Schema(
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str),
},
extra=vol.REMOVE_EXTRA,
)

View File

@@ -1,11 +1,11 @@
"""Init file for Supervisor RESTful API."""
from functools import partial
import logging
from pathlib import Path
from typing import Any
from aiohttp import web
from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispatcher
from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes
@@ -67,6 +67,7 @@ class RestAPI(CoreSysAttributes):
"max_field_size": MAX_LINE_SIZE,
},
)
attach_fast_url_dispatcher(self.webapp, FastUrlDispatcher())
# service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
@@ -509,7 +510,6 @@ class RestAPI(CoreSysAttributes):
web.post("/addons/{addon}/stop", api_addons.stop),
web.post("/addons/{addon}/restart", api_addons.restart),
web.post("/addons/{addon}/options", api_addons.options),
web.post("/addons/{addon}/sys_options", api_addons.sys_options),
web.post(
"/addons/{addon}/options/validate", api_addons.options_validate
),

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -82,8 +81,6 @@ from ..const import (
ATTR_STARTUP,
ATTR_STATE,
ATTR_STDIN,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TRANSLATIONS,
ATTR_UART,
ATTR_UDEV,
@@ -98,7 +95,6 @@ from ..const import (
ATTR_WEBUI,
REQUEST_FROM,
AddonBoot,
AddonBootConfig,
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
@@ -110,7 +106,7 @@ from ..exceptions import (
PwnedSecret,
)
from ..validate import docker_ports
from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED
from .const import ATTR_REMOVE_CONFIG, ATTR_SIGNED
from .utils import api_process, api_validate, json_loads
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -130,13 +126,6 @@ SCHEMA_OPTIONS = vol.Schema(
}
)
SCHEMA_SYS_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str),
}
)
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
SCHEMA_UNINSTALL = vol.Schema(
@@ -189,7 +178,6 @@ class APIAddons(CoreSysAttributes):
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_SYSTEM_MANAGED: addon.system_managed,
}
for addon in self.sys_addons.installed
]
@@ -218,7 +206,6 @@ class APIAddons(CoreSysAttributes):
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon),
ATTR_BOOT_CONFIG: addon.boot_config,
ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options,
ATTR_SCHEMA: addon.schema_ui,
@@ -278,8 +265,6 @@ class APIAddons(CoreSysAttributes):
ATTR_WATCHDOG: addon.watchdog,
ATTR_DEVICES: addon.static_devices
+ [device.path for device in addon.devices],
ATTR_SYSTEM_MANAGED: addon.system_managed,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
}
return data
@@ -302,10 +287,6 @@ class APIAddons(CoreSysAttributes):
if ATTR_OPTIONS in body:
addon.options = body[ATTR_OPTIONS]
if ATTR_BOOT in body:
if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
raise APIError(
f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
)
addon.boot = body[ATTR_BOOT]
if ATTR_AUTO_UPDATE in body:
addon.auto_update = body[ATTR_AUTO_UPDATE]
@@ -323,20 +304,6 @@ class APIAddons(CoreSysAttributes):
addon.save_persist()
@api_process
async def sys_options(self, request: web.Request) -> None:
"""Store system options for an add-on."""
addon = self.get_addon_for_request(request)
# Validate/Process Body
body = await api_validate(SCHEMA_SYS_OPTIONS, request)
if ATTR_SYSTEM_MANAGED in body:
addon.system_managed = body[ATTR_SYSTEM_MANAGED]
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
addon.save_persist()
@api_process
async def options_validate(self, request: web.Request) -> None:
"""Validate user options for add-on."""

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Audio RESTful API."""
import asyncio
from collections.abc import Awaitable
from dataclasses import asdict

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor auth/SSO RESTful API."""
import asyncio
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Backups RESTful API."""
import asyncio
from collections.abc import Callable
import errno
@@ -343,9 +342,9 @@ class APIBackups(CoreSysAttributes):
_LOGGER.info("Downloading backup %s", backup.slug)
response = web.FileResponse(backup.tarfile)
response.content_type = CONTENT_TYPE_TAR
response.headers[CONTENT_DISPOSITION] = (
f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
)
response.headers[
CONTENT_DISPOSITION
] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
return response
@api_process

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor HA cli RESTful API."""
import asyncio
import logging
from typing import Any

View File

@@ -17,7 +17,6 @@ ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_ATTRIBUTES = "attributes"
ATTR_AVAILABLE_UPDATES = "available_updates"
ATTR_BACKGROUND = "background"
ATTR_BOOT_CONFIG = "boot_config"
ATTR_BOOT_SLOT = "boot_slot"
ATTR_BOOT_SLOTS = "boot_slots"
ATTR_BOOT_TIMESTAMP = "boot_timestamp"
@@ -37,7 +36,6 @@ ATTR_DT_UTC = "dt_utc"
ATTR_EJECTABLE = "ejectable"
ATTR_FALLBACK = "fallback"
ATTR_FILESYSTEMS = "filesystems"
ATTR_FORCE = "force"
ATTR_GROUP_IDS = "group_ids"
ATTR_IDENTIFIERS = "identifiers"
ATTR_IS_ACTIVE = "is_active"

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor network RESTful API."""
import logging
import voluptuous as vol

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor DNS RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor hardware RESTful API."""
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -35,9 +34,9 @@ from ..const import (
ATTR_WATCHDOG,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError
from ..exceptions import APIError
from ..validate import docker_image, network_port, version_tag
from .const import ATTR_FORCE, ATTR_SAFE_MODE
from .const import ATTR_SAFE_MODE
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -67,13 +66,6 @@ SCHEMA_UPDATE = vol.Schema(
SCHEMA_RESTART = vol.Schema(
{
vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
SCHEMA_STOP = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
@@ -81,17 +73,6 @@ SCHEMA_STOP = vol.Schema(
class APIHomeAssistant(CoreSysAttributes):
"""Handle RESTful API for Home Assistant functions."""
async def _check_offline_migration(self, force: bool = False) -> None:
"""Check and raise if there's an offline DB migration in progress."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Offline database migration in progress, try again after it has completed"
)
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return host information."""
@@ -173,7 +154,6 @@ class APIHomeAssistant(CoreSysAttributes):
async def update(self, request: web.Request) -> None:
"""Update Home Assistant."""
body = await api_validate(SCHEMA_UPDATE, request)
await self._check_offline_migration()
await asyncio.shield(
self.sys_homeassistant.core.update(
@@ -183,12 +163,9 @@ class APIHomeAssistant(CoreSysAttributes):
)
@api_process
async def stop(self, request: web.Request) -> Awaitable[None]:
def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop Home Assistant."""
body = await api_validate(SCHEMA_STOP, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_homeassistant.core.stop())
return asyncio.shield(self.sys_homeassistant.core.stop())
@api_process
def start(self, request: web.Request) -> Awaitable[None]:
@@ -199,7 +176,6 @@ class APIHomeAssistant(CoreSysAttributes):
async def restart(self, request: web.Request) -> None:
"""Restart Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
await asyncio.shield(
self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
@@ -209,7 +185,6 @@ class APIHomeAssistant(CoreSysAttributes):
async def rebuild(self, request: web.Request) -> None:
"""Rebuild Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
await asyncio.shield(
self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])

View File

@@ -28,7 +28,7 @@ from ..const import (
ATTR_TIMEZONE,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError
from ..exceptions import APIError, HostLogError
from ..host.const import (
PARAM_BOOT_ID,
PARAM_FOLLOW,
@@ -46,7 +46,6 @@ from .const import (
ATTR_BROADCAST_MDNS,
ATTR_DT_SYNCHRONIZED,
ATTR_DT_UTC,
ATTR_FORCE,
ATTR_IDENTIFIERS,
ATTR_LLMNR_HOSTNAME,
ATTR_STARTUP_TIME,
@@ -61,33 +60,14 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
IDENTIFIER = "identifier"
BOOTID = "bootid"
DEFAULT_LINES = 100
DEFAULT_RANGE = 100
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})
# pylint: disable=no-value-for-parameter
SCHEMA_SHUTDOWN = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
# pylint: enable=no-value-for-parameter
class APIHost(CoreSysAttributes):
"""Handle RESTful API for host functions."""
async def _check_ha_offline_migration(self, force: bool) -> None:
"""Check if HA has an offline migration in progress and raise if not forced."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Home Assistant offline database migration in progress, please wait until complete before shutting down host"
)
@api_process
async def info(self, request):
"""Return host information."""
@@ -129,20 +109,14 @@ class APIHost(CoreSysAttributes):
)
@api_process
async def reboot(self, request):
def reboot(self, request):
"""Reboot host."""
body = await api_validate(SCHEMA_SHUTDOWN, request)
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.reboot())
return asyncio.shield(self.sys_host.control.reboot())
@api_process
async def shutdown(self, request):
def shutdown(self, request):
"""Poweroff host."""
body = await api_validate(SCHEMA_SHUTDOWN, request)
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.shutdown())
return asyncio.shield(self.sys_host.control.shutdown())
@api_process
def reload(self, request):
@@ -222,30 +196,13 @@ class APIHost(CoreSysAttributes):
"supported for now."
)
if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
if request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
log_formatter = LogFormatter.VERBOSE
if "lines" in request.query:
lines = request.query.get("lines", DEFAULT_LINES)
try:
lines = int(lines)
except ValueError:
# If the user passed a non-integer value, just use the default instead of error.
lines = DEFAULT_LINES
finally:
# We can't use the entries= Range header syntax to refer to the last 1 line,
# and passing 1 to the calculation below would return the 1st line of the logs
# instead. Since this is really an edge case that doesn't matter much, we'll just
# return 2 lines at minimum.
lines = max(2, lines)
# entries=cursor[[:num_skip]:num_entries]
range_header = f"entries=:-{lines-1}:{'' if follow else lines}"
elif RANGE in request.headers:
if RANGE in request.headers:
range_header = request.headers.get(RANGE)
else:
range_header = (
f"entries=:-{DEFAULT_LINES-1}:{'' if follow else DEFAULT_LINES}"
)
range_header = f"entries=:-{DEFAULT_RANGE}:"
async with self.sys_host.logs.journald_logs(
params=params, range_header=range_header, accept=LogFormat.JOURNAL
@@ -253,13 +210,8 @@ class APIHost(CoreSysAttributes):
try:
response = web.StreamResponse()
response.content_type = CONTENT_TYPE_TEXT
headers_returned = False
async for cursor, line in journal_logs_reader(resp, log_formatter):
if not headers_returned:
if cursor:
response.headers["X-First-Cursor"] = cursor
await response.prepare(request)
headers_returned = True
await response.prepare(request)
async for line in journal_logs_reader(resp, log_formatter):
await response.write(line.encode("utf-8") + b"\n")
except ConnectionResetError as ex:
raise APIError(
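
The host logs hunk above builds a systemd-journal-gatewayd Range header of the form entries=cursor[[:num_skip]:num_entries] from the "lines" query parameter. A small standalone sketch of that construction (hypothetical helper name, assuming the variant that clamps the requested line count to a minimum of 2):

def build_range_header(lines_param: str | None, follow: bool, default_lines: int = 100) -> str:
    """Build a journald Range header: entries=cursor[[:num_skip]:num_entries]."""
    if lines_param is None:
        lines = default_lines
    else:
        try:
            lines = int(lines_param)
        except ValueError:
            # Non-integer input falls back to the default instead of raising an error.
            lines = default_lines
        # The syntax cannot address only the very last line, so request at least 2.
        lines = max(2, lines)
    # A negative num_skip counts back from the end; omit num_entries when following.
    return f"entries=:-{lines - 1}:{'' if follow else lines}"


# Example: the last 50 lines without following the journal.
assert build_range_header("50", follow=False) == "entries=:-49:50"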

View File

@@ -1,5 +1,4 @@
"""Supervisor Add-on ingress service."""
import asyncio
from ipaddress import ip_address
import logging

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Jobs RESTful API."""
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Handle security part of this API."""
import logging
import re
from typing import Final
@@ -9,8 +8,6 @@ from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion
from supervisor.homeassistant.const import LANDINGPAGE
from ...addons.const import RE_SLUG
from ...const import (
REQUEST_FROM,
@@ -80,13 +77,6 @@ ADDONS_API_BYPASS: Final = re.compile(
r")$"
)
# Home Assistant only
CORE_ONLY_PATHS: Final = re.compile(
r"^(?:"
r"/addons/" + RE_SLUG + "/sys_options"
r")$"
)
# Policy role add-on API access
ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
ROLE_DEFAULT: re.compile(
@@ -242,9 +232,6 @@ class SecurityMiddleware(CoreSysAttributes):
if supervisor_token == self.sys_homeassistant.supervisor_token:
_LOGGER.debug("%s access from Home Assistant", request.path)
request_from = self.sys_homeassistant
elif CORE_ONLY_PATHS.match(request.path):
_LOGGER.warning("Attempted access to %s from client besides Home Assistant")
raise HTTPForbidden()
# Host
if supervisor_token == self.sys_plugins.cli.supervisor_token:
@@ -290,10 +277,8 @@ class SecurityMiddleware(CoreSysAttributes):
@middleware
async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy."""
if (
request[REQUEST_FROM] != self.sys_homeassistant
or self.sys_homeassistant.version == LANDINGPAGE
or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION)
if request[REQUEST_FROM] != self.sys_homeassistant or version_is_new_enough(
self.sys_homeassistant.version, _CORE_VERSION
):
return await handler(request)

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Multicast RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging

View File

@@ -1,8 +1,8 @@
"""REST API for network."""
import asyncio
from collections.abc import Awaitable
from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface
from dataclasses import replace
from ipaddress import ip_address, ip_interface
from typing import Any
from aiohttp import web
@@ -48,28 +48,18 @@ from ..host.configuration import (
Interface,
InterfaceMethod,
IpConfig,
IpSetting,
VlanConfig,
WifiConfig,
)
from ..host.const import AuthMethod, InterfaceType, WifiMode
from .utils import api_process, api_validate
_SCHEMA_IPV4_CONFIG = vol.Schema(
_SCHEMA_IP_CONFIG = vol.Schema(
{
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)],
vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)],
}
)
_SCHEMA_IPV6_CONFIG = vol.Schema(
{
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
}
)
@@ -86,18 +76,18 @@ _SCHEMA_WIFI_CONFIG = vol.Schema(
# pylint: disable=no-value-for-parameter
SCHEMA_UPDATE = vol.Schema(
{
vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG,
vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG,
vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
vol.Optional(ATTR_ENABLED): vol.Boolean(),
}
)
def ipconfig_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
"""Return a dict with information about ip configuration."""
return {
ATTR_METHOD: setting.method,
ATTR_METHOD: config.method,
ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
@@ -132,8 +122,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary,
ATTR_MAC: interface.mac,
ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
}
@@ -207,26 +197,24 @@ class APINetwork(CoreSysAttributes):
# Apply config
for key, config in body.items():
if key == ATTR_IPV4:
interface.ipv4setting = IpSetting(
config.get(ATTR_METHOD, InterfaceMethod.STATIC),
config.get(ATTR_ADDRESS, []),
config.get(ATTR_GATEWAY),
config.get(ATTR_NAMESERVERS, []),
interface.ipv4 = replace(
interface.ipv4
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
**config,
)
elif key == ATTR_IPV6:
interface.ipv6setting = IpSetting(
config.get(ATTR_METHOD, InterfaceMethod.STATIC),
config.get(ATTR_ADDRESS, []),
config.get(ATTR_GATEWAY),
config.get(ATTR_NAMESERVERS, []),
interface.ipv6 = replace(
interface.ipv6
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
**config,
)
elif key == ATTR_WIFI:
interface.wifi = WifiConfig(
config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE),
config.get(ATTR_SSID, ""),
config.get(ATTR_AUTH, AuthMethod.OPEN),
config.get(ATTR_PSK, None),
None,
interface.wifi = replace(
interface.wifi
or WifiConfig(
WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
),
**config,
)
elif key == ATTR_ENABLED:
interface.enabled = config
@@ -268,22 +256,24 @@ class APINetwork(CoreSysAttributes):
vlan_config = VlanConfig(vlan, interface.name)
ipv4_setting = None
ipv4_config = None
if ATTR_IPV4 in body:
ipv4_setting = IpSetting(
ipv4_config = IpConfig(
body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
body[ATTR_IPV4].get(ATTR_ADDRESS, []),
body[ATTR_IPV4].get(ATTR_GATEWAY, None),
body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
None,
)
ipv6_setting = None
ipv6_config = None
if ATTR_IPV6 in body:
ipv6_setting = IpSetting(
ipv6_config = IpConfig(
body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
body[ATTR_IPV6].get(ATTR_ADDRESS, []),
body[ATTR_IPV6].get(ATTR_GATEWAY, None),
body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
None,
)
vlan_interface = Interface(
@@ -294,10 +284,8 @@ class APINetwork(CoreSysAttributes):
True,
False,
InterfaceType.VLAN,
None,
ipv4_setting,
None,
ipv6_setting,
ipv4_config,
ipv6_config,
None,
vlan_config,
)

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Observer RESTful API."""
import asyncio
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor HassOS RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging

View File

@@ -1,5 +1,4 @@
"""Utils for Home Assistant Proxy."""
import asyncio
from contextlib import asynccontextmanager
import logging

View File

@@ -1,5 +1,4 @@
"""Handle REST API for resoulution."""
import asyncio
from collections.abc import Awaitable
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Root RESTful API."""
import asyncio
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Security RESTful API."""
import asyncio
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Supervisor RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor util for RESTful API."""
import json
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Handle Arch for underlay maschine/platforms."""
import logging
from pathlib import Path
import platform

View File

@@ -1,5 +1,4 @@
"""Manage SSO for Add-ons with Home Assistant user."""
import asyncio
import hashlib
import logging

View File

@@ -1,5 +1,4 @@
"""Representation of a backup file."""
import asyncio
from base64 import b64decode, b64encode
from collections import defaultdict

View File

@@ -1,5 +1,4 @@
"""Backup consts."""
from enum import StrEnum
BUF_SIZE = 2**20 * 4 # 4MB

View File

@@ -1,5 +1,4 @@
"""Backup manager."""
from __future__ import annotations
import asyncio
@@ -10,10 +9,7 @@ from pathlib import Path
from ..addons.addon import Addon
from ..const import (
ATTR_DATA,
ATTR_DAYS_UNTIL_STALE,
ATTR_SLUG,
ATTR_TYPE,
FILE_HASSIO_BACKUPS,
FOLDER_HOMEASSISTANT,
CoreState,
@@ -24,9 +20,7 @@ from ..exceptions import (
BackupInvalidError,
BackupJobError,
BackupMountDownError,
HomeAssistantWSError,
)
from ..homeassistant.const import WSType
from ..jobs.const import JOB_GROUP_BACKUP_MANAGER, JobCondition, JobExecutionLimit
from ..jobs.decorator import Job
from ..jobs.job_group import JobGroup
@@ -265,6 +259,11 @@ class BackupManager(FileConfiguration, JobGroup):
self.sys_core.state = CoreState.FREEZE
async with backup:
# Backup add-ons
if addon_list:
self._change_stage(BackupJobStage.ADDONS, backup)
addon_start_tasks = await backup.store_addons(addon_list)
# HomeAssistant Folder is for v1
if homeassistant:
self._change_stage(BackupJobStage.HOME_ASSISTANT, backup)
@@ -274,11 +273,6 @@ class BackupManager(FileConfiguration, JobGroup):
else homeassistant_exclude_database
)
# Backup add-ons
if addon_list:
self._change_stage(BackupJobStage.ADDONS, backup)
addon_start_tasks = await backup.store_addons(addon_list)
# Backup folders
if folder_list:
self._change_stage(BackupJobStage.FOLDERS, backup)
@@ -304,18 +298,6 @@ class BackupManager(FileConfiguration, JobGroup):
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*addon_start_tasks, return_exceptions=True)
try:
await self.sys_homeassistant.websocket.async_send_command(
{
ATTR_TYPE: WSType.BACKUP_SYNC,
ATTR_DATA: {
ATTR_SLUG: backup.slug,
},
},
)
except HomeAssistantWSError as err:
_LOGGER.error("Can't send backup sync to Home Assistant: %s", err)
return backup
finally:
self.sys_core.state = CoreState.RUNNING

View File

@@ -1,5 +1,4 @@
"""Util add-on functions."""
import hashlib
import re

View File

@@ -1,5 +1,4 @@
"""Validate some things around restore."""
from __future__ import annotations
from typing import Any

View File

@@ -1,6 +1,4 @@
"""Bootstrap Supervisor."""
# ruff: noqa: T100
import logging
import os
from pathlib import Path

View File

@@ -1,5 +1,4 @@
"""Bus event system."""
from __future__ import annotations
from collections.abc import Awaitable, Callable

View File

@@ -1,5 +1,4 @@
"""Bootstrap Supervisor."""
from datetime import UTC, datetime
import logging
import os

View File

@@ -1,5 +1,4 @@
"""Constants file for Supervisor."""
from dataclasses import dataclass
from enum import StrEnum
from ipaddress import ip_network
@@ -310,8 +309,6 @@ ATTR_SUPERVISOR_VERSION = "supervisor_version"
ATTR_SUPPORTED = "supported"
ATTR_SUPPORTED_ARCH = "supported_arch"
ATTR_SYSTEM = "system"
ATTR_SYSTEM_MANAGED = "system_managed"
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY = "system_managed_config_entry"
ATTR_TIMEOUT = "timeout"
ATTR_TIMEZONE = "timezone"
ATTR_TITLE = "title"
@@ -382,27 +379,12 @@ ROLE_ADMIN = "admin"
ROLE_ALL = [ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_BACKUP, ROLE_MANAGER, ROLE_ADMIN]
class AddonBootConfig(StrEnum):
"""Boot mode config for the add-on."""
AUTO = "auto"
MANUAL = "manual"
MANUAL_ONLY = "manual_only"
class AddonBoot(StrEnum):
"""Boot mode for the add-on."""
AUTO = "auto"
MANUAL = "manual"
@classmethod
def _missing_(cls, value: str) -> Self | None:
"""Convert 'forced' config values to their counterpart."""
if value == AddonBootConfig.MANUAL_ONLY:
return AddonBoot.MANUAL
return None
class AddonStartup(StrEnum):
"""Startup types of Add-on."""

View File

@@ -1,5 +1,4 @@
"""Main file for Supervisor."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress

View File

@@ -1,5 +1,4 @@
"""Handle core shared data."""
from __future__ import annotations
import asyncio
@@ -63,7 +62,7 @@ class CoreSys:
# External objects
self._loop: asyncio.BaseEventLoop = asyncio.get_running_loop()
self._websession = None
self._websession: aiohttp.ClientSession = aiohttp.ClientSession()
# Global objects
self._config: CoreConfig = CoreConfig()
@@ -96,8 +95,10 @@ class CoreSys:
self._bus: Bus | None = None
self._mounts: MountManager | None = None
# Setup aiohttp session
self.create_websession()
# Set default header for aiohttp
self._websession._default_headers = MappingProxyType(
{aiohttp.hdrs.USER_AGENT: SERVER_SOFTWARE}
)
# Task factory attributes
self._set_task_context: list[Callable[[Context], Context]] = []
@@ -112,11 +113,8 @@ class CoreSys:
"""Return system timezone."""
if self.config.timezone:
return self.config.timezone
# pylint bug with python 3.12.4 (https://github.com/pylint-dev/pylint/issues/9811)
# pylint: disable=no-member
if self.host.info.timezone:
return self.host.info.timezone
# pylint: enable=no-member
return "UTC"
@property
@@ -546,16 +544,6 @@ class CoreSys:
return self.loop.run_in_executor(None, funct, *args)
def create_websession(self) -> None:
"""Create a new aiohttp session."""
if self._websession:
self.create_task(self._websession.close())
# Create session and set default header for aiohttp
self._websession: aiohttp.ClientSession = aiohttp.ClientSession(
headers=MappingProxyType({aiohttp.hdrs.USER_AGENT: SERVER_SOFTWARE})
)
def _create_context(self) -> Context:
"""Create a new context for a task."""
context = copy_context()

View File

@@ -1,5 +1,4 @@
"""OS-Agent implementation for DBUS."""
import asyncio
from collections.abc import Awaitable
import logging
@@ -8,7 +7,7 @@ from typing import Any
from awesomeversion import AwesomeVersion
from dbus_fast.aio.message_bus import MessageBus
from ...exceptions import DBusInterfaceError, DBusServiceUnkownError
from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
from ..const import (
DBUS_ATTR_DIAGNOSTICS,
DBUS_ATTR_VERSION,
@@ -96,25 +95,13 @@ class OSAgent(DBusInterfaceProxy):
_LOGGER.info("Load dbus interface %s", self.name)
try:
await super().connect(bus)
await asyncio.gather(*[dbus.connect(bus) for dbus in self.all])
except DBusError:
_LOGGER.warning("Can't connect to OS-Agent")
except (DBusServiceUnkownError, DBusInterfaceError):
_LOGGER.error(
_LOGGER.warning(
"No OS-Agent support on the host. Some Host functions have been disabled."
)
return
errors = await asyncio.gather(
*[dbus.connect(bus) for dbus in self.all], return_exceptions=True
)
for err in errors:
if err:
dbus = self.all[errors.index(err)]
_LOGGER.error(
"Can't load OS Agent dbus interface %s %s: %s",
dbus.bus_name,
dbus.object_path,
err,
)
@dbus_connected
async def update(self, changed: dict[str, Any] | None = None) -> None:

View File

@@ -1,5 +1,4 @@
"""AppArmor object for OS-Agent."""
from pathlib import Path
from awesomeversion import AwesomeVersion

View File

@@ -1,10 +1,9 @@
"""Board management for OS Agent."""
import logging
from dbus_fast.aio.message_bus import MessageBus
from ....exceptions import BoardInvalidError, DBusInterfaceError, DBusServiceUnkownError
from ....exceptions import BoardInvalidError
from ...const import (
DBUS_ATTR_BOARD,
DBUS_IFACE_HAOS_BOARDS,
@@ -75,10 +74,6 @@ class BoardManager(DBusInterfaceProxy):
self._board_proxy = Green()
elif self.board == BOARD_NAME_SUPERVISED:
self._board_proxy = Supervised()
else:
return
try:
if self._board_proxy:
await self._board_proxy.connect(bus)
except (DBusServiceUnkownError, DBusInterfaceError) as ex:
_LOGGER.warning("OS-Agent board support initialization failed: %s", ex)

View File

@@ -1,9 +1,5 @@
"""Supervised board management."""
from typing import Any
from supervisor.dbus.utils import dbus_connected
from .const import BOARD_NAME_SUPERVISED
from .interface import BoardProxy
@@ -15,11 +11,3 @@ class Supervised(BoardProxy):
"""Initialize properties."""
super().__init__(BOARD_NAME_SUPERVISED)
self.sync_properties: bool = False
@dbus_connected
async def update(self, changed: dict[str, Any] | None = None) -> None:
"""Do nothing as there are no properties.
Currently unused, avoid using the Properties interface to avoid a bug in
Go D-Bus, see: https://github.com/home-assistant/os-agent/issues/206
"""

View File

@@ -1,5 +1,4 @@
"""DataDisk object for OS-Agent."""
from pathlib import Path
from ..const import (

View File

@@ -1,5 +1,4 @@
"""Constants for DBUS."""
from enum import IntEnum, StrEnum
from socket import AF_INET, AF_INET6

View File

@@ -1,5 +1,4 @@
"""D-Bus interface for hostname."""
import logging
from dbus_fast.aio.message_bus import MessageBus

View File

@@ -1,5 +1,4 @@
"""Interface class for D-Bus wrappers."""
from abc import ABC
from collections.abc import Callable
from functools import wraps

View File

@@ -1,5 +1,4 @@
"""Interface to Logind over D-Bus."""
import logging
from dbus_fast.aio.message_bus import MessageBus

View File

@@ -1,5 +1,4 @@
"""D-Bus interface objects."""
import asyncio
import logging
@@ -129,11 +128,9 @@ class DBusManager(CoreSysAttributes):
for err in errors:
if err:
dbus = self.all[errors.index(err)]
_LOGGER.warning(
"Can't load dbus interface %s %s: %s",
dbus.name,
dbus.object_path,
"Can't load dbus interface %s: %s",
self.all[errors.index(err)].name,
err,
)

View File

@@ -1,5 +1,4 @@
"""Network Manager implementation for DBUS."""
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""NetworkConnection objects for Network Manager."""
from dataclasses import dataclass
from ipaddress import IPv4Address, IPv6Address
@@ -59,22 +58,11 @@ class VlanProperties:
parent: str | None
@dataclass(slots=True)
class IpAddress:
"""IP address object for Network Manager."""
address: str
prefix: int
@dataclass(slots=True)
class IpProperties:
"""IP properties object for Network Manager."""
method: str | None
address_data: list[IpAddress] | None
gateway: str | None
dns: list[bytes | int] | None
@dataclass(slots=True)

View File

@@ -1,5 +1,4 @@
"""Network Manager DNS Manager object."""
from ipaddress import ip_address
import logging
from typing import Any

View File

@@ -1,18 +1,17 @@
"""Connection object for Network Manager."""
import logging
from typing import Any
from dbus_fast import Variant
from dbus_fast.aio.message_bus import MessageBus
from ....const import ATTR_METHOD, ATTR_MODE, ATTR_PSK, ATTR_SSID
from ...const import DBUS_NAME_NM
from ...interface import DBusInterface
from ...utils import dbus_connected
from ..configuration import (
ConnectionProperties,
EthernetProperties,
IpAddress,
IpProperties,
MatchProperties,
VlanProperties,
@@ -21,52 +20,30 @@ from ..configuration import (
)
CONF_ATTR_CONNECTION = "connection"
CONF_ATTR_MATCH = "match"
CONF_ATTR_802_ETHERNET = "802-3-ethernet"
CONF_ATTR_802_WIRELESS = "802-11-wireless"
CONF_ATTR_802_WIRELESS_SECURITY = "802-11-wireless-security"
CONF_ATTR_VLAN = "vlan"
CONF_ATTR_IPV4 = "ipv4"
CONF_ATTR_IPV6 = "ipv6"
CONF_ATTR_MATCH = "match"
CONF_ATTR_PATH = "path"
CONF_ATTR_CONNECTION_ID = "id"
CONF_ATTR_CONNECTION_UUID = "uuid"
CONF_ATTR_CONNECTION_TYPE = "type"
CONF_ATTR_CONNECTION_LLMNR = "llmnr"
CONF_ATTR_CONNECTION_MDNS = "mdns"
CONF_ATTR_CONNECTION_AUTOCONNECT = "autoconnect"
CONF_ATTR_CONNECTION_INTERFACE_NAME = "interface-name"
CONF_ATTR_MATCH_PATH = "path"
CONF_ATTR_VLAN_ID = "id"
CONF_ATTR_VLAN_PARENT = "parent"
CONF_ATTR_802_ETHERNET_ASSIGNED_MAC = "assigned-mac-address"
CONF_ATTR_802_WIRELESS_MODE = "mode"
CONF_ATTR_802_WIRELESS_ASSIGNED_MAC = "assigned-mac-address"
CONF_ATTR_802_WIRELESS_SSID = "ssid"
CONF_ATTR_802_WIRELESS_POWERSAVE = "powersave"
CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG = "auth-alg"
CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT = "key-mgmt"
CONF_ATTR_802_WIRELESS_SECURITY_PSK = "psk"
CONF_ATTR_IPV4_METHOD = "method"
CONF_ATTR_IPV4_ADDRESS_DATA = "address-data"
CONF_ATTR_IPV4_GATEWAY = "gateway"
CONF_ATTR_IPV4_DNS = "dns"
CONF_ATTR_IPV6_METHOD = "method"
CONF_ATTR_IPV6_ADDRESS_DATA = "address-data"
CONF_ATTR_IPV6_GATEWAY = "gateway"
CONF_ATTR_IPV6_DNS = "dns"
ATTR_ID = "id"
ATTR_UUID = "uuid"
ATTR_TYPE = "type"
ATTR_PARENT = "parent"
ATTR_ASSIGNED_MAC = "assigned-mac-address"
ATTR_POWERSAVE = "powersave"
ATTR_AUTH_ALG = "auth-alg"
ATTR_KEY_MGMT = "key-mgmt"
ATTR_INTERFACE_NAME = "interface-name"
ATTR_PATH = "path"
IPV4_6_IGNORE_FIELDS = [
"addresses",
"address-data",
"dns",
"dns-data",
"gateway",
"method",
]
@@ -96,7 +73,7 @@ def _merge_settings_attribute(
class NetworkSetting(DBusInterface):
"""Network connection setting object for Network Manager.
https://networkmanager.dev/docs/api/1.48.0/gdbus-org.freedesktop.NetworkManager.Settings.Connection.html
https://developer.gnome.org/NetworkManager/stable/gdbus-org.freedesktop.NetworkManager.Settings.Connection.html
"""
bus_name: str = DBUS_NAME_NM
@@ -170,7 +147,7 @@ class NetworkSetting(DBusInterface):
new_settings,
settings,
CONF_ATTR_CONNECTION,
ignore_current_value=[CONF_ATTR_CONNECTION_INTERFACE_NAME],
ignore_current_value=[ATTR_INTERFACE_NAME],
)
_merge_settings_attribute(new_settings, settings, CONF_ATTR_802_ETHERNET)
_merge_settings_attribute(new_settings, settings, CONF_ATTR_802_WIRELESS)
@@ -215,69 +192,47 @@ class NetworkSetting(DBusInterface):
# See: https://developer-old.gnome.org/NetworkManager/stable/ch01.html
if CONF_ATTR_CONNECTION in data:
self._connection = ConnectionProperties(
data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_ID),
data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_UUID),
data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_TYPE),
data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_INTERFACE_NAME),
data[CONF_ATTR_CONNECTION].get(ATTR_ID),
data[CONF_ATTR_CONNECTION].get(ATTR_UUID),
data[CONF_ATTR_CONNECTION].get(ATTR_TYPE),
data[CONF_ATTR_CONNECTION].get(ATTR_INTERFACE_NAME),
)
if CONF_ATTR_802_ETHERNET in data:
self._ethernet = EthernetProperties(
data[CONF_ATTR_802_ETHERNET].get(CONF_ATTR_802_ETHERNET_ASSIGNED_MAC),
data[CONF_ATTR_802_ETHERNET].get(ATTR_ASSIGNED_MAC),
)
if CONF_ATTR_802_WIRELESS in data:
self._wireless = WirelessProperties(
bytes(
data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_SSID, [])
).decode(),
data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_ASSIGNED_MAC),
data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_MODE),
data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_POWERSAVE),
bytes(data[CONF_ATTR_802_WIRELESS].get(ATTR_SSID, [])).decode(),
data[CONF_ATTR_802_WIRELESS].get(ATTR_ASSIGNED_MAC),
data[CONF_ATTR_802_WIRELESS].get(ATTR_MODE),
data[CONF_ATTR_802_WIRELESS].get(ATTR_POWERSAVE),
)
if CONF_ATTR_802_WIRELESS_SECURITY in data:
self._wireless_security = WirelessSecurityProperties(
data[CONF_ATTR_802_WIRELESS_SECURITY].get(
CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG
),
data[CONF_ATTR_802_WIRELESS_SECURITY].get(
CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT
),
data[CONF_ATTR_802_WIRELESS_SECURITY].get(
CONF_ATTR_802_WIRELESS_SECURITY_PSK
),
data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_AUTH_ALG),
data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_KEY_MGMT),
data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_PSK),
)
if CONF_ATTR_VLAN in data:
self._vlan = VlanProperties(
data[CONF_ATTR_VLAN].get(CONF_ATTR_VLAN_ID),
data[CONF_ATTR_VLAN].get(CONF_ATTR_VLAN_PARENT),
data[CONF_ATTR_VLAN].get(ATTR_ID),
data[CONF_ATTR_VLAN].get(ATTR_PARENT),
)
if CONF_ATTR_IPV4 in data:
address_data = None
if ips := data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_ADDRESS_DATA):
address_data = [IpAddress(ip["address"], ip["prefix"]) for ip in ips]
self._ipv4 = IpProperties(
data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_METHOD),
address_data,
data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_GATEWAY),
data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_DNS),
data[CONF_ATTR_IPV4].get(ATTR_METHOD),
)
if CONF_ATTR_IPV6 in data:
address_data = None
if ips := data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_ADDRESS_DATA):
address_data = [IpAddress(ip["address"], ip["prefix"]) for ip in ips]
self._ipv6 = IpProperties(
data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_METHOD),
address_data,
data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_GATEWAY),
data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_DNS),
data[CONF_ATTR_IPV6].get(ATTR_METHOD),
)
if CONF_ATTR_MATCH in data:
self._match = MatchProperties(
data[CONF_ATTR_MATCH].get(CONF_ATTR_MATCH_PATH)
)
self._match = MatchProperties(data[CONF_ATTR_MATCH].get(ATTR_PATH))
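A hedged, self-contained example of the data shape the property parsing above walks: a nested settings mapping keyed by section, with "address-data" entries turned into small address objects. The dataclass and the sample values are stand-ins, not Supervisor imports:

from dataclasses import dataclass


@dataclass(slots=True)
class IpAddress:
    """Local stand-in for the Network Manager IpAddress dataclass."""

    address: str
    prefix: int


data = {
    "connection": {
        "id": "Supervisor eth0",
        "uuid": "uuid-placeholder",
        "type": "802-3-ethernet",
    },
    "ipv4": {
        "method": "manual",
        "address-data": [{"address": "192.168.1.5", "prefix": 24}],
        "gateway": "192.168.1.1",
    },
}

address_data = None
if ips := data["ipv4"].get("address-data"):
    address_data = [IpAddress(ip["address"], ip["prefix"]) for ip in ips]

print(data["connection"].get("id"))  # Supervisor eth0
print(address_data)                  # [IpAddress(address='192.168.1.5', prefix=24)]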

View File

@@ -1,5 +1,4 @@
"""Payload generators for DBUS communication."""
from __future__ import annotations
import socket
@@ -11,128 +10,22 @@ from dbus_fast import Variant
from ....host.const import InterfaceMethod, InterfaceType
from .. import NetworkManager
from . import (
ATTR_ASSIGNED_MAC,
CONF_ATTR_802_ETHERNET,
CONF_ATTR_802_ETHERNET_ASSIGNED_MAC,
CONF_ATTR_802_WIRELESS,
CONF_ATTR_802_WIRELESS_ASSIGNED_MAC,
CONF_ATTR_802_WIRELESS_MODE,
CONF_ATTR_802_WIRELESS_POWERSAVE,
CONF_ATTR_802_WIRELESS_SECURITY,
CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG,
CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT,
CONF_ATTR_802_WIRELESS_SECURITY_PSK,
CONF_ATTR_802_WIRELESS_SSID,
CONF_ATTR_CONNECTION,
CONF_ATTR_CONNECTION_AUTOCONNECT,
CONF_ATTR_CONNECTION_ID,
CONF_ATTR_CONNECTION_LLMNR,
CONF_ATTR_CONNECTION_MDNS,
CONF_ATTR_CONNECTION_TYPE,
CONF_ATTR_CONNECTION_UUID,
CONF_ATTR_IPV4,
CONF_ATTR_IPV4_ADDRESS_DATA,
CONF_ATTR_IPV4_DNS,
CONF_ATTR_IPV4_GATEWAY,
CONF_ATTR_IPV4_METHOD,
CONF_ATTR_IPV6,
CONF_ATTR_IPV6_ADDRESS_DATA,
CONF_ATTR_IPV6_DNS,
CONF_ATTR_IPV6_GATEWAY,
CONF_ATTR_IPV6_METHOD,
CONF_ATTR_MATCH,
CONF_ATTR_MATCH_PATH,
CONF_ATTR_PATH,
CONF_ATTR_VLAN,
CONF_ATTR_VLAN_ID,
CONF_ATTR_VLAN_PARENT,
)
if TYPE_CHECKING:
from ....host.configuration import Interface
def _get_ipv4_connection_settings(ipv4setting) -> dict:
ipv4 = {}
if not ipv4setting or ipv4setting.method == InterfaceMethod.AUTO:
ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "auto")
elif ipv4setting.method == InterfaceMethod.DISABLED:
ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "disabled")
elif ipv4setting.method == InterfaceMethod.STATIC:
ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "manual")
address_data = []
for address in ipv4setting.address:
address_data.append(
{
"address": Variant("s", str(address.ip)),
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
}
)
ipv4[CONF_ATTR_IPV4_ADDRESS_DATA] = Variant("aa{sv}", address_data)
if ipv4setting.gateway:
ipv4[CONF_ATTR_IPV4_GATEWAY] = Variant("s", str(ipv4setting.gateway))
else:
raise RuntimeError("Invalid IPv4 InterfaceMethod")
if (
ipv4setting
and ipv4setting.nameservers
and ipv4setting.method
in (
InterfaceMethod.AUTO,
InterfaceMethod.STATIC,
)
):
nameservers = ipv4setting.nameservers if ipv4setting else []
ipv4[CONF_ATTR_IPV4_DNS] = Variant(
"au",
[socket.htonl(int(ip_address)) for ip_address in nameservers],
)
return ipv4
def _get_ipv6_connection_settings(ipv6setting) -> dict:
ipv6 = {}
if not ipv6setting or ipv6setting.method == InterfaceMethod.AUTO:
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "auto")
elif ipv6setting.method == InterfaceMethod.DISABLED:
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "link-local")
elif ipv6setting.method == InterfaceMethod.STATIC:
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "manual")
address_data = []
for address in ipv6setting.address:
address_data.append(
{
"address": Variant("s", str(address.ip)),
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
}
)
ipv6[CONF_ATTR_IPV6_ADDRESS_DATA] = Variant("aa{sv}", address_data)
if ipv6setting.gateway:
ipv6[CONF_ATTR_IPV6_GATEWAY] = Variant("s", str(ipv6setting.gateway))
else:
raise RuntimeError("Invalid IPv6 InterfaceMethod")
if (
ipv6setting
and ipv6setting.nameservers
and ipv6setting.method
in (
InterfaceMethod.AUTO,
InterfaceMethod.STATIC,
)
):
nameservers = ipv6setting.nameservers if ipv6setting else []
ipv6[CONF_ATTR_IPV6_DNS] = Variant(
"aay",
[ip_address.packed for ip_address in nameservers],
)
return ipv6
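A hedged aside on the two DNS encodings used in the removed helpers above (and in the inline replacements further down): NetworkManager takes IPv4 DNS servers as 32-bit integers in network byte order (D-Bus signature "au") and IPv6 DNS servers as raw address bytes ("aay"). The printed integer assumes a little-endian host:

from ipaddress import ip_address
import socket

v4 = ip_address("192.168.1.1")
print(socket.htonl(int(v4)))  # 16885952 on a little-endian host -> payload for "au"

v6 = ip_address("2001:db8::1")
print(v6.packed.hex())        # 20010db8000000000000000000000001 -> 16 bytes for "aay"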
def get_connection_from_interface(
interface: Interface,
network_manager: NetworkManager,
@@ -160,31 +53,77 @@ def get_connection_from_interface(
conn: dict[str, dict[str, Variant]] = {
CONF_ATTR_CONNECTION: {
CONF_ATTR_CONNECTION_ID: Variant("s", name),
CONF_ATTR_CONNECTION_UUID: Variant("s", uuid),
CONF_ATTR_CONNECTION_TYPE: Variant("s", iftype),
CONF_ATTR_CONNECTION_LLMNR: Variant("i", 2),
CONF_ATTR_CONNECTION_MDNS: Variant("i", 2),
CONF_ATTR_CONNECTION_AUTOCONNECT: Variant("b", True),
"id": Variant("s", name),
"type": Variant("s", iftype),
"uuid": Variant("s", uuid),
"llmnr": Variant("i", 2),
"mdns": Variant("i", 2),
"autoconnect": Variant("b", True),
},
}
if interface.type != InterfaceType.VLAN:
if interface.path:
conn[CONF_ATTR_MATCH] = {
CONF_ATTR_MATCH_PATH: Variant("as", [interface.path])
}
conn[CONF_ATTR_MATCH] = {CONF_ATTR_PATH: Variant("as", [interface.path])}
else:
conn[CONF_ATTR_CONNECTION]["interface-name"] = Variant("s", interface.name)
conn[CONF_ATTR_IPV4] = _get_ipv4_connection_settings(interface.ipv4setting)
ipv4 = {}
if not interface.ipv4 or interface.ipv4.method == InterfaceMethod.AUTO:
ipv4["method"] = Variant("s", "auto")
elif interface.ipv4.method == InterfaceMethod.DISABLED:
ipv4["method"] = Variant("s", "disabled")
else:
ipv4["method"] = Variant("s", "manual")
ipv4["dns"] = Variant(
"au",
[
socket.htonl(int(ip_address))
for ip_address in interface.ipv4.nameservers
],
)
conn[CONF_ATTR_IPV6] = _get_ipv6_connection_settings(interface.ipv6setting)
adressdata = []
for address in interface.ipv4.address:
adressdata.append(
{
"address": Variant("s", str(address.ip)),
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
}
)
ipv4["address-data"] = Variant("aa{sv}", adressdata)
ipv4["gateway"] = Variant("s", str(interface.ipv4.gateway))
conn[CONF_ATTR_IPV4] = ipv4
ipv6 = {}
if not interface.ipv6 or interface.ipv6.method == InterfaceMethod.AUTO:
ipv6["method"] = Variant("s", "auto")
elif interface.ipv6.method == InterfaceMethod.DISABLED:
ipv6["method"] = Variant("s", "link-local")
else:
ipv6["method"] = Variant("s", "manual")
ipv6["dns"] = Variant(
"aay", [ip_address.packed for ip_address in interface.ipv6.nameservers]
)
adressdata = []
for address in interface.ipv6.address:
adressdata.append(
{
"address": Variant("s", str(address.ip)),
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
}
)
ipv6["address-data"] = Variant("aa{sv}", adressdata)
ipv6["gateway"] = Variant("s", str(interface.ipv6.gateway))
conn[CONF_ATTR_IPV6] = ipv6
if interface.type == InterfaceType.ETHERNET:
conn[CONF_ATTR_802_ETHERNET] = {
CONF_ATTR_802_ETHERNET_ASSIGNED_MAC: Variant("s", "preserve")
}
conn[CONF_ATTR_802_ETHERNET] = {ATTR_ASSIGNED_MAC: Variant("s", "preserve")}
elif interface.type == "vlan":
parent = interface.vlan.interface
if parent in network_manager and (
@@ -193,44 +132,30 @@ def get_connection_from_interface(
parent = parent_connection.uuid
conn[CONF_ATTR_VLAN] = {
CONF_ATTR_VLAN_ID: Variant("u", interface.vlan.id),
CONF_ATTR_VLAN_PARENT: Variant("s", parent),
"id": Variant("u", interface.vlan.id),
"parent": Variant("s", parent),
}
elif interface.type == InterfaceType.WIRELESS:
wireless = {
CONF_ATTR_802_WIRELESS_ASSIGNED_MAC: Variant("s", "preserve"),
CONF_ATTR_802_WIRELESS_MODE: Variant("s", "infrastructure"),
CONF_ATTR_802_WIRELESS_POWERSAVE: Variant("i", 1),
ATTR_ASSIGNED_MAC: Variant("s", "preserve"),
"ssid": Variant("ay", interface.wifi.ssid.encode("UTF-8")),
"mode": Variant("s", "infrastructure"),
"powersave": Variant("i", 1),
}
if interface.wifi and interface.wifi.ssid:
wireless[CONF_ATTR_802_WIRELESS_SSID] = Variant(
"ay", interface.wifi.ssid.encode("UTF-8")
)
conn[CONF_ATTR_802_WIRELESS] = wireless
if interface.wifi and interface.wifi.auth != "open":
if interface.wifi.auth != "open":
wireless["security"] = Variant("s", CONF_ATTR_802_WIRELESS_SECURITY)
wireless_security = {}
if interface.wifi.auth == "wep":
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG] = Variant(
"s", "open"
)
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT] = Variant(
"s", "none"
)
wireless_security["auth-alg"] = Variant("s", "open")
wireless_security["key-mgmt"] = Variant("s", "none")
elif interface.wifi.auth == "wpa-psk":
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG] = Variant(
"s", "open"
)
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT] = Variant(
"s", "wpa-psk"
)
wireless_security["auth-alg"] = Variant("s", "open")
wireless_security["key-mgmt"] = Variant("s", "wpa-psk")
if interface.wifi.psk:
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_PSK] = Variant(
"s", interface.wifi.psk
)
wireless_security["psk"] = Variant("s", interface.wifi.psk)
conn[CONF_ATTR_802_WIRELESS_SECURITY] = wireless_security
return conn
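As a hedged illustration only, roughly the shape get_connection_from_interface returns for a plain DHCP Ethernet interface; it is not an exact trace of either version of the code above, and the uuid and device path are made-up placeholders:

from dbus_fast import Variant

conn = {
    "connection": {
        "id": Variant("s", "Supervisor eth0"),
        "uuid": Variant("s", "11111111-2222-3333-4444-555555555555"),
        "type": Variant("s", "802-3-ethernet"),
        "llmnr": Variant("i", 2),
        "mdns": Variant("i", 2),
        "autoconnect": Variant("b", True),
    },
    "match": {"path": Variant("as", ["platform-fd580000.ethernet"])},
    "ipv4": {"method": Variant("s", "auto")},
    "ipv6": {"method": Variant("s", "auto")},
    "802-3-ethernet": {"assigned-mac-address": Variant("s", "preserve")},
}
print(list(conn))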

View File

@@ -1,5 +1,4 @@
"""Network Manager implementation for DBUS."""
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Wireless object for Network Manager."""
import asyncio
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""D-Bus interface for systemd-resolved."""
from __future__ import annotations
import logging

View File

@@ -1,5 +1,4 @@
"""Interface to systemd-timedate over D-Bus."""
from datetime import datetime
import logging

View File

@@ -1,5 +1,4 @@
"""Interface to UDisks2 over D-Bus."""
import asyncio
import logging
from typing import Any

View File

@@ -1,5 +1,4 @@
"""Interface to UDisks2 Block Device over D-Bus."""
import asyncio
from collections.abc import Callable
from pathlib import Path

View File

@@ -1,5 +1,4 @@
"""Handle discover message for Home Assistant."""
from __future__ import annotations
from contextlib import suppress

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor add-on Docker object."""
from __future__ import annotations
from collections.abc import Awaitable
@@ -709,28 +708,6 @@ class DockerAddon(DockerInterface):
with suppress(DockerError):
await self.cleanup()
@Job(name="docker_addon_cleanup", limit=JobExecutionLimit.GROUP_WAIT)
async def cleanup(
self,
old_image: str | None = None,
image: str | None = None,
version: AwesomeVersion | None = None,
) -> None:
"""Check if old version exists and cleanup other versions of image not in use."""
await self.sys_run_in_executor(
self.sys_docker.cleanup_old_images,
(image := image or self.image),
version or self.version,
{old_image} if old_image else None,
keep_images={
f"{addon.image}:{addon.version}"
for addon in self.sys_addons.installed
if addon.slug != self.addon.slug
and addon.image
and addon.image in {old_image, image}
},
)
@Job(
name="docker_addon_write_stdin",
limit=JobExecutionLimit.GROUP_ONCE,
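A hedged sketch of the keep_images selection in the add-on cleanup above: tags of other installed add-ons that share this add-on's old or new image name are collected so they survive the cleanup. The data classes and values are stand-ins for the example only:

from dataclasses import dataclass


@dataclass
class InstalledAddon:
    slug: str
    image: str
    version: str


installed = [
    InstalledAddon("core_ssh", "homeassistant/aarch64-addon-ssh", "9.14.0"),
    InstalledAddon("my_fork", "homeassistant/aarch64-addon-example", "1.2.0"),
]
old_image = "homeassistant/aarch64-addon-example"
image = "homeassistant/aarch64-addon-example"
slug = "local_example"

keep_images = {
    f"{addon.image}:{addon.version}"
    for addon in installed
    if addon.slug != slug and addon.image and addon.image in {old_image, image}
}
print(keep_images)  # {'homeassistant/aarch64-addon-example:1.2.0'}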

View File

@@ -1,5 +1,4 @@
"""Audio docker object."""
import logging
import docker

View File

@@ -1,5 +1,4 @@
"""HA Cli docker object."""
import logging
from ..coresys import CoreSysAttributes

View File

@@ -1,5 +1,4 @@
"""Docker constants."""
from enum import StrEnum
from docker.types import Mount

View File

@@ -1,5 +1,4 @@
"""DNS docker object."""
import logging
from docker.types import Mount

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Docker object."""
from collections.abc import Awaitable
from ipaddress import IPv4Address
import logging

View File

@@ -1,5 +1,4 @@
"""Interface class for Supervisor Docker object."""
from __future__ import annotations
from collections import defaultdict
@@ -429,17 +428,15 @@ class DockerInterface(JobGroup):
limit=JobExecutionLimit.GROUP_ONCE,
on_condition=DockerJobError,
)
async def remove(self, *, remove_image: bool = True) -> None:
async def remove(self) -> None:
"""Remove Docker images."""
# Cleanup container
with suppress(DockerError):
await self.stop()
if remove_image:
await self.sys_run_in_executor(
self.sys_docker.remove_image, self.image, self.version
)
await self.sys_run_in_executor(
self.sys_docker.remove_image, self.image, self.version
)
self._meta = None
@Job(
@@ -512,14 +509,14 @@ class DockerInterface(JobGroup):
return b""
@Job(name="docker_interface_cleanup", limit=JobExecutionLimit.GROUP_WAIT)
async def cleanup(
def cleanup(
self,
old_image: str | None = None,
image: str | None = None,
version: AwesomeVersion | None = None,
) -> None:
) -> Awaitable[None]:
"""Check if old version exists and cleanup."""
await self.sys_run_in_executor(
return self.sys_run_in_executor(
self.sys_docker.cleanup_old_images,
image or self.image,
version or self.version,
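A hedged sketch of the difference the cleanup() change above revolves around: an async def that awaits the executor call versus a plain def that returns the awaitable for the caller to await. run_in_executor stands in for sys_run_in_executor here; the names are assumptions for the example:

import asyncio
from collections.abc import Awaitable


def _work(name: str) -> None:
    print(f"cleaning {name}")


async def cleanup_async(name: str) -> None:
    await asyncio.get_running_loop().run_in_executor(None, _work, name)


def cleanup_returning(name: str) -> Awaitable[None]:
    return asyncio.get_running_loop().run_in_executor(None, _work, name)


async def main() -> None:
    await cleanup_async("a")
    await cleanup_returning("b")  # the caller awaits the returned future


asyncio.run(main())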

View File

@@ -1,5 +1,4 @@
"""Manager for Supervisor Docker."""
from contextlib import suppress
from ipaddress import IPv4Address
import logging
@@ -548,13 +547,10 @@ class DockerAPI:
current_image: str,
current_version: AwesomeVersion,
old_images: set[str] | None = None,
*,
keep_images: set[str] | None = None,
) -> None:
"""Clean up old versions of an image."""
image = f"{current_image}:{current_version!s}"
try:
keep: set[str] = {self.images.get(image).id}
current: Image = self.images.get(f"{current_image}:{current_version!s}")
except ImageNotFound:
raise DockerNotFound(
f"{current_image} not found for cleanup", _LOGGER.warning
@@ -564,19 +560,6 @@ class DockerAPI:
f"Can't get {current_image} for cleanup", _LOGGER.warning
) from err
if keep_images:
keep_images -= {image}
try:
for image in keep_images:
# If it's not found, there is no need to preserve it from removal
with suppress(ImageNotFound):
keep.add(self.images.get(image).id)
except (DockerException, requests.RequestException) as err:
raise DockerError(
f"Failed to get one or more images from {keep} during cleanup",
_LOGGER.warning,
) from err
# Cleanup old and current
image_names = list(
old_images | {current_image} if old_images else {current_image}
@@ -589,7 +572,7 @@ class DockerAPI:
) from err
for image in images_list:
if image.id in keep:
if current.id == image.id:
continue
with suppress(DockerException, requests.RequestException):

View File

@@ -1,5 +1,4 @@
"""HA Cli docker object."""
import logging
from ..coresys import CoreSysAttributes

View File

@@ -1,5 +1,4 @@
"""Internal network manager for Supervisor."""
from contextlib import suppress
from ipaddress import IPv4Address
import logging

View File

@@ -1,5 +1,4 @@
"""Observer docker object."""
import logging
from ..const import DOCKER_NETWORK_MASK

View File

@@ -1,5 +1,4 @@
"""Calc and represent docker stats data."""
from contextlib import suppress

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Docker object."""
from collections.abc import Awaitable
from ipaddress import IPv4Address
import logging

View File

@@ -1,5 +1,4 @@
"""Core Exceptions."""
from collections.abc import Callable
@@ -340,12 +339,6 @@ class APIAddonNotInstalled(APIError):
"""Not installed addon requested at addons API."""
class APIDBMigrationInProgress(APIError):
"""Service is unavailable due to an offline DB migration is in progress."""
status = 503
# Service / Discovery

Some files were not shown because too many files have changed in this diff.