Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-12-05 07:28:07 +00:00)

Compare commits: refactor-d...2025.12.2 (15 commits)
Commit SHAs:
382f0e8aef
3b3db2a9bc
7895bc9007
81b7e54b18
d203f20b7f
fea8159ccf
aeb8e59da4
bee0a4482e
37cc078144
20f993e891
d220fa801f
abeee95eb1
50d31202ae
bac072a985
2fc6a7dcab
@@ -1,6 +1,7 @@
 # General files
 .git
 .github
 .gitkeep
 .devcontainer
 .vscode
.github/workflows/builder.yml (vendored, 85 lines changed)

@@ -53,10 +53,10 @@ jobs:
       version: ${{ steps.version.outputs.version }}
       channel: ${{ steps.version.outputs.channel }}
       publish: ${{ steps.version.outputs.publish }}
-      requirements: ${{ steps.requirements.outputs.changed }}
+      build_wheels: ${{ steps.requirements.outputs.build_wheels }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          fetch-depth: 0
@@ -72,20 +72,25 @@ jobs:
       - name: Get changed files
         id: changed_files
-        if: steps.version.outputs.publish == 'false'
+        if: github.event_name != 'release'
         uses: masesgroup/retrieve-changed-files@491e80760c0e28d36ca6240a27b1ccb8e1402c13 # v3.0.0

       - name: Check if requirements files changed
         id: requirements
         run: |
           if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
             echo "changed=true" >> "$GITHUB_OUTPUT"
+          fi
+          # No wheels build necessary for releases
+          if [[ "${{ github.event_name }}" == "release" ]]; then
+            echo "build_wheels=false" >> "$GITHUB_OUTPUT"
+          elif [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements\.txt|build\.yaml|\.github/workflows/builder\.yml) ]]; then
+            echo "build_wheels=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "build_wheels=false" >> "$GITHUB_OUTPUT"
           fi

   build:
     name: Build ${{ matrix.arch }} supervisor
     needs: init
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.runs-on }}
     permissions:
       contents: read
       id-token: write
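In short, the init job now publishes a build_wheels output alongside changed: wheels are never built for release events (the published index already serves them), and otherwise only when the requirements, the build config, or the builder workflow itself changed. The decision tree, sketched in Python for clarity (the changed-files string is whatever the changed-files action reports; this is an illustration, not code from the repository):

    import re

    def should_build_wheels(event_name: str, changed_files: str) -> bool:
        """Mirror of the shell logic above: skip wheels on releases, else
        rebuild when dependency or builder inputs changed."""
        if event_name == "release":
            return False
        pattern = r"(requirements\.txt|build\.yaml|\.github/workflows/builder\.yml)"
        return re.search(pattern, changed_files) is not None

    assert not should_build_wheels("release", "requirements.txt")
    assert should_build_wheels("pull_request", "requirements.txt setup.py")
    assert not should_build_wheels("pull_request", "README.md")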
@@ -93,34 +98,66 @@ jobs:
     strategy:
       matrix:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
+        include:
+          - runs-on: ubuntu-24.04
+          - runs-on: ubuntu-24.04-arm
+            arch: aarch64
+    env:
+      WHEELS_ABI: cp313
+      WHEELS_TAG: musllinux_1_2
+      WHEELS_APK_DEPS: "libffi-dev;openssl-dev;yaml-dev"
+      WHEELS_SKIP_BINARY: aiohttp
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          fetch-depth: 0

-      - name: Write env-file
-        if: needs.init.outputs.requirements == 'true'
+      - name: Write env-file for wheels build
+        if: needs.init.outputs.build_wheels == 'true'
        run: |
          (
            # Fix out of memory issues with rust
            echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
          ) > .env_file

-      # home-assistant/wheels doesn't support sha pinning
-      - name: Build wheels
-        if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2025.11.0
+      - name: Build and publish wheels
+        if: needs.init.outputs.build_wheels == 'true' && needs.init.outputs.publish == 'true'
+        uses: home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
        with:
-          abi: cp313
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
-          apk: "libffi-dev;openssl-dev;yaml-dev"
-          skip-binary: aiohttp
+          abi: ${{ env.WHEELS_ABI }}
+          tag: ${{ env.WHEELS_TAG }}
+          arch: ${{ matrix.arch }}
+          apk: ${{ env.WHEELS_APK_DEPS }}
+          skip-binary: ${{ env.WHEELS_SKIP_BINARY }}
          env-file: true
          requirements: "requirements.txt"

+      - name: Build local wheels
+        if: needs.init.outputs.build_wheels == 'true' && needs.init.outputs.publish == 'false'
+        uses: home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
+        with:
+          wheels-host: ""
+          wheels-user: ""
+          wheels-key: ""
+          local-wheels-repo-path: "wheels/"
+          abi: ${{ env.WHEELS_ABI }}
+          tag: ${{ env.WHEELS_TAG }}
+          arch: ${{ matrix.arch }}
+          apk: ${{ env.WHEELS_APK_DEPS }}
+          skip-binary: ${{ env.WHEELS_SKIP_BINARY }}
+          env-file: true
+          requirements: "requirements.txt"

+      - name: Upload local wheels artifact
+        if: needs.init.outputs.build_wheels == 'true' && needs.init.outputs.publish == 'false'
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        with:
+          name: wheels-${{ matrix.arch }}
+          path: wheels
+          retention-days: 1

       - name: Set version
         if: needs.init.outputs.publish == 'true'
         uses: home-assistant/actions/helpers/version@master
@@ -167,6 +204,7 @@ jobs:
       - name: Build supervisor
         uses: home-assistant/builder@2025.11.0
         with:
           image: ${{ matrix.arch }}
           args: |
             $BUILD_ARGS \
             --${{ matrix.arch }} \
@@ -181,7 +219,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

       - name: Initialize git
         if: needs.init.outputs.publish == 'true'
@@ -206,7 +244,14 @@ jobs:
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

+      - name: Download local wheels artifact
+        if: needs.init.outputs.build_wheels == 'true' && needs.init.outputs.publish == 'false'
+        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        with:
+          name: wheels-amd64
+          path: wheels

       # home-assistant/builder doesn't support sha pinning
       - name: Build the Supervisor
.github/workflows/ci.yaml (vendored, 20 lines changed)

@@ -26,7 +26,7 @@ jobs:
     name: Prepare Python dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Set up Python
         id: python
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0

@@ -68,7 +68,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         id: python

@@ -111,7 +111,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         id: python

@@ -154,7 +154,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"

@@ -169,7 +169,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         id: python

@@ -213,7 +213,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         id: python

@@ -257,7 +257,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         id: python

@@ -293,7 +293,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         id: python

@@ -339,7 +339,7 @@ jobs:
     name: Run tests Python ${{ needs.prepare.outputs.python-version }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         id: python

@@ -398,7 +398,7 @@ jobs:
     needs: ["pytest", "prepare"]
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
         uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         id: python
.github/workflows/release-drafter.yml (vendored, 2 lines changed)

@@ -11,7 +11,7 @@ jobs:
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          fetch-depth: 0
.github/workflows/sentry.yaml (vendored, 2 lines changed)

@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Sentry Release
         uses: getsentry/action-release@128c5058bbbe93c8e02147fe0a9c713f166259a6 # v3.4.0
         env:
.github/workflows/stale.yml (vendored, 2 lines changed)

@@ -9,7 +9,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+      - uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 30
.github/workflows/update_frontend.yml (vendored, 4 lines changed)

@@ -14,7 +14,7 @@ jobs:
       latest_version: ${{ steps.latest_frontend_version.outputs.latest_tag }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Get latest frontend release
         id: latest_frontend_version
         uses: abatilo/release-info-action@32cb932219f1cee3fc4f4a298fd65ead5d35b661 # v1.3.3

@@ -49,7 +49,7 @@ jobs:
     if: needs.check-version.outputs.skip != 'true'
     steps:
       - name: Checkout code
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
       - name: Clear www folder
         run: |
           rm -rf supervisor/api/panel/*
.gitignore (vendored, 5 lines changed)

@@ -24,6 +24,9 @@ var/
 .installed.cfg
 *.egg

+# Local wheels
+wheels/**/*.whl
+
 # PyInstaller
 # Usually these files are written by a python script from a template
 # before PyInstaller builds the exe, so as to inject date/other infos into it.

@@ -102,4 +105,4 @@ ENV/
 /.dmypy.json

 # Mac
-.DS_Store
+.DS_Store
Dockerfile (12 lines changed)

@@ -32,7 +32,17 @@ RUN \
 # Install requirements
 RUN \
     --mount=type=bind,source=./requirements.txt,target=/usr/src/requirements.txt \
-    uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt
+    --mount=type=bind,source=./wheels,target=/usr/src/wheels \
+    if ls /usr/src/wheels/musllinux/* >/dev/null 2>&1; then \
+        LOCAL_WHEELS=/usr/src/wheels/musllinux; \
+        echo "Using local wheels from: $LOCAL_WHEELS"; \
+    else \
+        LOCAL_WHEELS=; \
+        echo "No local wheels found"; \
+    fi && \
+    uv pip install --compile-bytecode --no-cache --no-build \
+        -r requirements.txt \
+        ${LOCAL_WHEELS:+--find-links $LOCAL_WHEELS}

 # Install Home Assistant Supervisor
 COPY . supervisor
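The Dockerfile change lets the same image build consume wheels either from the published index or from the local-wheels artifact downloaded by the workflow. The control flow, expressed as a Python sketch for clarity (the path and the --find-links behaviour mirror the shell logic above; this is an illustration, not code from the repository):

    from pathlib import Path

    def pip_install_args(wheels_dir: Path = Path("/usr/src/wheels/musllinux")) -> list[str]:
        """Build the `uv pip install` argument list, preferring local wheels if present."""
        args = ["--compile-bytecode", "--no-cache", "--no-build", "-r", "requirements.txt"]
        # Mirrors `ls /usr/src/wheels/musllinux/*`: only use the directory if it has content.
        if wheels_dir.is_dir() and any(wheels_dir.iterdir()):
            print(f"Using local wheels from: {wheels_dir}")
            args += ["--find-links", str(wheels_dir)]
        else:
            print("No local wheels found")
        return args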
@@ -10,7 +10,7 @@ pytest-timeout==2.4.0
 pytest==9.0.1
 ruff==0.14.7
 time-machine==3.1.0
-types-docker==7.1.0.20251129
+types-docker==7.1.0.20251202
 types-pyyaml==6.0.12.20250915
 types-requests==2.32.4.20250913
 urllib3==2.5.0
@@ -66,13 +66,22 @@ from ..docker.const import ContainerState
 from ..docker.monitor import DockerContainerStateEvent
 from ..docker.stats import DockerStats
 from ..exceptions import (
-    AddonConfigurationError,
+    AddonBackupMetadataInvalidError,
+    AddonBuildFailedUnknownError,
+    AddonConfigurationInvalidError,
+    AddonNotRunningError,
     AddonNotSupportedError,
+    AddonNotSupportedWriteStdinError,
+    AddonPrePostBackupCommandReturnedError,
     AddonsError,
     AddonsJobError,
+    AddonUnknownError,
+    BackupRestoreUnknownError,
     ConfigurationFileError,
+    DockerBuildError,
     DockerError,
     HostAppArmorError,
+    StoreAddonNotFoundError,
 )
 from ..hardware.data import Device
 from ..homeassistant.const import WSEvent
@@ -235,7 +244,7 @@ class Addon(AddonModel):
             await self.instance.check_image(self.version, default_image, self.arch)
         except DockerError:
             _LOGGER.info("No %s addon Docker image %s found", self.slug, self.image)
-            with suppress(DockerError):
+            with suppress(DockerError, AddonNotSupportedError):
                 await self.instance.install(self.version, default_image, arch=self.arch)

         self.persist[ATTR_IMAGE] = default_image
@@ -718,18 +727,16 @@ class Addon(AddonModel):
             options = self.schema.validate(self.options)
             await self.sys_run_in_executor(write_json_file, self.path_options, options)
         except vol.Invalid as ex:
-            _LOGGER.error(
-                "Add-on %s has invalid options: %s",
-                self.slug,
-                humanize_error(self.options, ex),
-            )
-        except ConfigurationFileError:
-            _LOGGER.error("Add-on %s can't write options", self.slug)
-        else:
-            _LOGGER.debug("Add-on %s write options: %s", self.slug, options)
-            return
-
-        raise AddonConfigurationError()
+            raise AddonConfigurationInvalidError(
+                _LOGGER.error,
+                addon=self.slug,
+                validation_error=humanize_error(self.options, ex),
+            ) from None
+        except ConfigurationFileError as err:
+            raise AddonUnknownError(addon=self.slug) from err
+
+        _LOGGER.debug("Add-on %s write options: %s", self.slug, options)

     @Job(
         name="addon_unload",
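The hunks in this file all follow one pattern: a generic AddonsError with an inline message becomes a dedicated exception type whose constructor takes an optional logger callable plus keyword fields that fill a message template. A minimal sketch of that shape, assuming a base class along these lines (the actual definitions live in supervisor/exceptions.py and may differ):

    import logging
    from collections.abc import Callable

    _LOGGER = logging.getLogger(__name__)

    class AddonError(Exception):
        """Sketch of a base: format a template from keyword fields, optionally log it."""

        message_template = "Unknown error in addon {addon}"

        def __init__(self, logger: Callable[..., None] | None = None, **fields: str) -> None:
            message = self.message_template.format(**fields)
            if logger is not None:
                logger(message)
            super().__init__(message)

    class AddonConfigurationInvalidError(AddonError):
        """Raised when an add-on's options fail schema validation."""

        message_template = "Add-on {addon} has invalid options: {validation_error}"

    # Usage, matching the hunk above (values are illustrative):
    # raise AddonConfigurationInvalidError(
    #     _LOGGER.error, addon="core_ssh", validation_error="expected int for 'port'"
    # ) from None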
@@ -772,7 +779,7 @@ class Addon(AddonModel):
     async def install(self) -> None:
         """Install and setup this addon."""
         if not self.addon_store:
-            raise AddonsError("Missing from store, cannot install!")
+            raise StoreAddonNotFoundError(addon=self.slug)

         await self.sys_addons.data.install(self.addon_store)
@@ -793,9 +800,17 @@ class Addon(AddonModel):
             await self.instance.install(
                 self.latest_version, self.addon_store.image, arch=self.arch
             )
-        except DockerError as err:
+        except AddonsError:
             await self.sys_addons.data.uninstall(self)
-            raise AddonsError() from err
+            raise
+        except DockerBuildError as err:
+            _LOGGER.error("Could not build image for addon %s: %s", self.slug, err)
+            await self.sys_addons.data.uninstall(self)
+            raise AddonBuildFailedUnknownError(addon=self.slug) from err
+        except DockerError as err:
+            _LOGGER.error("Could not pull image to update addon %s: %s", self.slug, err)
+            await self.sys_addons.data.uninstall(self)
+            raise AddonUnknownError(addon=self.slug) from err

         # Finish initialization and set up listeners
         await self.load()
@@ -819,7 +834,8 @@ class Addon(AddonModel):
         try:
             await self.instance.remove(remove_image=remove_image)
         except DockerError as err:
-            raise AddonsError() from err
+            _LOGGER.error("Could not remove image for addon %s: %s", self.slug, err)
+            raise AddonUnknownError(addon=self.slug) from err

         self.state = AddonState.UNKNOWN
@@ -884,7 +900,7 @@ class Addon(AddonModel):
         if it was running. Else nothing is returned.
         """
         if not self.addon_store:
-            raise AddonsError("Missing from store, cannot update!")
+            raise StoreAddonNotFoundError(addon=self.slug)

         old_image = self.image
         # Cache data to prevent races with other updates to global
@@ -892,8 +908,12 @@ class Addon(AddonModel):

         try:
             await self.instance.update(store.version, store.image, arch=self.arch)
+        except DockerBuildError as err:
+            _LOGGER.error("Could not build image for addon %s: %s", self.slug, err)
+            raise AddonBuildFailedUnknownError(addon=self.slug) from err
         except DockerError as err:
-            raise AddonsError() from err
+            _LOGGER.error("Could not pull image to update addon %s: %s", self.slug, err)
+            raise AddonUnknownError(addon=self.slug) from err

         # Stop the addon if running
         if (last_state := self.state) in {AddonState.STARTED, AddonState.STARTUP}:
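Several hunks introduce the same layered handling: DockerBuildError is caught before the broader DockerError, so the specific failure wins and the generic pull failure is the fallback. Order matters because Python tries except clauses top to bottom; a runnable sketch of the idea, assuming DockerBuildError subclasses DockerError (which is what the clause ordering in these hunks implies):

    class DockerError(Exception):
        """Generic Docker operation failure."""

    class DockerBuildError(DockerError):
        """A Docker image build failed."""

    def handle(err_cls: type[Exception]) -> str:
        try:
            raise err_cls()
        except DockerBuildError:
            # Must come first: a DockerBuildError is also a DockerError,
            # so the broader clause below would otherwise swallow it.
            return "build failed"
        except DockerError:
            return "pull failed"

    assert handle(DockerBuildError) == "build failed"
    assert handle(DockerError) == "pull failed"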
@@ -935,12 +955,23 @@ class Addon(AddonModel):
         """
         last_state: AddonState = self.state
         try:
-            # remove docker container but not addon config
+            # remove docker container and image but not addon config
             try:
                 await self.instance.remove()
-                await self.instance.install(self.version)
             except DockerError as err:
-                raise AddonsError() from err
+                _LOGGER.error("Could not remove image for addon %s: %s", self.slug, err)
+                raise AddonUnknownError(addon=self.slug) from err
+
+            try:
+                await self.instance.install(self.version)
+            except DockerBuildError as err:
+                _LOGGER.error("Could not build image for addon %s: %s", self.slug, err)
+                raise AddonBuildFailedUnknownError(addon=self.slug) from err
+            except DockerError as err:
+                _LOGGER.error(
+                    "Could not pull image to update addon %s: %s", self.slug, err
+                )
+                raise AddonUnknownError(addon=self.slug) from err

             if self.addon_store:
                 await self.sys_addons.data.update(self.addon_store)
@@ -1111,8 +1142,9 @@ class Addon(AddonModel):
         try:
             await self.instance.run()
         except DockerError as err:
+            _LOGGER.error("Could not start container for addon %s: %s", self.slug, err)
             self.state = AddonState.ERROR
-            raise AddonsError() from err
+            raise AddonUnknownError(addon=self.slug) from err

         return self.sys_create_task(self._wait_for_startup())
@@ -1127,8 +1159,9 @@ class Addon(AddonModel):
         try:
             await self.instance.stop()
         except DockerError as err:
+            _LOGGER.error("Could not stop container for addon %s: %s", self.slug, err)
             self.state = AddonState.ERROR
-            raise AddonsError() from err
+            raise AddonUnknownError(addon=self.slug) from err

     @Job(
         name="addon_restart",
@@ -1161,9 +1194,15 @@ class Addon(AddonModel):
     async def stats(self) -> DockerStats:
         """Return stats of container."""
         try:
+            if not await self.is_running():
+                raise AddonNotRunningError(_LOGGER.warning, addon=self.slug)
+
             return await self.instance.stats()
         except DockerError as err:
-            raise AddonsError() from err
+            _LOGGER.error(
+                "Could not get stats of container for addon %s: %s", self.slug, err
+            )
+            raise AddonUnknownError(addon=self.slug) from err

     @Job(
         name="addon_write_stdin",
@@ -1173,14 +1212,18 @@ class Addon(AddonModel):
     async def write_stdin(self, data) -> None:
         """Write data to add-on stdin."""
         if not self.with_stdin:
-            raise AddonNotSupportedError(
-                f"Add-on {self.slug} does not support writing to stdin!", _LOGGER.error
-            )
+            raise AddonNotSupportedWriteStdinError(_LOGGER.error, addon=self.slug)

         try:
-            return await self.instance.write_stdin(data)
+            if not await self.is_running():
+                raise AddonNotRunningError(_LOGGER.warning, addon=self.slug)
+
+            await self.instance.write_stdin(data)
         except DockerError as err:
-            raise AddonsError() from err
+            _LOGGER.error(
+                "Could not write stdin to container for addon %s: %s", self.slug, err
+            )
+            raise AddonUnknownError(addon=self.slug) from err

     async def _backup_command(self, command: str) -> None:
         try:
@@ -1189,15 +1232,14 @@ class Addon(AddonModel):
                 _LOGGER.debug(
                     "Pre-/Post backup command failed with: %s", command_return.output
                 )
-                raise AddonsError(
-                    f"Pre-/Post backup command returned error code: {command_return.exit_code}",
-                    _LOGGER.error,
+                raise AddonPrePostBackupCommandReturnedError(
+                    _LOGGER.error, addon=self.slug, exit_code=command_return.exit_code
                 )
         except DockerError as err:
-            raise AddonsError(
-                f"Failed running pre-/post backup command {command}: {str(err)}",
-                _LOGGER.error,
-            ) from err
+            _LOGGER.error(
+                "Failed running pre-/post backup command %s: %s", command, err
+            )
+            raise AddonUnknownError(addon=self.slug) from err

     @Job(
         name="addon_begin_backup",
@@ -1286,15 +1328,14 @@ class Addon(AddonModel):
             try:
                 self.instance.export_image(temp_path.joinpath("image.tar"))
             except DockerError as err:
-                raise AddonsError() from err
+                raise BackupRestoreUnknownError() from err

             # Store local configs/state
             try:
                 write_json_file(temp_path.joinpath("addon.json"), metadata)
             except ConfigurationFileError as err:
-                raise AddonsError(
-                    f"Can't save meta for {self.slug}", _LOGGER.error
-                ) from err
+                _LOGGER.error("Can't save meta for %s: %s", self.slug, err)
+                raise BackupRestoreUnknownError() from err

             # Store AppArmor Profile
             if apparmor_profile:
@@ -1304,9 +1345,7 @@ class Addon(AddonModel):
                         apparmor_profile, profile_backup_file
                     )
                 except HostAppArmorError as err:
-                    raise AddonsError(
-                        "Can't backup AppArmor profile", _LOGGER.error
-                    ) from err
+                    raise BackupRestoreUnknownError() from err

             # Write tarfile
             with tar_file as backup:
@@ -1360,7 +1399,8 @@ class Addon(AddonModel):
             )
             _LOGGER.info("Finish backup for addon %s", self.slug)
         except (tarfile.TarError, OSError, AddFileError) as err:
-            raise AddonsError(f"Can't write tarfile: {err}", _LOGGER.error) from err
+            _LOGGER.error("Can't write backup tarfile for addon %s: %s", self.slug, err)
+            raise BackupRestoreUnknownError() from err
         finally:
             if was_running:
                 wait_for_start = await self.end_backup()
@@ -1402,28 +1442,24 @@ class Addon(AddonModel):
         try:
             tmp, data = await self.sys_run_in_executor(_extract_tarfile)
         except tarfile.TarError as err:
-            raise AddonsError(
-                f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
-            ) from err
+            _LOGGER.error("Can't extract backup tarfile for %s: %s", self.slug, err)
+            raise BackupRestoreUnknownError() from err
         except ConfigurationFileError as err:
-            raise AddonsError() from err
+            raise AddonUnknownError(addon=self.slug) from err

         try:
             # Validate
             try:
                 data = SCHEMA_ADDON_BACKUP(data)
             except vol.Invalid as err:
-                raise AddonsError(
-                    f"Can't validate {self.slug}, backup data: {humanize_error(data, err)}",
+                raise AddonBackupMetadataInvalidError(
                     _LOGGER.error,
+                    addon=self.slug,
+                    validation_error=humanize_error(data, err),
                 ) from err

-            # If available
-            if not self._available(data[ATTR_SYSTEM]):
-                raise AddonNotSupportedError(
-                    f"Add-on {self.slug} is not available for this platform",
-                    _LOGGER.error,
-                )
+            # Validate availability. Raises if not
+            self._validate_availability(data[ATTR_SYSTEM], logger=_LOGGER.error)

             # Restore local add-on information
             _LOGGER.info("Restore config for addon %s", self.slug)
@@ -1482,9 +1518,10 @@ class Addon(AddonModel):
             try:
                 await self.sys_run_in_executor(_restore_data)
             except shutil.Error as err:
-                raise AddonsError(
-                    f"Can't restore origin data: {err}", _LOGGER.error
-                ) from err
+                _LOGGER.error(
+                    "Can't restore origin data for %s: %s", self.slug, err
+                )
+                raise BackupRestoreUnknownError() from err

             # Restore AppArmor
             profile_file = Path(tmp.name, "apparmor.txt")
@@ -1495,10 +1532,11 @@ class Addon(AddonModel):
                     )
                 except HostAppArmorError as err:
                     _LOGGER.error(
-                        "Can't restore AppArmor profile for add-on %s",
+                        "Can't restore AppArmor profile for add-on %s: %s",
                         self.slug,
+                        err,
                     )
-                    raise AddonsError() from err
+                    raise BackupRestoreUnknownError() from err

         finally:
             # Is add-on loaded
@@ -5,6 +5,7 @@ from __future__ import annotations

 import base64
+from functools import cached_property
 import json
 import logging
 from pathlib import Path
 from typing import TYPE_CHECKING, Any
@@ -23,15 +24,22 @@ from ..const import (
     CpuArch,
 )
 from ..coresys import CoreSys, CoreSysAttributes
-from ..docker.const import DOCKER_HUB
+from ..docker.const import DOCKER_HUB, DOCKER_HUB_LEGACY
 from ..docker.interface import MAP_ARCH
-from ..exceptions import ConfigurationFileError, HassioArchNotFound
+from ..exceptions import (
+    AddonBuildArchitectureNotSupportedError,
+    AddonBuildDockerfileMissingError,
+    ConfigurationFileError,
+    HassioArchNotFound,
+)
 from ..utils.common import FileConfiguration, find_one_filetype
 from .validate import SCHEMA_BUILD_CONFIG

 if TYPE_CHECKING:
     from .manager import AnyAddon

 _LOGGER: logging.Logger = logging.getLogger(__name__)


 class AddonBuild(FileConfiguration, CoreSysAttributes):
     """Handle build options for add-ons."""
@@ -112,7 +120,7 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
             return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
         return self.addon.path_location.joinpath("Dockerfile")

-    async def is_valid(self) -> bool:
+    async def is_valid(self) -> None:
         """Return true if the build env is valid."""

         def build_is_valid() -> bool:

@@ -124,9 +132,17 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
             )

         try:
-            return await self.sys_run_in_executor(build_is_valid)
+            if not await self.sys_run_in_executor(build_is_valid):
+                raise AddonBuildDockerfileMissingError(
+                    _LOGGER.error, addon=self.addon.slug
+                )
         except HassioArchNotFound:
-            return False
+            raise AddonBuildArchitectureNotSupportedError(
+                _LOGGER.error,
+                addon=self.addon.slug,
+                addon_arch_list=self.addon.supported_arch,
+                system_arch_list=[arch.value for arch in self.sys_arch.supported],
+            ) from None

     def get_docker_config_json(self) -> str | None:
         """Generate Docker config.json content with registry credentials for base image.
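Changing is_valid from returning a bool to raising moves the error detail to where it is known; call sites shrink from check-and-raise to a single awaited call (see the docker/addon.py hunk near the end of this diff). A self-contained sketch of the shape, with hypothetical names standing in for the real classes:

    import asyncio

    class BuildEnvError(Exception):
        """Hypothetical stand-in for the specific build exceptions in this diff."""

    class BuildEnv:
        def __init__(self, dockerfile_exists: bool) -> None:
            self.dockerfile_exists = dockerfile_exists

        async def is_valid(self) -> None:
            # After the change: raise with the precise reason instead of returning False.
            if not self.dockerfile_exists:
                raise BuildEnvError("Dockerfile is missing")

    async def main() -> None:
        await BuildEnv(dockerfile_exists=True).is_valid()   # passes silently
        try:
            await BuildEnv(dockerfile_exists=False).is_valid()
        except BuildEnvError as err:
            print(f"build rejected: {err}")

    asyncio.run(main())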
@@ -155,8 +171,11 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):

         # Use the actual registry URL for the key
         # Docker Hub uses "https://index.docker.io/v1/" as the key
+        # Support both docker.io (official) and hub.docker.com (legacy)
         registry_key = (
-            "https://index.docker.io/v1/" if registry == DOCKER_HUB else registry
+            "https://index.docker.io/v1/"
+            if registry in (DOCKER_HUB, DOCKER_HUB_LEGACY)
+            else registry
         )

         config = {"auths": {registry_key: {"auth": auth_string}}}
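For reference, the structure this helper emits is the standard Docker config.json auth map, where Docker Hub is keyed by a fixed index URL rather than its hostname. A sketch of generating it (the two constants are assumed to be the hostnames "docker.io" and "hub.docker.com", per the comment in the hunk above):

    import base64
    import json

    DOCKER_HUB = "docker.io"
    DOCKER_HUB_LEGACY = "hub.docker.com"  # assumed values, matching the diff's comment

    def docker_config_json(registry: str, username: str, password: str) -> str:
        """Build a config.json auth entry; Docker Hub uses a fixed index URL as its key."""
        auth_string = base64.b64encode(f"{username}:{password}".encode()).decode()
        registry_key = (
            "https://index.docker.io/v1/"
            if registry in (DOCKER_HUB, DOCKER_HUB_LEGACY)
            else registry
        )
        return json.dumps({"auths": {registry_key: {"auth": auth_string}}})

    print(docker_config_json("hub.docker.com", "user", "secret"))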
@@ -316,12 +316,12 @@ class AddonModel(JobGroup, ABC):

     @property
     def panel_title(self) -> str:
-        """Return panel icon for Ingress frame."""
+        """Return panel title for Ingress frame."""
         return self.data.get(ATTR_PANEL_TITLE, self.name)

     @property
-    def panel_admin(self) -> str:
-        """Return panel icon for Ingress frame."""
+    def panel_admin(self) -> bool:
+        """Return if panel is only available for admin users."""
         return self.data[ATTR_PANEL_ADMIN]

     @property

@@ -489,7 +489,7 @@ class AddonModel(JobGroup, ABC):
         return self.data[ATTR_DEVICETREE]

     @property
-    def with_tmpfs(self) -> str | None:
+    def with_tmpfs(self) -> bool:
         """Return if tmp is in memory of add-on."""
         return self.data[ATTR_TMPFS]

@@ -509,7 +509,7 @@ class AddonModel(JobGroup, ABC):
         return self.data[ATTR_VIDEO]

     @property
-    def homeassistant_version(self) -> str | None:
+    def homeassistant_version(self) -> AwesomeVersion | None:
         """Return min Home Assistant version they needed by Add-on."""
         return self.data.get(ATTR_HOMEASSISTANT)
@@ -100,6 +100,9 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..docker.stats import DockerStats
 from ..exceptions import (
+    AddonBootConfigCannotChangeError,
+    AddonConfigurationInvalidError,
+    AddonNotSupportedWriteStdinError,
     APIAddonNotInstalled,
     APIError,
     APIForbidden,
@@ -125,6 +128,7 @@ SCHEMA_OPTIONS = vol.Schema(
         vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
         vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
         vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
+        vol.Optional(ATTR_OPTIONS): vol.Maybe(dict),
     }
 )
@@ -300,19 +304,20 @@ class APIAddons(CoreSysAttributes):
         # Update secrets for validation
         await self.sys_homeassistant.secrets.reload()

-        # Extend schema with add-on specific validation
-        addon_schema = SCHEMA_OPTIONS.extend(
-            {vol.Optional(ATTR_OPTIONS): vol.Maybe(addon.schema)}
-        )
-
         # Validate/Process Body
-        body = await api_validate(addon_schema, request)
+        body = await api_validate(SCHEMA_OPTIONS, request)
         if ATTR_OPTIONS in body:
-            addon.options = body[ATTR_OPTIONS]
+            try:
+                addon.options = addon.schema(body[ATTR_OPTIONS])
+            except vol.Invalid as ex:
+                raise AddonConfigurationInvalidError(
+                    addon=addon.slug,
+                    validation_error=humanize_error(body[ATTR_OPTIONS], ex),
+                ) from None
         if ATTR_BOOT in body:
             if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
-                raise APIError(
-                    f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
+                raise AddonBootConfigCannotChangeError(
+                    addon=addon.slug, boot_config=addon.boot_config.value
                 )
             addon.boot = body[ATTR_BOOT]
         if ATTR_AUTO_UPDATE in body:
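The new flow validates the request body against the static schema first, then applies the add-on's own schema to the nested options, converting vol.Invalid into a typed API error. A self-contained sketch of that two-stage validation with voluptuous (the schema contents here are illustrative, not the real add-on schema):

    import voluptuous as vol
    from voluptuous.humanize import humanize_error

    # Stage 1: static body schema; options is just "any dict or None" at this point.
    SCHEMA_OPTIONS = vol.Schema({vol.Optional("options"): vol.Maybe(dict)})

    # Stage 2: the add-on's own schema, applied only to the nested options payload.
    addon_schema = vol.Schema({vol.Required("port"): int})

    body = SCHEMA_OPTIONS({"options": {"port": "not-a-number"}})
    try:
        options = addon_schema(body["options"])
        print("validated:", options)
    except vol.Invalid as ex:
        # humanize_error() renders a readable message such as
        # "expected int for dictionary value @ data['port']"
        print(humanize_error(body["options"], ex))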
@@ -385,7 +390,7 @@ class APIAddons(CoreSysAttributes):
         return data

     @api_process
-    async def options_config(self, request: web.Request) -> None:
+    async def options_config(self, request: web.Request) -> dict[str, Any]:
         """Validate user options for add-on."""
         slug: str = request.match_info["addon"]
         if slug != "self":
@@ -430,11 +435,11 @@ class APIAddons(CoreSysAttributes):
         }

     @api_process
-    async def uninstall(self, request: web.Request) -> Awaitable[None]:
+    async def uninstall(self, request: web.Request) -> None:
         """Uninstall add-on."""
         addon = self.get_addon_for_request(request)
         body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
-        return await asyncio.shield(
+        await asyncio.shield(
             self.sys_addons.uninstall(
                 addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
             )
@@ -476,7 +481,7 @@ class APIAddons(CoreSysAttributes):
         """Write to stdin of add-on."""
         addon = self.get_addon_for_request(request)
         if not addon.with_stdin:
-            raise APIError(f"STDIN not supported the {addon.slug} add-on")
+            raise AddonNotSupportedWriteStdinError(_LOGGER.error, addon=addon.slug)

         data = await request.read()
         await asyncio.shield(addon.write_stdin(data))
@@ -15,7 +15,7 @@ import voluptuous as vol
 from ..addons.addon import Addon
 from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIForbidden
+from ..exceptions import APIForbidden, AuthInvalidNonStringValueError
 from .const import (
     ATTR_GROUP_IDS,
     ATTR_IS_ACTIVE,

@@ -69,7 +69,9 @@ class APIAuth(CoreSysAttributes):
         try:
             _ = username.encode and password.encode  # type: ignore
         except AttributeError:
-            raise HTTPUnauthorized(headers=REALM_HEADER) from None
+            raise AuthInvalidNonStringValueError(
+                _LOGGER.error, headers=REALM_HEADER
+            ) from None

         return self.sys_auth.check_login(
             addon, cast(str, username), cast(str, password)
@@ -211,7 +211,7 @@ class APIBackups(CoreSysAttributes):
         await self.sys_backups.save_data()

     @api_process
-    async def reload(self, _):
+    async def reload(self, _: web.Request) -> bool:
         """Reload backup list."""
         await asyncio.shield(self.sys_backups.reload())
         return True

@@ -421,7 +421,7 @@ class APIBackups(CoreSysAttributes):
         await self.sys_backups.remove(backup, locations=locations)

     @api_process
-    async def download(self, request: web.Request):
+    async def download(self, request: web.Request) -> web.StreamResponse:
         """Download a backup file."""
         backup = self._extract_slug(request)
         # Query will give us '' for /backups, convert value to None

@@ -451,7 +451,7 @@ class APIBackups(CoreSysAttributes):
         return response

     @api_process
-    async def upload(self, request: web.Request):
+    async def upload(self, request: web.Request) -> dict[str, str] | bool:
         """Upload a backup file."""
         location: LOCATION_TYPE = None
         locations: list[LOCATION_TYPE] | None = None
@@ -55,7 +55,7 @@ class APIDocker(CoreSysAttributes):
     """Handle RESTful API for Docker configuration."""

     @api_process
-    async def info(self, request: web.Request):
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Get docker info."""
         data_registries = {}
         for hostname, registry in self.sys_docker.config.registries.items():

@@ -113,7 +113,7 @@ class APIDocker(CoreSysAttributes):
         return {ATTR_REGISTRIES: data_registries}

     @api_process
-    async def create_registry(self, request: web.Request):
+    async def create_registry(self, request: web.Request) -> None:
         """Create a new docker registry."""
         body = await api_validate(SCHEMA_DOCKER_REGISTRY, request)

@@ -123,7 +123,7 @@ class APIDocker(CoreSysAttributes):
         await self.sys_docker.config.save_data()

     @api_process
-    async def remove_registry(self, request: web.Request):
+    async def remove_registry(self, request: web.Request) -> None:
         """Delete a docker registry."""
         hostname = request.match_info.get(ATTR_HOSTNAME)
         if hostname not in self.sys_docker.config.registries:
@@ -154,7 +154,7 @@ class APIHomeAssistant(CoreSysAttributes):
         await self.sys_homeassistant.save_data()

     @api_process
-    async def stats(self, request: web.Request) -> dict[Any, str]:
+    async def stats(self, request: web.Request) -> dict[str, Any]:
         """Return resource information."""
         stats = await self.sys_homeassistant.core.stats()
         if not stats:

@@ -191,7 +191,7 @@ class APIHomeAssistant(CoreSysAttributes):
         return await update_task

     @api_process
-    async def stop(self, request: web.Request) -> Awaitable[None]:
+    async def stop(self, request: web.Request) -> None:
         """Stop Home Assistant."""
         body = await api_validate(SCHEMA_STOP, request)
         await self._check_offline_migration(force=body[ATTR_FORCE])
@@ -1,6 +1,7 @@
 """Init file for Supervisor host RESTful API."""

 import asyncio
+from collections.abc import Awaitable
 from contextlib import suppress
 import json
 import logging

@@ -99,7 +100,7 @@ class APIHost(CoreSysAttributes):
     )

     @api_process
-    async def info(self, request):
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Return host information."""
         return {
             ATTR_AGENT_VERSION: self.sys_dbus.agent.version,

@@ -128,7 +129,7 @@ class APIHost(CoreSysAttributes):
         }

     @api_process
-    async def options(self, request):
+    async def options(self, request: web.Request) -> None:
         """Edit host settings."""
         body = await api_validate(SCHEMA_OPTIONS, request)

@@ -139,7 +140,7 @@ class APIHost(CoreSysAttributes):
     )

     @api_process
-    async def reboot(self, request):
+    async def reboot(self, request: web.Request) -> None:
         """Reboot host."""
         body = await api_validate(SCHEMA_SHUTDOWN, request)
         await self._check_ha_offline_migration(force=body[ATTR_FORCE])

@@ -147,7 +148,7 @@ class APIHost(CoreSysAttributes):
         return await asyncio.shield(self.sys_host.control.reboot())

     @api_process
-    async def shutdown(self, request):
+    async def shutdown(self, request: web.Request) -> None:
         """Poweroff host."""
         body = await api_validate(SCHEMA_SHUTDOWN, request)
         await self._check_ha_offline_migration(force=body[ATTR_FORCE])
@@ -155,12 +156,12 @@ class APIHost(CoreSysAttributes):
         return await asyncio.shield(self.sys_host.control.shutdown())

     @api_process
-    def reload(self, request):
+    def reload(self, request: web.Request) -> Awaitable[None]:
         """Reload host data."""
         return asyncio.shield(self.sys_host.reload())

     @api_process
-    async def services(self, request):
+    async def services(self, request: web.Request) -> dict[str, Any]:
         """Return list of available services."""
         services = []
         for unit in self.sys_host.services:

@@ -175,7 +176,7 @@ class APIHost(CoreSysAttributes):
         return {ATTR_SERVICES: services}

     @api_process
-    async def list_boots(self, _: web.Request):
+    async def list_boots(self, _: web.Request) -> dict[str, Any]:
         """Return a list of boot IDs."""
         boot_ids = await self.sys_host.logs.get_boot_ids()
         return {

@@ -186,7 +187,7 @@ class APIHost(CoreSysAttributes):
         }

     @api_process
-    async def list_identifiers(self, _: web.Request):
+    async def list_identifiers(self, _: web.Request) -> dict[str, list[str]]:
         """Return a list of syslog identifiers."""
         return {ATTR_IDENTIFIERS: await self.sys_host.logs.get_identifiers()}

@@ -332,7 +333,7 @@ class APIHost(CoreSysAttributes):
     )

     @api_process
-    async def disk_usage(self, request: web.Request) -> dict:
+    async def disk_usage(self, request: web.Request) -> dict[str, Any]:
         """Return a breakdown of storage usage for the system."""

         max_depth = request.query.get(ATTR_MAX_DEPTH, 1)
@@ -1,12 +1,12 @@
 """Handle security part of this API."""

-from collections.abc import Callable
+from collections.abc import Awaitable, Callable
 import logging
 import re
 from typing import Final
 from urllib.parse import unquote

-from aiohttp.web import Request, Response, middleware
+from aiohttp.web import Request, StreamResponse, middleware
 from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
 from awesomeversion import AwesomeVersion

@@ -89,7 +89,7 @@ CORE_ONLY_PATHS: Final = re.compile(
 )

 # Policy role add-on API access
-ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
+ADDONS_ROLE_ACCESS: dict[str, re.Pattern[str]] = {
     ROLE_DEFAULT: re.compile(
         r"^(?:"
         r"|/.+/info"

@@ -180,7 +180,9 @@ class SecurityMiddleware(CoreSysAttributes):
         return unquoted

     @middleware
-    async def block_bad_requests(self, request: Request, handler: Callable) -> Response:
+    async def block_bad_requests(
+        self, request: Request, handler: Callable[[Request], Awaitable[StreamResponse]]
+    ) -> StreamResponse:
         """Process request and tblock commonly known exploit attempts."""
         if FILTERS.search(self._recursive_unquote(request.path)):
             _LOGGER.warning(
@@ -198,7 +200,9 @@ class SecurityMiddleware(CoreSysAttributes):
         return await handler(request)

     @middleware
-    async def system_validation(self, request: Request, handler: Callable) -> Response:
+    async def system_validation(
+        self, request: Request, handler: Callable[[Request], Awaitable[StreamResponse]]
+    ) -> StreamResponse:
         """Check if core is ready to response."""
         if self.sys_core.state not in VALID_API_STATES:
             return api_return_error(

@@ -208,7 +212,9 @@ class SecurityMiddleware(CoreSysAttributes):
         return await handler(request)

     @middleware
-    async def token_validation(self, request: Request, handler: Callable) -> Response:
+    async def token_validation(
+        self, request: Request, handler: Callable[[Request], Awaitable[StreamResponse]]
+    ) -> StreamResponse:
         """Check security access of this layer."""
         request_from: CoreSysAttributes | None = None
         supervisor_token = extract_supervisor_token(request)

@@ -279,7 +285,9 @@ class SecurityMiddleware(CoreSysAttributes):
         raise HTTPForbidden()

     @middleware
-    async def core_proxy(self, request: Request, handler: Callable) -> Response:
+    async def core_proxy(
+        self, request: Request, handler: Callable[[Request], Awaitable[StreamResponse]]
+    ) -> StreamResponse:
         """Validate user from Core API proxy."""
         if (
             request[REQUEST_FROM] != self.sys_homeassistant
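These middleware signature changes widen the annotated return type from Response to StreamResponse (the actual base class aiohttp handlers may return) and make the handler parameter's type precise. A minimal, runnable sketch of a correctly typed aiohttp middleware:

    from collections.abc import Awaitable, Callable

    from aiohttp import web

    @web.middleware
    async def example_middleware(
        request: web.Request,
        handler: Callable[[web.Request], Awaitable[web.StreamResponse]],
    ) -> web.StreamResponse:
        # StreamResponse is the common base of Response, FileResponse, and
        # WebSocketResponse, so this annotation covers every handler kind.
        if request.path.startswith("/forbidden"):
            raise web.HTTPForbidden()
        return await handler(request)

    app = web.Application(middlewares=[example_middleware])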
@@ -1,5 +1,9 @@
 """Init file for Supervisor network RESTful API."""

+from typing import Any
+
+from aiohttp import web
+
 from ..const import (
     ATTR_AVAILABLE,
     ATTR_PROVIDERS,

@@ -25,7 +29,7 @@ class APIServices(CoreSysAttributes):
         return service

     @api_process
-    async def list_services(self, request):
+    async def list_services(self, request: web.Request) -> dict[str, Any]:
         """Show register services."""
         services = []
         for service in self.sys_services.list_services:

@@ -40,7 +44,7 @@ class APIServices(CoreSysAttributes):
         return {ATTR_SERVICES: services}

     @api_process
-    async def set_service(self, request):
+    async def set_service(self, request: web.Request) -> None:
         """Write data into a service."""
         service = self._extract_service(request)
         body = await api_validate(service.schema, request)

@@ -50,7 +54,7 @@ class APIServices(CoreSysAttributes):
         await service.set_service_data(addon, body)

     @api_process
-    async def get_service(self, request):
+    async def get_service(self, request: web.Request) -> dict[str, Any]:
         """Read data into a service."""
         service = self._extract_service(request)

@@ -62,7 +66,7 @@ class APIServices(CoreSysAttributes):
         return service.get_service_data()

     @api_process
-    async def del_service(self, request):
+    async def del_service(self, request: web.Request) -> None:
         """Delete data into a service."""
         service = self._extract_service(request)
         addon = request[REQUEST_FROM]
@@ -53,7 +53,7 @@ from ..const import (
     REQUEST_FROM,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, APIForbidden, APINotFound
+from ..exceptions import APIError, APIForbidden, APINotFound, StoreAddonNotFoundError
 from ..store.addon import AddonStore
 from ..store.repository import Repository
 from ..store.validate import validate_repository

@@ -104,7 +104,7 @@ class APIStore(CoreSysAttributes):
         addon_slug: str = request.match_info["addon"]

         if not (addon := self.sys_addons.get(addon_slug)):
-            raise APINotFound(f"Addon {addon_slug} does not exist")
+            raise StoreAddonNotFoundError(addon=addon_slug)

         if installed and not addon.is_installed:
             raise APIError(f"Addon {addon_slug} is not installed")

@@ -112,7 +112,7 @@ class APIStore(CoreSysAttributes):
         if not installed and addon.is_installed:
             addon = cast(Addon, addon)
             if not addon.addon_store:
-                raise APINotFound(f"Addon {addon_slug} does not exist in the store")
+                raise StoreAddonNotFoundError(addon=addon_slug)
             return addon.addon_store

         return addon

@@ -349,13 +349,13 @@ class APIStore(CoreSysAttributes):
         return self._generate_repository_information(repository)

     @api_process
-    async def add_repository(self, request: web.Request):
+    async def add_repository(self, request: web.Request) -> None:
         """Add repository to the store."""
         body = await api_validate(SCHEMA_ADD_REPOSITORY, request)
         await asyncio.shield(self.sys_store.add_repository(body[ATTR_REPOSITORY]))

     @api_process
-    async def remove_repository(self, request: web.Request):
+    async def remove_repository(self, request: web.Request) -> None:
         """Remove repository from the store."""
         repository: Repository = self._extract_repository(request)
         await asyncio.shield(self.sys_store.remove_repository(repository))
@@ -80,7 +80,7 @@ class APISupervisor(CoreSysAttributes):
     """Handle RESTful API for Supervisor functions."""

     @api_process
-    async def ping(self, request):
+    async def ping(self, request: web.Request) -> bool:
         """Return ok for signal that the API is ready."""
         return True
@@ -1,7 +1,7 @@
 """Init file for Supervisor util for RESTful API."""

 import asyncio
-from collections.abc import Callable
+from collections.abc import Callable, Mapping
 import json
 from typing import Any, cast

@@ -26,7 +26,7 @@ from ..const import (
     RESULT_OK,
 )
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import APIError, BackupFileNotFoundError, DockerAPIError, HassioError
+from ..exceptions import APIError, DockerAPIError, HassioError
 from ..jobs import JobSchedulerOptions, SupervisorJob
 from ..utils import check_exception_chain, get_message_from_exception_chain
 from ..utils.json import json_dumps, json_loads as json_loads_util

@@ -67,10 +67,10 @@ def api_process(method):
         """Return API information."""
         try:
             answer = await method(*args, **kwargs)
-        except BackupFileNotFoundError as err:
-            return api_return_error(err, status=404)
         except APIError as err:
-            return api_return_error(err, status=err.status, job_id=err.job_id)
+            return api_return_error(
+                err, status=err.status, job_id=err.job_id, headers=err.headers
+            )
         except HassioError as err:
             return api_return_error(err)
@@ -139,6 +139,7 @@ def api_return_error(
     error_type: str | None = None,
     status: int = 400,
     *,
+    headers: Mapping[str, str] | None = None,
     job_id: str | None = None,
 ) -> web.Response:
     """Return an API error message."""

@@ -151,10 +152,15 @@ def api_return_error(

     match error_type:
         case const.CONTENT_TYPE_TEXT:
-            return web.Response(body=message, content_type=error_type, status=status)
+            return web.Response(
+                body=message, content_type=error_type, status=status, headers=headers
+            )
         case const.CONTENT_TYPE_BINARY:
             return web.Response(
-                body=message.encode(), content_type=error_type, status=status
+                body=message.encode(),
+                content_type=error_type,
+                status=status,
+                headers=headers,
             )
         case _:
             result: dict[str, Any] = {

@@ -172,6 +178,7 @@ def api_return_error(
             result,
             status=status,
             dumps=json_dumps,
+            headers=headers,
         )
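Threading headers through api_return_error lets an exception dictate response headers, which is how the WWW-Authenticate realm header raised in the auth API reaches the client. A sketch of the pattern, with the attribute names assumed from the hunk above (err.status, err.headers, err.job_id):

    from collections.abc import Mapping

    from aiohttp import web

    class APIError(Exception):
        """Sketch: an API exception that carries its own HTTP response metadata."""

        def __init__(
            self,
            message: str,
            status: int = 400,
            headers: Mapping[str, str] | None = None,
            job_id: str | None = None,
        ) -> None:
            super().__init__(message)
            self.status = status
            self.headers = headers
            self.job_id = job_id

    def to_response(err: APIError) -> web.Response:
        # The decorator-side half of the pattern: every field on the exception
        # flows straight into the HTTP response.
        return web.Response(text=str(err), status=err.status, headers=err.headers)

    resp = to_response(APIError("auth required", 401, {"WWW-Authenticate": 'Basic realm="x"'}))
    print(resp.status)  # 401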
@@ -9,8 +9,10 @@ from .addons.addon import Addon
 from .const import ATTR_PASSWORD, ATTR_TYPE, ATTR_USERNAME, FILE_HASSIO_AUTH
 from .coresys import CoreSys, CoreSysAttributes
 from .exceptions import (
-    AuthError,
+    AuthHomeAssistantAPIValidationError,
+    AuthInvalidNonStringValueError,
     AuthListUsersError,
+    AuthListUsersNoneResponseError,
     AuthPasswordResetError,
     HomeAssistantAPIError,
     HomeAssistantWSError,
@@ -83,10 +85,8 @@ class Auth(FileConfiguration, CoreSysAttributes):
         self, addon: Addon, username: str | None, password: str | None
     ) -> bool:
         """Check username login."""
-        if password is None:
-            raise AuthError("None as password is not supported!", _LOGGER.error)
-        if username is None:
-            raise AuthError("None as username is not supported!", _LOGGER.error)
+        if username is None or password is None:
+            raise AuthInvalidNonStringValueError(_LOGGER.error)

         _LOGGER.info("Auth request from '%s' for '%s'", addon.slug, username)
@@ -137,7 +137,7 @@ class Auth(FileConfiguration, CoreSysAttributes):
         finally:
             self._running.pop(username, None)

-        raise AuthError()
+        raise AuthHomeAssistantAPIValidationError()

     async def change_password(self, username: str, password: str) -> None:
         """Change user password login."""
@@ -155,7 +155,7 @@ class Auth(FileConfiguration, CoreSysAttributes):
         except HomeAssistantAPIError as err:
             _LOGGER.error("Can't request password reset on Home Assistant: %s", err)

-        raise AuthPasswordResetError()
+        raise AuthPasswordResetError(user=username)

     async def list_users(self) -> list[dict[str, Any]]:
         """List users on the Home Assistant instance."""
@@ -166,15 +166,12 @@ class Auth(FileConfiguration, CoreSysAttributes):
                 {ATTR_TYPE: "config/auth/list"}
             )
         except HomeAssistantWSError as err:
-            raise AuthListUsersError(
-                f"Can't request listing users on Home Assistant: {err}", _LOGGER.error
-            ) from err
+            _LOGGER.error("Can't request listing users on Home Assistant: %s", err)
+            raise AuthListUsersError() from err

         if users is not None:
             return users
-        raise AuthListUsersError(
-            "Can't request listing users on Home Assistant!", _LOGGER.error
-        )
+        raise AuthListUsersNoneResponseError(_LOGGER.error)

     @staticmethod
     def _rehash(value: str, salt2: str = "") -> str:
@@ -628,9 +628,6 @@ class Backup(JobGroup):
                 if start_task := await self._addon_save(addon):
                     start_tasks.append(start_task)
             except BackupError as err:
-                err = BackupError(
-                    f"Can't backup add-on {addon.slug}: {str(err)}", _LOGGER.error
-                )
                 self.sys_jobs.current.capture_error(err)

         return start_tasks
@@ -250,7 +250,7 @@ class ConnectionType(StrEnum):
     WIRELESS = "802-11-wireless"


-class ConnectionStateType(IntEnum):
+class ConnectionState(IntEnum):
     """Connection states.

     https://networkmanager.dev/docs/api/latest/nm-dbus-types.html#NMActiveConnectionState

@@ -90,8 +90,8 @@ class Ip4Properties(IpProperties):
 class Ip6Properties(IpProperties):
     """IPv6 properties object for Network Manager."""

-    addr_gen_mode: int
-    ip6_privacy: int
+    addr_gen_mode: int | None
+    ip6_privacy: int | None
     dns: list[bytes] | None
@@ -16,8 +16,8 @@ from ..const import (
     DBUS_IFACE_CONNECTION_ACTIVE,
     DBUS_NAME_NM,
     DBUS_OBJECT_BASE,
+    ConnectionState,
     ConnectionStateFlags,
-    ConnectionStateType,
 )
 from ..interface import DBusInterfaceProxy, dbus_property
 from ..utils import dbus_connected

@@ -67,9 +67,9 @@ class NetworkConnection(DBusInterfaceProxy):

     @property
     @dbus_property
-    def state(self) -> ConnectionStateType:
+    def state(self) -> ConnectionState:
         """Return the state of the connection."""
-        return ConnectionStateType(self.properties[DBUS_ATTR_STATE])
+        return ConnectionState(self.properties[DBUS_ATTR_STATE])

     @property
     def state_flags(self) -> set[ConnectionStateFlags]:
@@ -16,7 +16,11 @@ from ....host.const import (
     InterfaceType,
     MulticastDnsMode,
 )
-from ...const import MulticastDnsValue
+from ...const import (
+    InterfaceAddrGenMode as NMInterfaceAddrGenMode,
+    InterfaceIp6Privacy as NMInterfaceIp6Privacy,
+    MulticastDnsValue,
+)
 from .. import NetworkManager
 from . import (
     CONF_ATTR_802_ETHERNET,
@@ -118,24 +122,41 @@ def _get_ipv6_connection_settings(
|
||||
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "auto")
|
||||
if ipv6setting:
|
||||
if ipv6setting.addr_gen_mode == InterfaceAddrGenMode.EUI64:
|
||||
ipv6[CONF_ATTR_IPV6_ADDR_GEN_MODE] = Variant("i", 0)
|
||||
ipv6[CONF_ATTR_IPV6_ADDR_GEN_MODE] = Variant(
|
||||
"i", NMInterfaceAddrGenMode.EUI64.value
|
||||
)
|
||||
elif (
|
||||
not support_addr_gen_mode_defaults
|
||||
or ipv6setting.addr_gen_mode == InterfaceAddrGenMode.STABLE_PRIVACY
|
||||
):
|
||||
ipv6[CONF_ATTR_IPV6_ADDR_GEN_MODE] = Variant("i", 1)
|
||||
ipv6[CONF_ATTR_IPV6_ADDR_GEN_MODE] = Variant(
|
||||
"i", NMInterfaceAddrGenMode.STABLE_PRIVACY.value
|
||||
)
|
||||
elif ipv6setting.addr_gen_mode == InterfaceAddrGenMode.DEFAULT_OR_EUI64:
|
||||
ipv6[CONF_ATTR_IPV6_ADDR_GEN_MODE] = Variant("i", 2)
|
||||
ipv6[CONF_ATTR_IPV6_ADDR_GEN_MODE] = Variant(
|
||||
"i", NMInterfaceAddrGenMode.DEFAULT_OR_EUI64.value
|
||||
)
|
||||
else:
|
||||
ipv6[CONF_ATTR_IPV6_ADDR_GEN_MODE] = Variant("i", 3)
|
||||
ipv6[CONF_ATTR_IPV6_ADDR_GEN_MODE] = Variant(
|
||||
"i", NMInterfaceAddrGenMode.DEFAULT.value
|
||||
)
|
||||
|
||||
if ipv6setting.ip6_privacy == InterfaceIp6Privacy.DISABLED:
|
||||
ipv6[CONF_ATTR_IPV6_PRIVACY] = Variant("i", 0)
|
||||
ipv6[CONF_ATTR_IPV6_PRIVACY] = Variant(
|
||||
"i", NMInterfaceIp6Privacy.DISABLED.value
|
||||
)
|
||||
elif ipv6setting.ip6_privacy == InterfaceIp6Privacy.ENABLED_PREFER_PUBLIC:
|
||||
ipv6[CONF_ATTR_IPV6_PRIVACY] = Variant("i", 1)
|
||||
ipv6[CONF_ATTR_IPV6_PRIVACY] = Variant(
|
||||
"i", NMInterfaceIp6Privacy.ENABLED_PREFER_PUBLIC.value
|
||||
)
|
||||
elif ipv6setting.ip6_privacy == InterfaceIp6Privacy.ENABLED:
|
||||
ipv6[CONF_ATTR_IPV6_PRIVACY] = Variant("i", 2)
|
||||
ipv6[CONF_ATTR_IPV6_PRIVACY] = Variant(
|
||||
"i", NMInterfaceIp6Privacy.ENABLED.value
|
||||
)
|
||||
else:
|
||||
ipv6[CONF_ATTR_IPV6_PRIVACY] = Variant("i", -1)
|
||||
ipv6[CONF_ATTR_IPV6_PRIVACY] = Variant(
|
||||
"i", NMInterfaceIp6Privacy.DEFAULT.value
|
||||
)
|
||||
elif ipv6setting.method == InterfaceMethod.DISABLED:
|
||||
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "link-local")
|
||||
elif ipv6setting.method == InterfaceMethod.STATIC:
|
||||
|
||||
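The hunk above swaps hard-coded integers for named NetworkManager enum values. The enum definitions themselves live in supervisor/dbus/const.py, which this comparison does not show; inferred from the literals being replaced, they presumably look like the following sketch (not the verbatim source):

from enum import IntEnum


class InterfaceAddrGenMode(IntEnum):
    """NM ipv6.addr-gen-mode values (inferred from the replaced literals)."""

    EUI64 = 0
    STABLE_PRIVACY = 1
    DEFAULT_OR_EUI64 = 2
    DEFAULT = 3


class InterfaceIp6Privacy(IntEnum):
    """NM ipv6.ip6-privacy values (inferred from the replaced literals)."""

    DEFAULT = -1
    DISABLED = 0
    ENABLED_PREFER_PUBLIC = 1
    ENABLED = 2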
@@ -2,6 +2,7 @@

from __future__ import annotations

from collections.abc import Awaitable
from contextlib import suppress
from ipaddress import IPv4Address
import logging
@@ -35,6 +36,7 @@ from ..coresys import CoreSys
from ..exceptions import (
CoreDNSError,
DBusError,
DockerBuildError,
DockerError,
DockerJobError,
DockerNotFound,
@@ -682,13 +684,12 @@ class DockerAddon(DockerInterface):
async def _build(self, version: AwesomeVersion, image: str | None = None) -> None:
"""Build a Docker container."""
build_env = await AddonBuild(self.coresys, self.addon).load_config()
if not await build_env.is_valid():
_LOGGER.error("Invalid build environment, can't build this add-on!")
raise DockerError()
# Check if the build environment is valid, raises if not
await build_env.is_valid()

_LOGGER.info("Starting build for %s:%s", self.image, version)

def build_image():
def build_image() -> tuple[str, str]:
if build_env.squash:
_LOGGER.warning(
"Ignoring squash build option for %s as Docker BuildKit does not support it.",
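Note the contract change here: is_valid() no longer returns a bool but raises on problems (the test update near the end of this comparison asserts that (await build.is_valid()) is None). A minimal sketch of the new call-site pattern:

# Callers just await and let the specific error propagate. Based on the
# exceptions added in this comparison, a missing Dockerfile raises
# AddonBuildDockerfileMissingError and an architecture mismatch raises
# AddonBuildArchitectureNotSupportedError.
await build_env.is_valid()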
@@ -761,8 +762,9 @@ class DockerAddon(DockerInterface):
requests.RequestException,
aiodocker.DockerError,
) as err:
_LOGGER.error("Can't build %s:%s: %s", self.image, version, err)
raise DockerError() from err
raise DockerBuildError(
f"Can't build {self.image}:{version}: {err!s}", _LOGGER.error
) from err

_LOGGER.info("Build %s:%s done", self.image, version)

@@ -820,12 +822,9 @@ class DockerAddon(DockerInterface):
on_condition=DockerJobError,
concurrency=JobConcurrency.GROUP_REJECT,
)
async def write_stdin(self, data: bytes) -> None:
def write_stdin(self, data: bytes) -> Awaitable[None]:
"""Write to add-on stdin."""
if not await self.is_running():
raise DockerError()

await self.sys_run_in_executor(self._write_stdin, data)
return self.sys_run_in_executor(self._write_stdin, data)

def _write_stdin(self, data: bytes) -> None:
"""Write to add-on stdin.

@@ -15,11 +15,12 @@ from ..const import MACHINE_ID

RE_RETRYING_DOWNLOAD_STATUS = re.compile(r"Retrying in \d+ seconds?")

# Docker Hub registry identifier
DOCKER_HUB = "hub.docker.com"
# Docker Hub registry identifier (official default)
# Docker's default registry is docker.io
DOCKER_HUB = "docker.io"

# Regex to match images with a registry host (e.g., ghcr.io/org/image)
IMAGE_WITH_HOST = re.compile(r"^((?:[a-z0-9]+(?:-[a-z0-9]+)*\.)+[a-z]{2,})\/.+")
# Legacy Docker Hub identifier for backward compatibility
DOCKER_HUB_LEGACY = "hub.docker.com"


class Capabilities(StrEnum):
@@ -45,7 +45,13 @@ from ..jobs.decorator import Job
from ..jobs.job_group import JobGroup
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..utils.sentry import async_capture_exception
from .const import DOCKER_HUB, ContainerState, PullImageLayerStage, RestartPolicy
from .const import (
DOCKER_HUB,
DOCKER_HUB_LEGACY,
ContainerState,
PullImageLayerStage,
RestartPolicy,
)
from .manager import CommandReturn, PullLogEntry
from .monitor import DockerContainerStateEvent
from .stats import DockerStats
@@ -184,7 +190,8 @@ class DockerInterface(JobGroup, ABC):
stored = self.sys_docker.config.registries[registry]
credentials[ATTR_USERNAME] = stored[ATTR_USERNAME]
credentials[ATTR_PASSWORD] = stored[ATTR_PASSWORD]
if registry != DOCKER_HUB:
# Don't include registry for Docker Hub (both official and legacy)
if registry not in (DOCKER_HUB, DOCKER_HUB_LEGACY):
credentials[ATTR_REGISTRY] = registry

_LOGGER.debug(
@@ -450,35 +457,34 @@ class DockerInterface(JobGroup, ABC):
return True
return False

async def is_running(self) -> bool:
"""Return True if Docker is running."""
async def _get_container(self) -> Container | None:
"""Get docker container, returns None if not found."""
try:
docker_container = await self.sys_run_in_executor(
return await self.sys_run_in_executor(
self.sys_docker.containers.get, self.name
)
except docker.errors.NotFound:
return False
return None
except docker.errors.DockerException as err:
raise DockerAPIError() from err
raise DockerAPIError(
f"Docker API error occurred while getting container information: {err!s}"
) from err
except requests.RequestException as err:
raise DockerRequestError() from err
raise DockerRequestError(
f"Error communicating with Docker to get container information: {err!s}"
) from err

return docker_container.status == "running"
async def is_running(self) -> bool:
"""Return True if Docker is running."""
if docker_container := await self._get_container():
return docker_container.status == "running"
return False

async def current_state(self) -> ContainerState:
"""Return current state of container."""
try:
docker_container = await self.sys_run_in_executor(
self.sys_docker.containers.get, self.name
)
except docker.errors.NotFound:
return ContainerState.UNKNOWN
except docker.errors.DockerException as err:
raise DockerAPIError() from err
except requests.RequestException as err:
raise DockerRequestError() from err

return _container_state_from_model(docker_container)
if docker_container := await self._get_container():
return _container_state_from_model(docker_container)
return ContainerState.UNKNOWN

@Job(name="docker_interface_attach", concurrency=JobConcurrency.GROUP_QUEUE)
async def attach(
@@ -513,7 +519,9 @@ class DockerInterface(JobGroup, ABC):

# Successful?
if not self._meta:
raise DockerError()
raise DockerError(
f"Could not get metadata on container or image for {self.name}"
)
_LOGGER.info("Attaching to %s with version %s", self.image, self.version)

@Job(
@@ -716,14 +724,8 @@ class DockerInterface(JobGroup, ABC):

async def is_failed(self) -> bool:
"""Return True if Docker is failing state."""
try:
docker_container = await self.sys_run_in_executor(
self.sys_docker.containers.get, self.name
)
except docker.errors.NotFound:
if not (docker_container := await self._get_container()):
return False
except (docker.errors.DockerException, requests.RequestException) as err:
raise DockerError() from err

# container is not running
if docker_container.status != "exited":
@@ -49,9 +49,10 @@ from ..exceptions import (
)
from ..utils.common import FileConfiguration
from ..validate import SCHEMA_DOCKER_CONFIG
from .const import DOCKER_HUB, IMAGE_WITH_HOST, LABEL_MANAGED
from .const import DOCKER_HUB, DOCKER_HUB_LEGACY, LABEL_MANAGED
from .monitor import DockerMonitor
from .network import DockerNetwork
from .utils import get_registry_from_image

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -212,19 +213,25 @@ class DockerConfig(FileConfiguration):

Matches the image against configured registries and returns the registry
name if found, or None if no matching credentials are configured.

Uses Docker's domain detection logic from:
vendor/github.com/distribution/reference/normalize.go
"""
if not self.registries:
return None

# Check if image uses a custom registry (e.g., ghcr.io/org/image)
matcher = IMAGE_WITH_HOST.match(image)
if matcher:
registry = matcher.group(1)
registry = get_registry_from_image(image)
if registry:
if registry in self.registries:
return registry
# If no registry prefix, check for Docker Hub credentials
elif DOCKER_HUB in self.registries:
return DOCKER_HUB
else:
# No registry prefix means Docker Hub
# Support both docker.io (official) and hub.docker.com (legacy)
if DOCKER_HUB in self.registries:
return DOCKER_HUB
if DOCKER_HUB_LEGACY in self.registries:
return DOCKER_HUB_LEGACY

return None
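Because existing installations may still store Docker Hub credentials under the legacy hub.docker.com key, the lookup has to accept both identifiers. A condensed, standalone sketch of the resolution order implemented above (the helper name is illustrative, not part of the diff):

DOCKER_HUB = "docker.io"
DOCKER_HUB_LEGACY = "hub.docker.com"


def resolve_registry(registries: dict, registry: str | None) -> str | None:
    """Return the configured credentials key that applies, if any."""
    if registry:  # the image reference carried an explicit registry host
        return registry if registry in registries else None
    # No registry prefix means Docker Hub: prefer docker.io, accept legacy key
    if DOCKER_HUB in registries:
        return DOCKER_HUB
    if DOCKER_HUB_LEGACY in registries:
        return DOCKER_HUB_LEGACY
    return None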
@@ -467,8 +474,10 @@ class DockerAPI(CoreSysAttributes):
raises only if the get fails afterwards. Additionally it fires progress reports for the pull
on the bus so listeners can use that to update status for users.
"""
# Use timeout=None to disable timeout for pull operations, matching docker-py behavior.
# aiodocker converts None to ClientTimeout(total=None) which disables the timeout.
async for e in self.images.pull(
repository, tag=tag, platform=platform, auth=auth, stream=True
repository, tag=tag, platform=platform, auth=auth, stream=True, timeout=None
):
entry = PullLogEntry.from_pull_log_dict(job_id, e)
if entry.error:
@@ -608,9 +617,15 @@ class DockerAPI(CoreSysAttributes):
except aiodocker.DockerError as err:
if err.status == HTTPStatus.NOT_FOUND:
return False
raise DockerError() from err
raise DockerError(
f"Could not get container {name} or image {image}:{version} to check state: {err!s}",
_LOGGER.error,
) from err
except (docker_errors.DockerException, requests.RequestException) as err:
raise DockerError() from err
raise DockerError(
f"Could not get container {name} or image {image}:{version} to check state: {err!s}",
_LOGGER.error,
) from err

# Check the image is correct and state is good
return (
@@ -626,9 +641,13 @@ class DockerAPI(CoreSysAttributes):
try:
docker_container: Container = self.containers.get(name)
except docker_errors.NotFound:
# Generally suppressed so we don't log this
raise DockerNotFound() from None
except (docker_errors.DockerException, requests.RequestException) as err:
raise DockerError() from err
raise DockerError(
f"Could not get container {name} for stopping: {err!s}",
_LOGGER.error,
) from err

if docker_container.status == "running":
_LOGGER.info("Stopping %s application", name)
@@ -668,9 +687,13 @@ class DockerAPI(CoreSysAttributes):
try:
container: Container = self.containers.get(name)
except docker_errors.NotFound:
raise DockerNotFound() from None
raise DockerNotFound(
f"Container {name} not found for restarting", _LOGGER.warning
) from None
except (docker_errors.DockerException, requests.RequestException) as err:
raise DockerError() from err
raise DockerError(
f"Could not get container {name} for restarting: {err!s}", _LOGGER.error
) from err

_LOGGER.info("Restarting %s", name)
try:
@@ -683,9 +706,13 @@ class DockerAPI(CoreSysAttributes):
try:
docker_container: Container = self.containers.get(name)
except docker_errors.NotFound:
raise DockerNotFound() from None
raise DockerNotFound(
f"Container {name} not found for logs", _LOGGER.warning
) from None
except (docker_errors.DockerException, requests.RequestException) as err:
raise DockerError() from err
raise DockerError(
f"Could not get container {name} for logs: {err!s}", _LOGGER.error
) from err

try:
return docker_container.logs(tail=tail, stdout=True, stderr=True)
@@ -699,9 +726,13 @@ class DockerAPI(CoreSysAttributes):
try:
docker_container: Container = self.containers.get(name)
except docker_errors.NotFound:
raise DockerNotFound() from None
raise DockerNotFound(
f"Container {name} not found for stats", _LOGGER.warning
) from None
except (docker_errors.DockerException, requests.RequestException) as err:
raise DockerError() from err
raise DockerError(
f"Could not inspect container '{name}': {err!s}", _LOGGER.error
) from err

# container is not running
if docker_container.status != "running":
@@ -720,15 +751,21 @@ class DockerAPI(CoreSysAttributes):
try:
docker_container: Container = self.containers.get(name)
except docker_errors.NotFound:
raise DockerNotFound() from None
raise DockerNotFound(
f"Container {name} not found for running command", _LOGGER.warning
) from None
except (docker_errors.DockerException, requests.RequestException) as err:
raise DockerError() from err
raise DockerError(
f"Can't get container {name} to run command: {err!s}"
) from err

# Execute
try:
code, output = docker_container.exec_run(command)
except (docker_errors.DockerException, requests.RequestException) as err:
raise DockerError() from err
raise DockerError(
f"Can't run command in container {name}: {err!s}"
) from err

return CommandReturn(code, output)

@@ -761,7 +798,7 @@ class DockerAPI(CoreSysAttributes):
"""Import a tar file as image."""
try:
with tar_file.open("rb") as read_tar:
resp: list[dict[str, Any]] = self.images.import_image(read_tar)
resp: list[dict[str, Any]] = await self.images.import_image(read_tar)
except (aiodocker.DockerError, OSError) as err:
raise DockerError(
f"Can't import image from tar: {err}", _LOGGER.error
57
supervisor/docker/utils.py
Normal file
@@ -0,0 +1,57 @@
"""Docker utilities."""

from __future__ import annotations

import re

# Docker image reference domain regex
# Based on Docker's reference implementation:
# vendor/github.com/distribution/reference/normalize.go
#
# A domain is detected if the part before the first / contains:
# - "localhost" (with optional port)
# - Contains "." (like registry.example.com or 127.0.0.1)
# - Contains ":" (like myregistry:5000)
# - IPv6 addresses in brackets (like [::1]:5000)
#
# Note: Docker also treats uppercase letters as registry indicators since
# namespaces must be lowercase, but this regex handles lowercase matching
# and the get_registry_from_image() function validates the registry rules.
IMAGE_REGISTRY_REGEX = re.compile(
r"^(?P<registry>"
r"localhost(?::[0-9]+)?|" # localhost with optional port
r"(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])" # domain component
r"(?:\.(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9]))*" # more components
r"(?::[0-9]+)?|" # optional port
r"\[[a-fA-F0-9:]+\](?::[0-9]+)?" # IPv6 with optional port
r")/" # must be followed by /
)


def get_registry_from_image(image_ref: str) -> str | None:
"""Extract registry from Docker image reference.

Returns the registry if the image reference contains one,
or None if the image uses Docker Hub (docker.io).

Based on Docker's reference implementation:
vendor/github.com/distribution/reference/normalize.go

Examples:
get_registry_from_image("nginx") -> None (docker.io)
get_registry_from_image("library/nginx") -> None (docker.io)
get_registry_from_image("myregistry.com/nginx") -> "myregistry.com"
get_registry_from_image("localhost/myimage") -> "localhost"
get_registry_from_image("localhost:5000/myimage") -> "localhost:5000"
get_registry_from_image("registry.io:5000/org/app:v1") -> "registry.io:5000"
get_registry_from_image("[::1]:5000/myimage") -> "[::1]:5000"

"""
match = IMAGE_REGISTRY_REGEX.match(image_ref)
if match:
registry = match.group("registry")
# Must contain '.' or ':' or be 'localhost' to be a real registry
# This prevents treating "myuser/myimage" as having registry "myuser"
if "." in registry or ":" in registry or registry == "localhost":
return registry
return None  # No registry = Docker Hub (docker.io)
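The docstring examples above double as a quick self-check; assuming the module path from the file header, they can be exercised directly:

from supervisor.docker.utils import get_registry_from_image

assert get_registry_from_image("nginx") is None          # Docker Hub (docker.io)
assert get_registry_from_image("library/nginx") is None  # namespace, not a registry
assert get_registry_from_image("myregistry.com/nginx") == "myregistry.com"
assert get_registry_from_image("localhost:5000/myimage") == "localhost:5000"
assert get_registry_from_image("[::1]:5000/myimage") == "[::1]:5000"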
@@ -1,25 +1,25 @@
"""Core Exceptions."""

from collections.abc import Callable
from collections.abc import Callable, Mapping
from typing import Any

MESSAGE_CHECK_SUPERVISOR_LOGS = (
"Check supervisor logs for details (check with '{logs_command}')"
)
EXTRA_FIELDS_LOGS_COMMAND = {"logs_command": "ha supervisor logs"}


class HassioError(Exception):
"""Root exception."""

error_key: str | None = None
message_template: str | None = None
extra_fields: dict[str, Any] | None = None

def __init__(
self,
message: str | None = None,
logger: Callable[..., None] | None = None,
*,
extra_fields: dict[str, Any] | None = None,
self, message: str | None = None, logger: Callable[..., None] | None = None
) -> None:
"""Raise & log."""
self.extra_fields = extra_fields or {}

if not message and self.message_template:
message = (
self.message_template.format(**self.extra_fields)
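The base class now formats its message from a class-level message_template plus an extra_fields attribute that subclasses set before calling super().__init__(), instead of accepting extra_fields as a keyword argument. A minimal self-contained sketch of that flow (the logging call is assumed from the "Raise & log" docstring; class names here are stand-ins):

from collections.abc import Callable
from typing import Any


class SketchError(Exception):
    """Stand-in for HassioError to show the template mechanism."""

    error_key: str | None = None
    message_template: str | None = None
    extra_fields: dict[str, Any] | None = None

    def __init__(
        self, message: str | None = None, logger: Callable[..., None] | None = None
    ) -> None:
        if not message and self.message_template:
            message = self.message_template.format(**(self.extra_fields or {}))
        if message and logger:
            logger(message)  # assumed from "Raise & log"
        super().__init__(message or "")


class AddonNotRunningSketch(SketchError):
    error_key = "addon_not_running_error"
    message_template = "Add-on {addon} is not running"

    def __init__(self, *, addon: str) -> None:
        self.extra_fields = {"addon": addon}
        super().__init__()


# str(AddonNotRunningSketch(addon="local_example"))
# -> "Add-on local_example is not running"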
@@ -41,6 +41,94 @@ class HassioNotSupportedError(HassioError):
"""Function is not supported."""


# API


class APIError(HassioError, RuntimeError):
"""API errors."""

status = 400
headers: Mapping[str, str] | None = None

def __init__(
self,
message: str | None = None,
logger: Callable[..., None] | None = None,
*,
headers: Mapping[str, str] | None = None,
job_id: str | None = None,
) -> None:
"""Raise & log, optionally with job."""
super().__init__(message, logger)
self.headers = headers
self.job_id = job_id


class APIUnauthorized(APIError):
"""API unauthorized error."""

status = 401


class APIForbidden(APIError):
"""API forbidden error."""

status = 403


class APINotFound(APIError):
"""API not found error."""

status = 404


class APIGone(APIError):
"""API is no longer available."""

status = 410


class APITooManyRequests(APIError):
"""API too many requests error."""

status = 429


class APIInternalServerError(APIError):
"""API internal server error."""

status = 500


class APIAddonNotInstalled(APIError):
"""Not installed addon requested at addons API."""


class APIDBMigrationInProgress(APIError):
"""Service is unavailable because an offline DB migration is in progress."""

status = 503


class APIUnknownSupervisorError(APIError):
"""Unknown error occurred within Supervisor. Appends the check-supervisor-logs rider to the message template."""

status = 500

def __init__(
self,
logger: Callable[..., None] | None = None,
*,
job_id: str | None = None,
) -> None:
"""Initialize exception."""
self.message_template = (
f"{self.message_template}. {MESSAGE_CHECK_SUPERVISOR_LOGS}"
)
self.extra_fields = (self.extra_fields or {}) | EXTRA_FIELDS_LOGS_COMMAND
super().__init__(None, logger, job_id=job_id)


# JobManager
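With the HTTP status, headers, and structured fields carried on the exception classes, one handler can translate any APIError into a response uniformly; the tests later in this comparison assert exactly the message/error_key/extra_fields trio. A hedged sketch of such a translation layer (the real Supervisor API wiring is not part of this diff, and the "result" envelope key is an assumption):

from aiohttp import web


def api_error_response(err: APIError) -> web.Response:
    """Translate an APIError subclass into a JSON error response (sketch)."""
    return web.json_response(
        {
            "result": "error",  # assumed envelope key
            "message": str(err),
            "error_key": err.error_key,
            "extra_fields": err.extra_fields,
            "job_id": err.job_id,
        },
        status=err.status,
        headers=err.headers,
    )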
@@ -122,6 +210,13 @@ class SupervisorAppArmorError(SupervisorError):
"""Supervisor AppArmor error."""


class SupervisorUnknownError(SupervisorError, APIUnknownSupervisorError):
"""Raise when an unknown error occurs interacting with Supervisor or its container."""

error_key = "supervisor_unknown_error"
message_template = "An unknown error occurred with Supervisor"


class SupervisorJobError(SupervisorError, JobException):
"""Raise on job errors."""

@@ -250,6 +345,54 @@ class AddonConfigurationError(AddonsError):
"""Error with add-on configuration."""


class AddonConfigurationInvalidError(AddonConfigurationError, APIError):
"""Raise if invalid configuration provided for addon."""

error_key = "addon_configuration_invalid_error"
message_template = "Add-on {addon} has invalid options: {validation_error}"

def __init__(
self,
logger: Callable[..., None] | None = None,
*,
addon: str,
validation_error: str,
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": addon, "validation_error": validation_error}
super().__init__(None, logger)


class AddonBootConfigCannotChangeError(AddonsError, APIError):
"""Raise if user attempts to change addon boot config when it can't be changed."""

error_key = "addon_boot_config_cannot_change_error"
message_template = (
"Addon {addon} boot option is set to {boot_config} so it cannot be changed"
)

def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str, boot_config: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": addon, "boot_config": boot_config}
super().__init__(None, logger)


class AddonNotRunningError(AddonsError, APIError):
"""Raise when an addon is not running."""

error_key = "addon_not_running_error"
message_template = "Add-on {addon} is not running"

def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": addon}
super().__init__(None, logger)


class AddonNotSupportedError(HassioNotSupportedError):
"""Addon doesn't support a function."""

@@ -268,11 +411,8 @@ class AddonNotSupportedArchitectureError(AddonNotSupportedError):
architectures: list[str],
) -> None:
"""Initialize exception."""
super().__init__(
None,
logger,
extra_fields={"slug": slug, "architectures": ", ".join(architectures)},
)
self.extra_fields = {"slug": slug, "architectures": ", ".join(architectures)}
super().__init__(None, logger)


class AddonNotSupportedMachineTypeError(AddonNotSupportedError):
@@ -289,11 +429,8 @@ class AddonNotSupportedMachineTypeError(AddonNotSupportedError):
machine_types: list[str],
) -> None:
"""Initialize exception."""
super().__init__(
None,
logger,
extra_fields={"slug": slug, "machine_types": ", ".join(machine_types)},
)
self.extra_fields = {"slug": slug, "machine_types": ", ".join(machine_types)}
super().__init__(None, logger)


class AddonNotSupportedHomeAssistantVersionError(AddonNotSupportedError):
@@ -310,11 +447,96 @@ class AddonNotSupportedHomeAssistantVersionError(AddonNotSupportedError):
version: str,
) -> None:
"""Initialize exception."""
super().__init__(
None,
logger,
extra_fields={"slug": slug, "version": version},
)
self.extra_fields = {"slug": slug, "version": version}
super().__init__(None, logger)


class AddonNotSupportedWriteStdinError(AddonNotSupportedError, APIError):
"""Addon does not support writing to stdin."""

error_key = "addon_not_supported_write_stdin_error"
message_template = "Add-on {addon} does not support writing to stdin"

def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": addon}
super().__init__(None, logger)


class AddonBuildDockerfileMissingError(AddonNotSupportedError, APIError):
"""Raise when addon build invalid because dockerfile is missing."""

error_key = "addon_build_dockerfile_missing_error"
message_template = (
"Cannot build addon '{addon}' because dockerfile is missing. A repair "
"using '{repair_command}' will fix this if the cause is data "
"corruption. Otherwise please report this to the addon developer."
)

def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": addon, "repair_command": "ha supervisor repair"}
super().__init__(None, logger)


class AddonBuildArchitectureNotSupportedError(AddonNotSupportedError, APIError):
"""Raise when addon cannot be built on system because it doesn't support its architecture."""

error_key = "addon_build_architecture_not_supported_error"
message_template = (
"Cannot build addon '{addon}' because its supported architectures "
"({addon_arches}) do not match the system supported architectures ({system_arches})"
)

def __init__(
self,
logger: Callable[..., None] | None = None,
*,
addon: str,
addon_arch_list: list[str],
system_arch_list: list[str],
) -> None:
"""Initialize exception."""
self.extra_fields = {
"addon": addon,
"addon_arches": ", ".join(addon_arch_list),
"system_arches": ", ".join(system_arch_list),
}
super().__init__(None, logger)


class AddonUnknownError(AddonsError, APIUnknownSupervisorError):
"""Raise when unknown error occurs taking an action for an addon."""

error_key = "addon_unknown_error"
message_template = "An unknown error occurred with addon {addon}"

def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": addon}
super().__init__(logger)


class AddonBuildFailedUnknownError(AddonsError, APIUnknownSupervisorError):
"""Raise when the build failed for an addon due to an unknown error."""

error_key = "addon_build_failed_unknown_error"
message_template = (
"An unknown error occurred while trying to build the image for addon {addon}"
)

def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": addon}
super().__init__(logger)


class AddonsJobError(AddonsError, JobException):
@@ -346,13 +568,64 @@ class AuthError(HassioError):
"""Auth errors."""


class AuthPasswordResetError(HassioError):
class AuthPasswordResetError(AuthError, APIError):
"""Auth error if password reset failed."""

error_key = "auth_password_reset_error"
message_template = "Username '{user}' does not exist. Check list of users using '{auth_list_command}'."

class AuthListUsersError(HassioError):
def __init__(
self,
logger: Callable[..., None] | None = None,
*,
user: str,
) -> None:
"""Initialize exception."""
self.extra_fields = {"user": user, "auth_list_command": "ha auth list"}
super().__init__(None, logger)


class AuthListUsersError(AuthError, APIUnknownSupervisorError):
"""Auth error if listing users failed."""

error_key = "auth_list_users_error"
message_template = "Can't request listing users on Home Assistant"


class AuthListUsersNoneResponseError(AuthError, APIInternalServerError):
"""Auth error if listing users returned invalid None response."""

error_key = "auth_list_users_none_response_error"
message_template = "Home Assistant returned invalid response of `{none}` instead of a list of users. Check Home Assistant logs for details (check with `{logs_command}`)"
extra_fields = {"none": "None", "logs_command": "ha core logs"}

def __init__(self, logger: Callable[..., None] | None = None) -> None:
"""Initialize exception."""
super().__init__(None, logger)


class AuthInvalidNonStringValueError(AuthError, APIUnauthorized):
"""Auth error if something besides a string is provided as username or password."""

error_key = "auth_invalid_non_string_value_error"
message_template = "Username and password must be strings"

def __init__(
self,
logger: Callable[..., None] | None = None,
*,
headers: Mapping[str, str] | None = None,
) -> None:
"""Initialize exception."""
super().__init__(None, logger, headers=headers)


class AuthHomeAssistantAPIValidationError(AuthError, APIUnknownSupervisorError):
"""Error encountered trying to validate auth details via Home Assistant API."""

error_key = "auth_home_assistant_api_validation_error"
message_template = "Unable to validate authentication details with Home Assistant"


# Host
@@ -385,60 +658,6 @@ class HostLogError(HostError):
"""Internal error with host log."""


# API


class APIError(HassioError, RuntimeError):
"""API errors."""

status = 400

def __init__(
self,
message: str | None = None,
logger: Callable[..., None] | None = None,
*,
job_id: str | None = None,
error: HassioError | None = None,
) -> None:
"""Raise & log, optionally with job."""
# Allow these to be set from another error here since APIErrors essentially wrap others to add a status
self.error_key = error.error_key if error else None
self.message_template = error.message_template if error else None
super().__init__(
message, logger, extra_fields=error.extra_fields if error else None
)
self.job_id = job_id


class APIForbidden(APIError):
"""API forbidden error."""

status = 403


class APINotFound(APIError):
"""API not found error."""

status = 404


class APIGone(APIError):
"""API is no longer available."""

status = 410


class APIAddonNotInstalled(APIError):
"""Not installed addon requested at addons API."""


class APIDBMigrationInProgress(APIError):
"""Service is unavailable due to an offline DB migration is in progress."""

status = 503


# Service / Discovery
@@ -616,6 +835,10 @@ class DockerError(HassioError):
"""Docker API/Transport errors."""


class DockerBuildError(DockerError):
"""Docker error during build."""


class DockerAPIError(DockerError):
"""Docker API error."""

@@ -647,7 +870,7 @@ class DockerNoSpaceOnDevice(DockerError):
super().__init__(None, logger=logger)


class DockerHubRateLimitExceeded(DockerError):
class DockerHubRateLimitExceeded(DockerError, APITooManyRequests):
"""Raise for docker hub rate limit exceeded error."""

error_key = "dockerhub_rate_limit_exceeded"
@@ -655,16 +878,13 @@ class DockerHubRateLimitExceeded(DockerError):
"Your IP address has made too many requests to Docker Hub which activated a rate limit. "
"For more details see {dockerhub_rate_limit_url}"
)
extra_fields = {
"dockerhub_rate_limit_url": "https://www.home-assistant.io/more-info/dockerhub-rate-limit"
}

def __init__(self, logger: Callable[..., None] | None = None) -> None:
"""Raise & log."""
super().__init__(
None,
logger=logger,
extra_fields={
"dockerhub_rate_limit_url": "https://www.home-assistant.io/more-info/dockerhub-rate-limit"
},
)
super().__init__(None, logger=logger)


class DockerJobError(DockerError, JobException):
@@ -735,6 +955,20 @@ class StoreNotFound(StoreError):
"""Raise if slug is not known."""


class StoreAddonNotFoundError(StoreError, APINotFound):
"""Raise if a requested addon is not in the store."""

error_key = "store_addon_not_found_error"
message_template = "Addon {addon} does not exist in the store"

def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": addon}
super().__init__(None, logger)


class StoreJobError(StoreError, JobException):
"""Raise on job error with git."""

@@ -770,7 +1004,7 @@ class BackupJobError(BackupError, JobException):
"""Raise on Backup job error."""


class BackupFileNotFoundError(BackupError):
class BackupFileNotFoundError(BackupError, APINotFound):
"""Raise if the backup file hasn't been found."""


@@ -782,6 +1016,55 @@ class BackupFileExistError(BackupError):
"""Raise if the backup file already exists."""


class AddonBackupMetadataInvalidError(BackupError, APIError):
"""Raise if invalid metadata file provided for addon in backup."""

error_key = "addon_backup_metadata_invalid_error"
message_template = (
"Metadata file for add-on {addon} in backup is invalid: {validation_error}"
)

def __init__(
self,
logger: Callable[..., None] | None = None,
*,
addon: str,
validation_error: str,
) -> None:
"""Initialize exception."""
self.extra_fields = {"addon": addon, "validation_error": validation_error}
super().__init__(None, logger)


class AddonPrePostBackupCommandReturnedError(BackupError, APIError):
"""Raise when addon's pre/post backup command returns an error."""

error_key = "addon_pre_post_backup_command_returned_error"
message_template = (
"Pre-/Post backup command for add-on {addon} returned error code: "
"{exit_code}. Please report this to the addon developer. Enable debug "
"logging to capture complete command output using {debug_logging_command}"
)

def __init__(
self, logger: Callable[..., None] | None = None, *, addon: str, exit_code: int
) -> None:
"""Initialize exception."""
self.extra_fields = {
"addon": addon,
"exit_code": exit_code,
"debug_logging_command": "ha supervisor options --logging debug",
}
super().__init__(None, logger)


class BackupRestoreUnknownError(BackupError, APIUnknownSupervisorError):
"""Raise when an unknown error occurs during backup or restore."""

error_key = "backup_restore_unknown_error"
message_template = "An unknown error occurred during backup/restore"


# Security
@@ -6,8 +6,8 @@ import logging
import socket

from ..dbus.const import (
ConnectionState,
ConnectionStateFlags,
ConnectionStateType,
DeviceType,
InterfaceAddrGenMode as NMInterfaceAddrGenMode,
InterfaceIp6Privacy as NMInterfaceIp6Privacy,
@@ -267,25 +267,47 @@ class Interface:
return InterfaceMethod.DISABLED

@staticmethod
def _map_nm_addr_gen_mode(addr_gen_mode: int) -> InterfaceAddrGenMode:
"""Map IPv6 interface addr_gen_mode."""
def _map_nm_addr_gen_mode(addr_gen_mode: int | None) -> InterfaceAddrGenMode:
"""Map IPv6 interface addr_gen_mode.

NetworkManager omits the addr_gen_mode property when set to DEFAULT, so we
treat None as DEFAULT here.
"""
mapping = {
NMInterfaceAddrGenMode.EUI64.value: InterfaceAddrGenMode.EUI64,
NMInterfaceAddrGenMode.STABLE_PRIVACY.value: InterfaceAddrGenMode.STABLE_PRIVACY,
NMInterfaceAddrGenMode.DEFAULT_OR_EUI64.value: InterfaceAddrGenMode.DEFAULT_OR_EUI64,
NMInterfaceAddrGenMode.DEFAULT.value: InterfaceAddrGenMode.DEFAULT,
None: InterfaceAddrGenMode.DEFAULT,
}

if addr_gen_mode not in mapping:
_LOGGER.warning(
"Unknown addr_gen_mode value from NetworkManager: %s", addr_gen_mode
)

return mapping.get(addr_gen_mode, InterfaceAddrGenMode.DEFAULT)

@staticmethod
def _map_nm_ip6_privacy(ip6_privacy: int) -> InterfaceIp6Privacy:
"""Map IPv6 interface ip6_privacy."""
def _map_nm_ip6_privacy(ip6_privacy: int | None) -> InterfaceIp6Privacy:
"""Map IPv6 interface ip6_privacy.

NetworkManager omits the ip6_privacy property when set to DEFAULT, so we
treat None as DEFAULT here.
"""
mapping = {
NMInterfaceIp6Privacy.DISABLED.value: InterfaceIp6Privacy.DISABLED,
NMInterfaceIp6Privacy.ENABLED_PREFER_PUBLIC.value: InterfaceIp6Privacy.ENABLED_PREFER_PUBLIC,
NMInterfaceIp6Privacy.ENABLED.value: InterfaceIp6Privacy.ENABLED,
NMInterfaceIp6Privacy.DEFAULT.value: InterfaceIp6Privacy.DEFAULT,
None: InterfaceIp6Privacy.DEFAULT,
}

if ip6_privacy not in mapping:
_LOGGER.warning(
"Unknown ip6_privacy value from NetworkManager: %s", ip6_privacy
)

return mapping.get(ip6_privacy, InterfaceIp6Privacy.DEFAULT)

@staticmethod
@@ -295,8 +317,8 @@ class Interface:
return False

return connection.state in (
ConnectionStateType.ACTIVATED,
ConnectionStateType.ACTIVATING,
ConnectionState.ACTIVATED,
ConnectionState.ACTIVATING,
)

@staticmethod

@@ -16,7 +16,7 @@ from ..dbus.const import (
DBUS_IFACE_DNS,
DBUS_IFACE_NM,
DBUS_SIGNAL_NM_CONNECTION_ACTIVE_CHANGED,
ConnectionStateType,
ConnectionState,
ConnectivityState,
DeviceType,
WirelessMethodType,
@@ -338,16 +338,16 @@ class NetworkManager(CoreSysAttributes):
# the state change before this point. Get the state currently to
# avoid any race condition.
await con.update()
state: ConnectionStateType = con.state
state: ConnectionState = con.state

while state != ConnectionStateType.ACTIVATED:
if state == ConnectionStateType.DEACTIVATED:
while state != ConnectionState.ACTIVATED:
if state == ConnectionState.DEACTIVATED:
raise HostNetworkError(
"Activating connection failed, check connection settings."
)

msg = await signal.wait_for_signal()
state = msg[0]
state = ConnectionState(msg[0])
_LOGGER.debug("Active connection state changed to %s", state)

# update_only means not done by user so don't force a check afterwards
@@ -102,13 +102,17 @@ class SupervisorJobError:
"Unknown error, see Supervisor logs (check with 'ha supervisor logs')"
)
stage: str | None = None
error_key: str | None = None
extra_fields: dict[str, Any] | None = None

def as_dict(self) -> dict[str, str | None]:
def as_dict(self) -> dict[str, Any]:
"""Return dictionary representation."""
return {
"type": self.type_.__name__,
"message": self.message,
"stage": self.stage,
"error_key": self.error_key,
"extra_fields": self.extra_fields,
}


@@ -158,7 +162,9 @@ class SupervisorJob:
def capture_error(self, err: HassioError | None = None) -> None:
"""Capture an error or record that an unknown error has occurred."""
if err:
new_error = SupervisorJobError(type(err), str(err), self.stage)
new_error = SupervisorJobError(
type(err), str(err), self.stage, err.error_key, err.extra_fields
)
else:
new_error = SupervisorJobError(stage=self.stage)
self.errors += [new_error]
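Carrying error_key and extra_fields through capture_error means a job's error list now serializes with the same structured fields the API exceptions expose. For example, after capturing an AddonNotRunningError for local_example, as_dict() would return (values reconstructed from the templates defined earlier in this comparison):

expected = {
    "type": "AddonNotRunningError",
    "message": "Add-on local_example is not running",
    "stage": None,
    "error_key": "addon_not_running_error",
    "extra_fields": {"addon": "local_example"},
}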
@@ -2,7 +2,7 @@

from ...const import CoreState
from ...coresys import CoreSys
from ...dbus.const import ConnectionStateFlags, ConnectionStateType
from ...dbus.const import ConnectionState, ConnectionStateFlags
from ...dbus.network.interface import NetworkInterface
from ...exceptions import NetworkInterfaceNotFound
from ..const import ContextType, IssueType
@@ -47,7 +47,7 @@ class CheckNetworkInterfaceIPV4(CheckBase):

return not (
interface.connection.state
in [ConnectionStateType.ACTIVATED, ConnectionStateType.ACTIVATING]
in [ConnectionState.ACTIVATED, ConnectionState.ACTIVATING]
and ConnectionStateFlags.IP4_READY in interface.connection.state_flags
)


@@ -183,19 +183,22 @@ class GitRepo(CoreSysAttributes):
raise StoreGitError() from err

try:
branch = self.repo.active_branch.name
repo = self.repo

# Download data
await self.sys_run_in_executor(
ft.partial(
self.repo.remotes.origin.fetch,
**{"update-shallow": True, "depth": 1},  # type: ignore
def _fetch_and_check() -> tuple[str, bool]:
"""Fetch from origin and check if changed."""
# This property access is I/O bound
branch = repo.active_branch.name
repo.remotes.origin.fetch(
**{"update-shallow": True, "depth": 1}  # type: ignore[arg-type]
)
)
changed = repo.commit(branch) != repo.commit(f"origin/{branch}")
return branch, changed

if changed := self.repo.commit(branch) != self.repo.commit(
f"origin/{branch}"
):
# Download data and check for changes
branch, changed = await self.sys_run_in_executor(_fetch_and_check)

if changed:
# Jump on top of that
await self.sys_run_in_executor(
ft.partial(self.repo.git.reset, f"origin/{branch}", hard=True)
@@ -28,8 +28,8 @@ from .exceptions import (
DockerError,
HostAppArmorError,
SupervisorAppArmorError,
SupervisorError,
SupervisorJobError,
SupervisorUnknownError,
SupervisorUpdateError,
)
from .jobs.const import JobCondition, JobThrottle
@@ -261,7 +261,7 @@ class Supervisor(CoreSysAttributes):
try:
return await self.instance.stats()
except DockerError as err:
raise SupervisorError() from err
raise SupervisorUnknownError() from err

async def repair(self):
"""Repair local Supervisor data."""
@@ -5,6 +5,7 @@ from datetime import timedelta
import errno
from http import HTTPStatus
from pathlib import Path
from typing import Any
from unittest.mock import MagicMock, PropertyMock, call, patch

import aiodocker
@@ -23,7 +24,13 @@ from supervisor.docker.addon import DockerAddon
from supervisor.docker.const import ContainerState
from supervisor.docker.manager import CommandReturn, DockerAPI
from supervisor.docker.monitor import DockerContainerStateEvent
from supervisor.exceptions import AddonsError, AddonsJobError, AudioUpdateError
from supervisor.exceptions import (
AddonPrePostBackupCommandReturnedError,
AddonsJobError,
AddonUnknownError,
AudioUpdateError,
HassioError,
)
from supervisor.hardware.helper import HwHelper
from supervisor.ingress import Ingress
from supervisor.store.repository import Repository
@@ -502,31 +509,26 @@ async def test_backup_with_pre_post_command(


@pytest.mark.parametrize(
"get_error,exception_on_exec",
("container_get_side_effect", "exec_run_side_effect", "exc_type_raised"),
[
(NotFound("missing"), False),
(DockerException(), False),
(None, True),
(None, False),
(NotFound("missing"), [(1, None)], AddonUnknownError),
(DockerException(), [(1, None)], AddonUnknownError),
(None, DockerException(), AddonUnknownError),
(None, [(1, None)], AddonPrePostBackupCommandReturnedError),
],
)
@pytest.mark.usefixtures("tmp_supervisor_data", "path_extern")
async def test_backup_with_pre_command_error(
coresys: CoreSys,
install_addon_ssh: Addon,
container: MagicMock,
get_error: DockerException | None,
exception_on_exec: bool,
tmp_supervisor_data,
path_extern,
container_get_side_effect: DockerException | None,
exec_run_side_effect: DockerException | list[tuple[int, Any]],
exc_type_raised: type[HassioError],
) -> None:
"""Test backing up an addon with error running pre command."""
if get_error:
coresys.docker.containers.get.side_effect = get_error

if exception_on_exec:
container.exec_run.side_effect = DockerException()
else:
container.exec_run.return_value = (1, None)
coresys.docker.containers.get.side_effect = container_get_side_effect
container.exec_run.side_effect = exec_run_side_effect

install_addon_ssh.path_data.mkdir()
await install_addon_ssh.load()
@@ -535,7 +537,7 @@ async def test_backup_with_pre_command_error(
with (
patch.object(DockerAddon, "is_running", return_value=True),
patch.object(Addon, "backup_pre", new=PropertyMock(return_value="backup_pre")),
pytest.raises(AddonsError),
pytest.raises(exc_type_raised),
):
assert await install_addon_ssh.backup(tarfile) is None

@@ -947,7 +949,7 @@ async def test_addon_load_succeeds_with_docker_errors(
)
caplog.clear()
await install_addon_ssh.load()
assert "Invalid build environment" in caplog.text
assert "Cannot build addon 'local_ssh' because dockerfile is missing" in caplog.text

# Image build failure
caplog.clear()
@@ -6,11 +6,13 @@ from pathlib import Path
from unittest.mock import PropertyMock, patch

from awesomeversion import AwesomeVersion
import pytest

from supervisor.addons.addon import Addon
from supervisor.addons.build import AddonBuild
from supervisor.coresys import CoreSys
from supervisor.docker.const import DOCKER_HUB
from supervisor.exceptions import AddonBuildDockerfileMissingError

from tests.common import is_in_list

@@ -106,11 +108,11 @@ async def test_build_valid(coresys: CoreSys, install_addon_ssh: Addon):
type(coresys.arch), "default", new=PropertyMock(return_value="aarch64")
),
):
assert await build.is_valid()
assert (await build.is_valid()) is None


async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon):
"""Test platform set in docker args."""
"""Test build not supported because Dockerfile missing for specified architecture."""
build = await AddonBuild(coresys, install_addon_ssh).load_config()
with (
patch.object(
@@ -119,8 +121,9 @@ async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon):
patch.object(
type(coresys.arch), "default", new=PropertyMock(return_value="amd64")
),
pytest.raises(AddonBuildDockerfileMissingError),
):
assert not await build.is_valid()
await build.is_valid()


async def test_docker_config_no_registries(coresys: CoreSys, install_addon_ssh: Addon):
@@ -5,6 +5,7 @@ from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
from aiohttp import ClientResponse
|
||||
from aiohttp.test_utils import TestClient
|
||||
from docker.errors import DockerException
|
||||
import pytest
|
||||
|
||||
from supervisor.addons.addon import Addon
|
||||
@@ -477,6 +478,11 @@ async def test_addon_options_boot_mode_manual_only_invalid(
|
||||
body["message"]
|
||||
== "Addon local_example boot option is set to manual_only so it cannot be changed"
|
||||
)
|
||||
assert body["error_key"] == "addon_boot_config_cannot_change_error"
|
||||
assert body["extra_fields"] == {
|
||||
"addon": "local_example",
|
||||
"boot_config": "manual_only",
|
||||
}
|
||||
|
||||
|
||||
async def get_message(resp: ClientResponse, json_expected: bool) -> str:
|
||||
@@ -545,3 +551,133 @@ async def test_addon_not_installed(
|
||||
resp = await api_client.request(method, url)
|
||||
assert resp.status == 400
|
||||
assert await get_message(resp, json_expected) == "Addon is not installed"
|
||||
|
||||
|
||||
async def test_addon_set_options(api_client: TestClient, install_addon_example: Addon):
|
||||
"""Test setting options for an addon."""
|
||||
resp = await api_client.post(
|
||||
"/addons/local_example/options", json={"options": {"message": "test"}}
|
||||
)
|
||||
assert resp.status == 200
|
||||
assert install_addon_example.options == {"message": "test"}
|
||||
|
||||
|
||||
async def test_addon_set_options_error(
|
||||
api_client: TestClient, install_addon_example: Addon
|
||||
):
|
||||
"""Test setting options for an addon."""
|
||||
resp = await api_client.post(
|
||||
"/addons/local_example/options", json={"options": {"message": True}}
|
||||
)
|
||||
assert resp.status == 400
|
||||
body = await resp.json()
|
||||
assert (
|
||||
body["message"]
|
||||
== "Add-on local_example has invalid options: not a valid value. Got {'message': True}"
|
||||
)
|
||||
assert body["error_key"] == "addon_configuration_invalid_error"
|
||||
assert body["extra_fields"] == {
|
||||
"addon": "local_example",
|
||||
"validation_error": "not a valid value. Got {'message': True}",
|
||||
}
|
||||
|
||||
|
||||
async def test_addon_start_options_error(
|
||||
api_client: TestClient,
|
||||
install_addon_example: Addon,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
):
|
||||
"""Test error writing options when trying to start addon."""
|
||||
install_addon_example.options = {"message": "hello"}
|
||||
|
||||
# Simulate OS error trying to write the file
|
||||
with patch("supervisor.utils.json.atomic_write", side_effect=OSError("fail")):
|
||||
resp = await api_client.post("/addons/local_example/start")
|
||||
assert resp.status == 500
|
||||
body = await resp.json()
|
||||
assert (
|
||||
body["message"]
|
||||
== "An unknown error occurred with addon local_example. Check supervisor logs for details (check with 'ha supervisor logs')"
|
||||
)
|
||||
assert body["error_key"] == "addon_unknown_error"
|
||||
assert body["extra_fields"] == {
|
||||
"addon": "local_example",
|
||||
"logs_command": "ha supervisor logs",
|
||||
}
|
||||
assert "Add-on local_example can't write options" in caplog.text
|
||||
|
||||
# Simulate an update with a breaking change for options schema creating failure on start
|
||||
caplog.clear()
|
||||
install_addon_example.data["schema"] = {"message": "bool"}
|
||||
resp = await api_client.post("/addons/local_example/start")
|
||||
assert resp.status == 400
|
||||
body = await resp.json()
|
||||
assert (
|
||||
body["message"]
|
||||
== "Add-on local_example has invalid options: expected boolean. Got {'message': 'hello'}"
|
||||
)
|
||||
assert body["error_key"] == "addon_configuration_invalid_error"
|
||||
assert body["extra_fields"] == {
|
||||
"addon": "local_example",
|
||||
"validation_error": "expected boolean. Got {'message': 'hello'}",
|
||||
}
|
||||
assert (
|
||||
"Add-on local_example has invalid options: expected boolean. Got {'message': 'hello'}"
|
||||
in caplog.text
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(("method", "action"), [("get", "stats"), ("post", "stdin")])
|
||||
@pytest.mark.usefixtures("install_addon_example")
|
||||
async def test_addon_not_running_error(
|
||||
api_client: TestClient, method: str, action: str
|
||||
):
|
||||
"""Test addon not running error for endpoints that require that."""
|
||||
with patch.object(Addon, "with_stdin", new=PropertyMock(return_value=True)):
|
||||
resp = await api_client.request(method, f"/addons/local_example/{action}")
|
||||
|
||||
assert resp.status == 400
|
||||
body = await resp.json()
|
||||
assert body["message"] == "Add-on local_example is not running"
|
||||
assert body["error_key"] == "addon_not_running_error"
|
||||
assert body["extra_fields"] == {"addon": "local_example"}
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("install_addon_example")
|
||||
async def test_addon_write_stdin_not_supported_error(api_client: TestClient):
|
||||
"""Test error when trying to write stdin to addon that does not support it."""
|
||||
resp = await api_client.post("/addons/local_example/stdin")
|
||||
assert resp.status == 400
|
||||
body = await resp.json()
|
||||
assert body["message"] == "Add-on local_example does not support writing to stdin"
|
||||
assert body["error_key"] == "addon_not_supported_write_stdin_error"
|
||||
assert body["extra_fields"] == {"addon": "local_example"}
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("install_addon_ssh")
|
||||
async def test_addon_rebuild_fails_error(api_client: TestClient, coresys: CoreSys):
|
||||
"""Test error when build fails during rebuild for addon."""
|
||||
coresys.hardware.disk.get_disk_free_space = lambda x: 5000
|
||||
coresys.docker.containers.run.side_effect = DockerException("fail")
|
||||
|
||||
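    # Patch the CPU arch so the local_ssh add-on looks buildable on this host;
    # the rebuild then reaches the Docker build step, where containers.run fails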
    with (
        patch.object(
            CpuArchManager, "supported", new=PropertyMock(return_value=["aarch64"])
        ),
        patch.object(
            CpuArchManager, "default", new=PropertyMock(return_value="aarch64")
        ),
        patch.object(AddonBuild, "get_docker_args", return_value={}),
    ):
        resp = await api_client.post("/addons/local_ssh/rebuild")
    assert resp.status == 500
    body = await resp.json()
    assert (
        body["message"]
        == "An unknown error occurred while trying to build the image for addon local_ssh. Check supervisor logs for details (check with 'ha supervisor logs')"
    )
    assert body["error_key"] == "addon_build_failed_unknown_error"
    assert body["extra_fields"] == {
        "addon": "local_ssh",
        "logs_command": "ha supervisor logs",
    }

@@ -1,6 +1,7 @@
"""Test auth API."""

from datetime import UTC, datetime, timedelta
from typing import Any
from unittest.mock import AsyncMock, MagicMock, patch

from aiohttp.hdrs import WWW_AUTHENTICATE
@@ -9,6 +10,8 @@ import pytest

from supervisor.addons.addon import Addon
from supervisor.coresys import CoreSys
from supervisor.exceptions import HomeAssistantAPIError, HomeAssistantWSError
from supervisor.homeassistant.api import HomeAssistantAPI

from tests.common import MockResponse
from tests.const import TEST_ADDON_SLUG
@@ -100,6 +103,52 @@ async def test_password_reset(
    assert "Successful password reset for 'john'" in caplog.text


@pytest.mark.parametrize(
    ("post_mock", "expected_log"),
    [
        (
            MagicMock(return_value=MockResponse(status=400)),
            "The user 'john' is not registered",
        ),
        (
            MagicMock(side_effect=HomeAssistantAPIError("fail")),
            "Can't request password reset on Home Assistant: fail",
        ),
    ],
)
async def test_failed_password_reset(
    api_client: TestClient,
    coresys: CoreSys,
    caplog: pytest.LogCaptureFixture,
    websession: MagicMock,
    post_mock: MagicMock,
    expected_log: str,
):
    """Test failed password reset."""
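    # Seed a valid access token so the reset request reaches the mocked POST call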
    coresys.homeassistant.api.access_token = "abc123"
    # pylint: disable-next=protected-access
    coresys.homeassistant.api._access_token_expires = datetime.now(tz=UTC) + timedelta(
        days=1
    )

    websession.post = post_mock
    resp = await api_client.post(
        "/auth/reset", json={"username": "john", "password": "doe"}
    )
    assert resp.status == 400
    body = await resp.json()
    assert (
        body["message"]
        == "Username 'john' does not exist. Check list of users using 'ha auth list'."
    )
    assert body["error_key"] == "auth_password_reset_error"
    assert body["extra_fields"] == {
        "user": "john",
        "auth_list_command": "ha auth list",
    }
    assert expected_log in caplog.text


async def test_list_users(
    api_client: TestClient, coresys: CoreSys, ha_ws_client: AsyncMock
):
@@ -120,6 +169,48 @@ async def test_list_users(
    ]


@pytest.mark.parametrize(
    ("send_command_mock", "error_response", "expected_log"),
    [
        (
            AsyncMock(return_value=None),
            {
                "result": "error",
                "message": "Home Assistant returned invalid response of `None` instead of a list of users. Check Home Assistant logs for details (check with `ha core logs`)",
                "error_key": "auth_list_users_none_response_error",
                "extra_fields": {"none": "None", "logs_command": "ha core logs"},
            },
            "Home Assistant returned invalid response of `None` instead of a list of users. Check Home Assistant logs for details (check with `ha core logs`)",
        ),
        (
            AsyncMock(side_effect=HomeAssistantWSError("fail")),
            {
                "result": "error",
                "message": "Can't request listing users on Home Assistant. Check supervisor logs for details (check with 'ha supervisor logs')",
                "error_key": "auth_list_users_error",
                "extra_fields": {"logs_command": "ha supervisor logs"},
            },
            "Can't request listing users on Home Assistant: fail",
        ),
    ],
)
async def test_list_users_failure(
    api_client: TestClient,
    ha_ws_client: AsyncMock,
    caplog: pytest.LogCaptureFixture,
    send_command_mock: AsyncMock,
    error_response: dict[str, Any],
    expected_log: str,
):
    """Test failure listing users via API."""
    ha_ws_client.async_send_command = send_command_mock
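    # Either failure mode must surface as HTTP 500 with the structured error payload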
    resp = await api_client.get("/auth/list")
    assert resp.status == 500
    result = await resp.json()
    assert result == error_response
    assert expected_log in caplog.text


@pytest.mark.parametrize(
    ("field", "api_client"),
    [("username", TEST_ADDON_SLUG), ("user", TEST_ADDON_SLUG)],
@@ -156,6 +247,13 @@ async def test_auth_json_failure_none(
    mock_check_login.return_value = True
    resp = await api_client.post("/auth", json={"username": user, "password": password})
    assert resp.status == 401
    assert (
        resp.headers["WWW-Authenticate"]
        == 'Basic realm="Home Assistant Authentication"'
    )
    body = await resp.json()
    assert body["message"] == "Username and password must be strings"
    assert body["error_key"] == "auth_invalid_non_string_value_error"


@pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True)
@@ -267,3 +365,26 @@ async def test_non_addon_token_no_auth_access(api_client: TestClient):
    """Test auth where add-on is not allowed to access auth API."""
    resp = await api_client.post("/auth", json={"username": "test", "password": "pass"})
    assert resp.status == 403


@pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True)
@pytest.mark.usefixtures("install_addon_ssh")
async def test_auth_backend_login_failure(api_client: TestClient):
    """Test backend login failure on auth."""
    with (
        patch.object(HomeAssistantAPI, "check_api_state", return_value=True),
        patch.object(
            HomeAssistantAPI, "make_request", side_effect=HomeAssistantAPIError("fail")
        ),
    ):
        resp = await api_client.post(
            "/auth", json={"username": "test", "password": "pass"}
        )
    assert resp.status == 500
    body = await resp.json()
    assert (
        body["message"]
        == "Unable to validate authentication details with Home Assistant. Check supervisor logs for details (check with 'ha supervisor logs')"
    )
    assert body["error_key"] == "auth_home_assistant_api_validation_error"
    assert body["extra_fields"] == {"logs_command": "ha supervisor logs"}

@@ -17,6 +17,7 @@ from supervisor.const import CoreState
from supervisor.coresys import CoreSys
from supervisor.docker.manager import DockerAPI
from supervisor.exceptions import (
    AddonPrePostBackupCommandReturnedError,
    AddonsError,
    BackupInvalidError,
    HomeAssistantBackupError,
@@ -24,6 +25,7 @@ from supervisor.exceptions import (
from supervisor.homeassistant.core import HomeAssistantCore
from supervisor.homeassistant.module import HomeAssistant
from supervisor.homeassistant.websocket import HomeAssistantWebSocket
from supervisor.jobs import SupervisorJob
from supervisor.mounts.mount import Mount
from supervisor.supervisor import Supervisor

@@ -401,6 +403,8 @@ async def test_api_backup_errors(
                "type": "BackupError",
                "message": str(err),
                "stage": None,
                "error_key": None,
                "extra_fields": None,
            }
        ]
        assert job["child_jobs"][2]["name"] == "backup_store_folders"
@@ -437,6 +441,8 @@ async def test_api_backup_errors(
                "type": "HomeAssistantBackupError",
                "message": "Backup error",
                "stage": "home_assistant",
                "error_key": None,
                "extra_fields": None,
            }
        ]
        assert job["child_jobs"][0]["name"] == "backup_store_homeassistant"
@@ -445,6 +451,8 @@ async def test_api_backup_errors(
                "type": "HomeAssistantBackupError",
                "message": "Backup error",
                "stage": None,
                "error_key": None,
                "extra_fields": None,
            }
        ]
        assert len(job["child_jobs"]) == 1
@@ -749,6 +757,8 @@ async def test_backup_to_multiple_locations_error_on_copy(
            "type": "BackupError",
            "message": "Could not copy backup to .cloud_backup due to: ",
            "stage": None,
            "error_key": None,
            "extra_fields": None,
        }
    ]

@@ -1483,3 +1493,44 @@ async def test_immediate_list_after_missing_file_restore(
    result = await resp.json()
    assert len(result["data"]["backups"]) == 1
    assert result["data"]["backups"][0]["slug"] == "93b462f8"


@pytest.mark.parametrize("command", ["backup_pre", "backup_post"])
@pytest.mark.usefixtures("install_addon_example", "tmp_supervisor_data")
async def test_pre_post_backup_command_error(
    api_client: TestClient, coresys: CoreSys, container: MagicMock, command: str
):
    """Test pre/post backup command error."""
    await coresys.core.set_state(CoreState.RUNNING)
    coresys.hardware.disk.get_disk_free_space = lambda x: 5000

    container.status = "running"
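    # docker-py's exec_run returns an (exit_code, output) tuple; exit code 1
    # marks the pre/post backup command as failed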
    container.exec_run.return_value = (1, b"")
    with patch.object(Addon, command, new=PropertyMock(return_value="test")):
        resp = await api_client.post(
            "/backups/new/partial", json={"addons": ["local_example"]}
        )

    assert resp.status == 200
    body = await resp.json()
    job_id = body["data"]["job_id"]
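    # The request itself succeeds; the command failure is only recorded on the
    # backup_store_addons child job, so look it up via the returned job ID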
    job: SupervisorJob | None = None
    for j in coresys.jobs.jobs:
        if j.name == "backup_store_addons" and j.parent_id == job_id:
            job = j
            break

    assert job
    assert job.done is True
    assert job.errors[0].type_ == AddonPrePostBackupCommandReturnedError
    assert job.errors[0].message == (
        "Pre-/Post backup command for add-on local_example returned error code: "
        "1. Please report this to the addon developer. Enable debug "
        "logging to capture complete command output using ha supervisor options --logging debug"
    )
    assert job.errors[0].error_key == "addon_pre_post_backup_command_returned_error"
    assert job.errors[0].extra_fields == {
        "addon": "local_example",
        "exit_code": 1,
        "debug_logging_command": "ha supervisor options --logging debug",
    }

@@ -374,6 +374,8 @@ async def test_job_with_error(
                "type": "SupervisorError",
                "message": "bad",
                "stage": "test",
                "error_key": None,
                "extra_fields": None,
            }
        ],
        "child_jobs": [
@@ -391,6 +393,8 @@ async def test_job_with_error(
                "type": "SupervisorError",
                "message": "bad",
                "stage": None,
                "error_key": None,
                "extra_fields": None,
            }
        ],
        "child_jobs": [],

@@ -4,7 +4,6 @@ import asyncio
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch

from aiohttp import ClientResponse
from aiohttp.test_utils import TestClient
from awesomeversion import AwesomeVersion
import pytest
@@ -292,14 +291,6 @@ async def test_api_detached_addon_documentation(
    assert result == "Addon local_ssh does not exist in the store"


async def get_message(resp: ClientResponse, json_expected: bool) -> str:
    """Get message from response based on response type."""
    if json_expected:
        body = await resp.json()
        return body["message"]
    return await resp.text()


@pytest.mark.parametrize(
    ("method", "url", "json_expected"),
    [
@@ -325,7 +316,13 @@ async def test_store_addon_not_found(
    """Test store addon not found error."""
    resp = await api_client.request(method, url)
    assert resp.status == 404
    assert await get_message(resp, json_expected) == "Addon bad does not exist"
    if json_expected:
        body = await resp.json()
        assert body["message"] == "Addon bad does not exist in the store"
        assert body["error_key"] == "store_addon_not_found_error"
        assert body["extra_fields"] == {"addon": "bad"}
    else:
        assert await resp.text() == "Addon bad does not exist in the store"


@pytest.mark.parametrize(

@@ -7,6 +7,7 @@ from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch
from aiohttp.test_utils import TestClient
from awesomeversion import AwesomeVersion
from blockbuster import BlockingError
from docker.errors import DockerException
import pytest

from supervisor.const import CoreState
@@ -407,3 +408,37 @@ async def test_api_progress_updates_supervisor_update(
        "done": True,
    },
]


async def test_api_supervisor_stats(api_client: TestClient, coresys: CoreSys):
    """Test supervisor stats."""
    coresys.docker.containers.get.return_value.status = "running"
    coresys.docker.containers.get.return_value.stats.return_value = load_json_fixture(
        "container_stats.json"
    )

    resp = await api_client.get("/supervisor/stats")
    assert resp.status == 200
    result = await resp.json()
    assert result["data"]["cpu_percent"] == 90.0
    assert result["data"]["memory_usage"] == 59700000
    assert result["data"]["memory_limit"] == 4000000000
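    # 59700000 / 4000000000 * 100 rounds to 1.49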
assert result["data"]["memory_percent"] == 1.49
|
||||
|
||||
|
||||
async def test_supervisor_api_stats_failure(
|
||||
api_client: TestClient, coresys: CoreSys, caplog: pytest.LogCaptureFixture
|
||||
):
|
||||
"""Test supervisor stats failure."""
|
||||
coresys.docker.containers.get.side_effect = DockerException("fail")
|
||||
|
||||
resp = await api_client.get("/supervisor/stats")
|
||||
assert resp.status == 500
|
||||
body = await resp.json()
|
||||
assert (
|
||||
body["message"]
|
||||
== "An unknown error occurred with Supervisor. Check supervisor logs for details (check with 'ha supervisor logs')"
|
||||
)
|
||||
assert body["error_key"] == "supervisor_unknown_error"
|
||||
assert body["extra_fields"] == {"logs_command": "ha supervisor logs"}
|
||||
assert "Could not inspect container 'hassio_supervisor': fail" in caplog.text
|
||||
|
||||
@@ -144,9 +144,9 @@ async def docker() -> DockerAPI:
|
||||
|
||||
docker_images.inspect.return_value = image_inspect
|
||||
docker_images.list.return_value = [image_inspect]
|
||||
docker_images.import_image.return_value = [
|
||||
{"stream": "Loaded image: test:latest\n"}
|
||||
]
|
||||
docker_images.import_image = AsyncMock(
|
||||
return_value=[{"stream": "Loaded image: test:latest\n"}]
|
||||
)
|
||||
|
||||
docker_images.pull.return_value = AsyncIterator([{}])
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ from unittest.mock import Mock, PropertyMock, patch
|
||||
from dbus_fast.aio.message_bus import MessageBus
|
||||
import pytest
|
||||
|
||||
from supervisor.dbus.const import ConnectionStateType
|
||||
from supervisor.dbus.const import ConnectionState
|
||||
from supervisor.dbus.network import NetworkManager
|
||||
from supervisor.dbus.network.interface import NetworkInterface
|
||||
from supervisor.exceptions import (
|
||||
@@ -93,7 +93,7 @@ async def test_activate_connection(
|
||||
"/org/freedesktop/NetworkManager/Settings/1",
|
||||
"/org/freedesktop/NetworkManager/Devices/1",
|
||||
)
|
||||
assert connection.state == ConnectionStateType.ACTIVATED
|
||||
assert connection.state == ConnectionState.ACTIVATED
|
||||
assert (
|
||||
connection.settings.object_path == "/org/freedesktop/NetworkManager/Settings/1"
|
||||
)
|
||||
@@ -117,7 +117,7 @@ async def test_add_and_activate_connection(
|
||||
)
|
||||
assert settings.connection.uuid == "0c23631e-2118-355c-bbb0-8943229cb0d6"
|
||||
assert settings.ipv4.method == "auto"
|
||||
assert connection.state == ConnectionStateType.ACTIVATED
|
||||
assert connection.state == ConnectionState.ACTIVATED
|
||||
assert (
|
||||
connection.settings.object_path == "/org/freedesktop/NetworkManager/Settings/1"
|
||||
)
|
||||
|
||||
@@ -1,9 +1,49 @@
|
||||
"""Test docker login."""
|
||||
|
||||
import pytest
|
||||
|
||||
# pylint: disable=protected-access
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.docker.const import DOCKER_HUB
|
||||
from supervisor.docker.const import DOCKER_HUB, DOCKER_HUB_LEGACY
|
||||
from supervisor.docker.interface import DockerInterface
|
||||
from supervisor.docker.utils import get_registry_from_image
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("image_ref", "expected_registry"),
|
||||
[
|
||||
# No registry - Docker Hub images
|
||||
("nginx", None),
|
||||
("nginx:latest", None),
|
||||
("library/nginx", None),
|
||||
("library/nginx:latest", None),
|
||||
("homeassistant/amd64-supervisor", None),
|
||||
("homeassistant/amd64-supervisor:1.2.3", None),
|
||||
# Registry with dot
|
||||
("ghcr.io/homeassistant/amd64-supervisor", "ghcr.io"),
|
||||
("ghcr.io/homeassistant/amd64-supervisor:latest", "ghcr.io"),
|
||||
("myregistry.com/nginx", "myregistry.com"),
|
||||
("registry.example.com/org/image:v1", "registry.example.com"),
|
||||
("127.0.0.1/myimage", "127.0.0.1"),
|
||||
# Registry with port
|
||||
("myregistry:5000/myimage", "myregistry:5000"),
|
||||
("localhost:5000/myimage", "localhost:5000"),
|
||||
("registry.io:5000/org/app:v1", "registry.io:5000"),
|
||||
# localhost special case
|
||||
("localhost/myimage", "localhost"),
|
||||
("localhost/myimage:tag", "localhost"),
|
||||
# IPv6
|
||||
("[::1]:5000/myimage", "[::1]:5000"),
|
||||
("[2001:db8::1]:5000/myimage:tag", "[2001:db8::1]:5000"),
|
||||
],
|
||||
)
|
||||
def test_get_registry_from_image(image_ref: str, expected_registry: str | None):
|
||||
"""Test get_registry_from_image extracts registry from image reference.
|
||||
|
||||
Based on Docker's reference implementation:
|
||||
vendor/github.com/distribution/reference/normalize.go
|
||||
"""
|
||||
assert get_registry_from_image(image_ref) == expected_registry
|
||||
|
||||
|
||||
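# Not part of the diff above: an illustrative sketch of the rule these cases
# encode, following Docker's reference normalization (the first path component
# counts as a registry only if it contains "." or ":" or equals "localhost").
# This is an assumption for illustration, not Supervisor's actual
# get_registry_from_image implementation.
def _sketch_registry_from_image(image_ref: str) -> str | None:
    """Illustrative sketch only; see supervisor.docker.utils for the real code."""
    # Bracketed IPv6 hosts like [::1]:5000 always denote a registry
    if image_ref.startswith("["):
        return image_ref.split("/", 1)[0]
    first, _, rest = image_ref.partition("/")
    if not rest:
        # Bare names like "nginx:latest" resolve to Docker Hub (no registry)
        return None
    if "." in first or ":" in first or first == "localhost":
        return first
    # Otherwise the first component is a Docker Hub namespace, e.g. "library"
    return None

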
def test_no_credentials(coresys: CoreSys, test_docker_interface: DockerInterface):
@@ -47,3 +87,36 @@ def test_matching_credentials(coresys: CoreSys, test_docker_interface: DockerInt
    )
    assert credentials["username"] == "Spongebob Squarepants"
    assert "registry" not in credentials


def test_legacy_docker_hub_credentials(
    coresys: CoreSys, test_docker_interface: DockerInterface
):
    """Test legacy hub.docker.com credentials are used for Docker Hub images."""
    coresys.docker.config._data["registries"] = {
        DOCKER_HUB_LEGACY: {"username": "LegacyUser", "password": "Password1!"},
    }

    credentials = test_docker_interface._get_credentials(
        "homeassistant/amd64-supervisor"
    )
    assert credentials["username"] == "LegacyUser"
    # No registry should be included for Docker Hub
    assert "registry" not in credentials


def test_docker_hub_preferred_over_legacy(
    coresys: CoreSys, test_docker_interface: DockerInterface
):
    """Test docker.io is preferred over legacy hub.docker.com when both exist."""
    coresys.docker.config._data["registries"] = {
        DOCKER_HUB: {"username": "NewUser", "password": "Password1!"},
        DOCKER_HUB_LEGACY: {"username": "LegacyUser", "password": "Password2!"},
    }

    credentials = test_docker_interface._get_credentials(
        "homeassistant/amd64-supervisor"
    )
    # docker.io should be preferred
    assert credentials["username"] == "NewUser"
    assert "registry" not in credentials

@@ -54,7 +54,7 @@ async def test_docker_image_platform(
    coresys.docker.images.inspect.return_value = {"Id": "test:1.2.3"}
    await test_docker_interface.install(AwesomeVersion("1.2.3"), "test", arch=cpu_arch)
    coresys.docker.images.pull.assert_called_once_with(
        "test", tag="1.2.3", platform=platform, auth=None, stream=True
        "test", tag="1.2.3", platform=platform, auth=None, stream=True, timeout=None
    )
    coresys.docker.images.inspect.assert_called_once_with("test:1.2.3")

@@ -71,7 +71,12 @@ async def test_docker_image_default_platform(
):
    await test_docker_interface.install(AwesomeVersion("1.2.3"), "test")
    coresys.docker.images.pull.assert_called_once_with(
        "test", tag="1.2.3", platform="linux/386", auth=None, stream=True
        "test",
        tag="1.2.3",
        platform="linux/386",
        auth=None,
        stream=True,
        timeout=None,
    )

    coresys.docker.images.inspect.assert_called_once_with("test:1.2.3")
@@ -111,7 +116,12 @@ async def test_private_registry_credentials_passed_to_pull(
    expected_auth["registry"] = registry_key

    coresys.docker.images.pull.assert_called_once_with(
        image, tag="1.2.3", platform="linux/amd64", auth=expected_auth, stream=True
        image,
        tag="1.2.3",
        platform="linux/amd64",
        auth=expected_auth,
        stream=True,
        timeout=None,
    )


@@ -360,7 +370,12 @@ async def test_install_fires_progress_events(
):
    await test_docker_interface.install(AwesomeVersion("1.2.3"), "test")
    coresys.docker.images.pull.assert_called_once_with(
        "test", tag="1.2.3", platform="linux/386", auth=None, stream=True
        "test",
        tag="1.2.3",
        platform="linux/386",
        auth=None,
        stream=True,
        timeout=None,
    )
    coresys.docker.images.inspect.assert_called_once_with("test:1.2.3")
@@ -817,7 +832,12 @@ async def test_install_progress_containerd_snapshot(
    with patch.object(Supervisor, "arch", PropertyMock(return_value="i386")):
        await test_docker_interface.mock_install()
    coresys.docker.images.pull.assert_called_once_with(
        "test", tag="1.2.3", platform="linux/386", auth=None, stream=True
        "test",
        tag="1.2.3",
        platform="linux/386",
        auth=None,
        stream=True,
        timeout=None,
    )
    coresys.docker.images.inspect.assert_called_once_with("test:1.2.3")

@@ -2,7 +2,7 @@

import asyncio
from pathlib import Path
from unittest.mock import MagicMock, patch
from unittest.mock import AsyncMock, MagicMock, patch

from docker.errors import APIError, DockerException, NotFound
import pytest
@@ -412,9 +412,9 @@ async def test_repair_failures(coresys: CoreSys, caplog: pytest.LogCaptureFixtur
async def test_import_image(coresys: CoreSys, tmp_path: Path, log_starter: str):
    """Test importing an image into docker."""
    (test_tar := tmp_path / "test.tar").touch()
    coresys.docker.images.import_image.return_value = [
        {"stream": f"{log_starter}: imported"}
    ]
    coresys.docker.images.import_image = AsyncMock(
        return_value=[{"stream": f"{log_starter}: imported"}]
    )
    coresys.docker.images.inspect.return_value = {"Id": "imported"}

    image = await coresys.docker.import_image(test_tar)
@@ -426,9 +426,9 @@ async def test_import_image(coresys: CoreSys, tmp_path: Path, log_starter: str):
async def test_import_image_error(coresys: CoreSys, tmp_path: Path):
    """Test failure importing an image into docker."""
    (test_tar := tmp_path / "test.tar").touch()
    coresys.docker.images.import_image.return_value = [
        {"errorDetail": {"message": "fail"}}
    ]
    coresys.docker.images.import_image = AsyncMock(
        return_value=[{"errorDetail": {"message": "fail"}}]
    )

    with pytest.raises(DockerError, match="Can't import image from tar: fail"):
        await coresys.docker.import_image(test_tar)
@@ -441,10 +441,12 @@ async def test_import_multiple_images_in_tar(
):
    """Test importing multiple images in one tar into docker."""
    (test_tar := tmp_path / "test.tar").touch()
    coresys.docker.images.import_image.return_value = [
        {"stream": "Loaded image: imported-1"},
        {"stream": "Loaded image: imported-2"},
    ]
    coresys.docker.images.import_image = AsyncMock(
        return_value=[
            {"stream": "Loaded image: imported-1"},
            {"stream": "Loaded image: imported-2"},
        ]
    )

    assert await coresys.docker.import_image(test_tar) is None


@@ -200,6 +200,8 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
                "type": "HassioError",
                "message": "Unknown error, see Supervisor logs (check with 'ha supervisor logs')",
                "stage": "test",
                "error_key": None,
                "extra_fields": None,
            }
        ],
        "created": ANY,
@@ -228,6 +230,8 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
                "type": "HassioError",
                "message": "Unknown error, see Supervisor logs (check with 'ha supervisor logs')",
                "stage": "test",
                "error_key": None,
                "extra_fields": None,
            }
        ],
        "created": ANY,

0
wheels/.gitkeep
Normal file