mirror of
https://github.com/home-assistant/supervisor.git
synced 2025-09-01 12:20:22 +00:00
Compare commits
78 Commits
2024.04.2
...
need-updat
Author | SHA1 | Date | |
---|---|---|---|
![]() |
b7c53d9e40 | ||
![]() |
b684c8673e | ||
![]() |
547f42439d | ||
![]() |
c51ceb000f | ||
![]() |
4cbede1bc8 | ||
![]() |
5eac8c7780 | ||
![]() |
ab78d87304 | ||
![]() |
09166e3867 | ||
![]() |
8a5c813cdd | ||
![]() |
4200622f43 | ||
![]() |
c4452a85b4 | ||
![]() |
e57de4a3c1 | ||
![]() |
9fd2c91c55 | ||
![]() |
fbd70013a8 | ||
![]() |
8d18f3e66e | ||
![]() |
5f5754e860 | ||
![]() |
974c882b9a | ||
![]() |
a9ea90096b | ||
![]() |
45c72c426e | ||
![]() |
4e5b75fe19 | ||
![]() |
3cd617e68f | ||
![]() |
ddff02f73b | ||
![]() |
b59347b3d3 | ||
![]() |
1dc769076f | ||
![]() |
f150a19c0f | ||
![]() |
c4bc1e3824 | ||
![]() |
eca99b69db | ||
![]() |
043af72847 | ||
![]() |
05c7b6c639 | ||
![]() |
3385c99f1f | ||
![]() |
895117f857 | ||
![]() |
9e3135e2de | ||
![]() |
9a1c517437 | ||
![]() |
c0c0c4b7ad | ||
![]() |
be6e39fed0 | ||
![]() |
b384921ee0 | ||
![]() |
0d05a6eae3 | ||
![]() |
430aef68c6 | ||
![]() |
eac6070e12 | ||
![]() |
6693b7c2e6 | ||
![]() |
7898c3e433 | ||
![]() |
420ecd064e | ||
![]() |
4289be53f8 | ||
![]() |
29b41b564e | ||
![]() |
998eb69583 | ||
![]() |
8ebc097ff4 | ||
![]() |
c05984ca49 | ||
![]() |
1a700c3013 | ||
![]() |
a9c92cdec8 | ||
![]() |
da8b938d5b | ||
![]() |
71e91328f1 | ||
![]() |
6356be4c52 | ||
![]() |
e26e5440b6 | ||
![]() |
fecfbd1a3e | ||
![]() |
c00d6dfc76 | ||
![]() |
85be66d90d | ||
![]() |
1ac506b391 | ||
![]() |
f7738b77de | ||
![]() |
824037bb7d | ||
![]() |
221292ad14 | ||
![]() |
16f8c75e9f | ||
![]() |
90a37079f1 | ||
![]() |
798092af5e | ||
![]() |
2a622a929d | ||
![]() |
ca8eeaa68c | ||
![]() |
d1b8ac1249 | ||
![]() |
3f629c4d60 | ||
![]() |
3fa910e68b | ||
![]() |
e3cf2989c9 | ||
![]() |
136b2f402d | ||
![]() |
8d18d2d9c6 | ||
![]() |
f18213361a | ||
![]() |
18d9d32bca | ||
![]() |
1246e429c9 | ||
![]() |
77bc46bc37 | ||
![]() |
ce16963c94 | ||
![]() |
a70e8cfe58 | ||
![]() |
ba922a1aaa |
10
.github/workflows/builder.yml
vendored
10
.github/workflows/builder.yml
vendored
@@ -53,7 +53,7 @@ jobs:
|
||||
requirements: ${{ steps.requirements.outputs.changed }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -92,7 +92,7 @@ jobs:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -149,7 +149,7 @@ jobs:
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: needs.init.outputs.publish == 'true'
|
||||
uses: docker/login-action@v3.1.0
|
||||
uses: docker/login-action@v3.2.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@@ -178,7 +178,7 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
if: needs.init.outputs.publish == 'true'
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
|
||||
- name: Initialize git
|
||||
if: needs.init.outputs.publish == 'true'
|
||||
@@ -203,7 +203,7 @@ jobs:
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
|
||||
- name: Build the Supervisor
|
||||
if: needs.init.outputs.publish != 'true'
|
||||
|
24
.github/workflows/ci.yaml
vendored
24
.github/workflows/ci.yaml
vendored
@@ -25,7 +25,7 @@ jobs:
|
||||
name: Prepare Python dependencies
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
- name: Set up Python
|
||||
id: python
|
||||
uses: actions/setup-python@v5.1.0
|
||||
@@ -67,7 +67,7 @@ jobs:
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
id: python
|
||||
@@ -110,7 +110,7 @@ jobs:
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
id: python
|
||||
@@ -153,7 +153,7 @@ jobs:
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
- name: Register hadolint problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
|
||||
@@ -168,7 +168,7 @@ jobs:
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
id: python
|
||||
@@ -212,7 +212,7 @@ jobs:
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
id: python
|
||||
@@ -256,7 +256,7 @@ jobs:
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
id: python
|
||||
@@ -288,7 +288,7 @@ jobs:
|
||||
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
id: python
|
||||
@@ -335,7 +335,7 @@ jobs:
|
||||
-o console_output_style=count \
|
||||
tests
|
||||
- name: Upload coverage artifact
|
||||
uses: actions/upload-artifact@v4.3.1
|
||||
uses: actions/upload-artifact@v4.3.3
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}
|
||||
path: .coverage
|
||||
@@ -346,7 +346,7 @@ jobs:
|
||||
needs: ["pytest", "prepare"]
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
||||
uses: actions/setup-python@v5.1.0
|
||||
id: python
|
||||
@@ -365,7 +365,7 @@ jobs:
|
||||
echo "Failed to restore Python virtual environment from cache"
|
||||
exit 1
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@v4.1.4
|
||||
uses: actions/download-artifact@v4.1.7
|
||||
- name: Combine coverage results
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
@@ -373,4 +373,4 @@ jobs:
|
||||
coverage report
|
||||
coverage xml
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v4.3.0
|
||||
uses: codecov/codecov-action@v4.4.1
|
||||
|
2
.github/workflows/release-drafter.yml
vendored
2
.github/workflows/release-drafter.yml
vendored
@@ -11,7 +11,7 @@ jobs:
|
||||
name: Release Drafter
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
|
2
.github/workflows/sentry.yaml
vendored
2
.github/workflows/sentry.yaml
vendored
@@ -10,7 +10,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.2
|
||||
uses: actions/checkout@v4.1.6
|
||||
- name: Sentry Release
|
||||
uses: getsentry/action-release@v1.7.0
|
||||
env:
|
||||
|
@@ -8,22 +8,23 @@ brotli==1.1.0
|
||||
ciso8601==2.3.1
|
||||
colorlog==6.8.2
|
||||
cpe==1.2.1
|
||||
cryptography==42.0.5
|
||||
cryptography==42.0.8
|
||||
debugpy==1.8.1
|
||||
deepmerge==1.1.1
|
||||
dirhash==0.3.0
|
||||
docker==7.0.0
|
||||
dirhash==0.4.0
|
||||
docker==7.1.0
|
||||
faust-cchardet==2.1.19
|
||||
gitpython==3.1.43
|
||||
jinja2==3.1.3
|
||||
orjson==3.10.1
|
||||
jinja2==3.1.4
|
||||
orjson==3.9.15
|
||||
pulsectl==24.4.0
|
||||
pyudev==0.24.1
|
||||
pyudev==0.24.3
|
||||
PyYAML==6.0.1
|
||||
requests==2.32.3
|
||||
securetar==2024.2.1
|
||||
sentry-sdk==1.45.0
|
||||
setuptools==69.5.1
|
||||
sentry-sdk==2.5.1
|
||||
setuptools==70.0.0
|
||||
voluptuous==0.14.2
|
||||
dbus-fast==2.21.1
|
||||
typing_extensions==4.11.0
|
||||
dbus-fast==2.21.3
|
||||
typing_extensions==4.12.2
|
||||
zlib-fast==0.2.0
|
||||
|
@@ -1,12 +1,12 @@
|
||||
coverage==7.4.4
|
||||
pre-commit==3.7.0
|
||||
pylint==3.1.0
|
||||
coverage==7.5.3
|
||||
pre-commit==3.7.1
|
||||
pylint==3.2.3
|
||||
pytest-aiohttp==1.0.5
|
||||
pytest-asyncio==0.23.5
|
||||
pytest-asyncio==0.23.6
|
||||
pytest-cov==5.0.0
|
||||
pytest-timeout==2.3.1
|
||||
pytest==8.1.1
|
||||
ruff==0.3.7
|
||||
pytest==8.2.2
|
||||
ruff==0.4.8
|
||||
time-machine==2.14.1
|
||||
typing_extensions==4.11.0
|
||||
typing_extensions==4.12.2
|
||||
urllib3==2.2.1
|
||||
|
@@ -285,9 +285,13 @@ class Addon(AddonModel):
|
||||
@property
|
||||
def need_update(self) -> bool:
|
||||
"""Return True if an update is available."""
|
||||
if self.is_detached:
|
||||
if self.is_detached or self.version == self.latest_version:
|
||||
return False
|
||||
return self.version != self.latest_version
|
||||
|
||||
with suppress(AddonsNotSupportedError):
|
||||
self._validate_availability(self.data_store)
|
||||
return True
|
||||
return False
|
||||
|
||||
@property
|
||||
def dns(self) -> list[str]:
|
||||
@@ -1343,11 +1347,11 @@ class Addon(AddonModel):
|
||||
)
|
||||
raise AddonsError() from err
|
||||
|
||||
finally:
|
||||
# Is add-on loaded
|
||||
if not self.loaded:
|
||||
await self.load()
|
||||
|
||||
finally:
|
||||
# Run add-on
|
||||
if data[ATTR_STATE] == AddonState.STARTED:
|
||||
wait_for_start = await self.start()
|
||||
|
@@ -9,7 +9,7 @@ from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispa
|
||||
|
||||
from ..const import AddonState
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..exceptions import APIAddonNotInstalled
|
||||
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
|
||||
from ..utils.sentry import capture_exception
|
||||
from .addons import APIAddons
|
||||
from .audio import APIAudio
|
||||
@@ -401,7 +401,7 @@ class RestAPI(CoreSysAttributes):
|
||||
|
||||
async def get_supervisor_logs(*args, **kwargs):
|
||||
try:
|
||||
return await self._api_host.advanced_logs(
|
||||
return await self._api_host.advanced_logs_handler(
|
||||
*args, identifier="hassio_supervisor", **kwargs
|
||||
)
|
||||
except Exception as err: # pylint: disable=broad-exception-caught
|
||||
@@ -410,7 +410,10 @@ class RestAPI(CoreSysAttributes):
|
||||
_LOGGER.exception(
|
||||
"Failed to get supervisor logs using advanced_logs API"
|
||||
)
|
||||
capture_exception(err)
|
||||
if not isinstance(err, HostNotSupportedError):
|
||||
# No need to capture HostNotSupportedError to Sentry, the cause
|
||||
# is known and reported to the user using the resolution center.
|
||||
capture_exception(err)
|
||||
return await api_supervisor.logs(*args, **kwargs)
|
||||
|
||||
self.webapp.add_routes(
|
||||
@@ -694,7 +697,6 @@ class RestAPI(CoreSysAttributes):
|
||||
web.get("/store", api_store.store_info),
|
||||
web.get("/store/addons", api_store.addons_list),
|
||||
web.get("/store/addons/{addon}", api_store.addons_addon_info),
|
||||
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
|
||||
web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
|
||||
web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
|
||||
web.get(
|
||||
@@ -716,6 +718,8 @@ class RestAPI(CoreSysAttributes):
|
||||
"/store/addons/{addon}/update/{version}",
|
||||
api_store.addons_addon_update,
|
||||
),
|
||||
# Must be below others since it has a wildcard in resource path
|
||||
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
|
||||
web.post("/store/reload", api_store.reload),
|
||||
web.get("/store/repositories", api_store.repositories_list),
|
||||
web.get(
|
||||
|
@@ -16,7 +16,7 @@ from ..const import (
|
||||
ATTR_SYSTEM,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..dbus.udisks2 import UDisks2
|
||||
from ..dbus.udisks2 import UDisks2Manager
|
||||
from ..dbus.udisks2.block import UDisks2Block
|
||||
from ..dbus.udisks2.drive import UDisks2Drive
|
||||
from ..hardware.data import Device
|
||||
@@ -72,7 +72,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
|
||||
}
|
||||
|
||||
|
||||
def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]:
|
||||
def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
|
||||
"""Return a dict with information of a disk to be used in the API."""
|
||||
return {
|
||||
ATTR_VENDOR: drive.vendor,
|
||||
|
@@ -182,9 +182,13 @@ class APIHomeAssistant(CoreSysAttributes):
|
||||
)
|
||||
|
||||
@api_process
|
||||
def rebuild(self, request: web.Request) -> Awaitable[None]:
|
||||
async def rebuild(self, request: web.Request) -> None:
|
||||
"""Rebuild Home Assistant."""
|
||||
return asyncio.shield(self.sys_homeassistant.core.rebuild())
|
||||
body = await api_validate(SCHEMA_RESTART, request)
|
||||
|
||||
await asyncio.shield(
|
||||
self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])
|
||||
)
|
||||
|
||||
@api_process
|
||||
async def check(self, request: web.Request) -> None:
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""Init file for Supervisor host RESTful API."""
|
||||
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
import logging
|
||||
@@ -163,8 +164,7 @@ class APIHost(CoreSysAttributes):
|
||||
raise APIError() from err
|
||||
return possible_offset
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
|
||||
async def advanced_logs(
|
||||
async def advanced_logs_handler(
|
||||
self, request: web.Request, identifier: str | None = None, follow: bool = False
|
||||
) -> web.StreamResponse:
|
||||
"""Return systemd-journald logs."""
|
||||
@@ -218,3 +218,10 @@ class APIHost(CoreSysAttributes):
|
||||
"Connection reset when trying to fetch data from systemd-journald."
|
||||
) from ex
|
||||
return response
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
|
||||
async def advanced_logs(
|
||||
self, request: web.Request, identifier: str | None = None, follow: bool = False
|
||||
) -> web.StreamResponse:
|
||||
"""Return systemd-journald logs. Wrapped as standard API handler."""
|
||||
return await self.advanced_logs_handler(request, identifier, follow)
|
||||
|
@@ -249,9 +249,14 @@ class APIStore(CoreSysAttributes):
|
||||
@api_process_raw(CONTENT_TYPE_TEXT)
|
||||
async def addons_addon_changelog(self, request: web.Request) -> str:
|
||||
"""Return changelog from add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
# Frontend can't handle error response here, need to return 200 and error as text for now
|
||||
try:
|
||||
addon = self._extract_addon(request)
|
||||
except APIError as err:
|
||||
return str(err)
|
||||
|
||||
if not addon.with_changelog:
|
||||
raise APIError(f"No changelog found for add-on {addon.slug}!")
|
||||
return f"No changelog found for add-on {addon.slug}!"
|
||||
|
||||
with addon.path_changelog.open("r") as changelog:
|
||||
return changelog.read()
|
||||
@@ -259,9 +264,14 @@ class APIStore(CoreSysAttributes):
|
||||
@api_process_raw(CONTENT_TYPE_TEXT)
|
||||
async def addons_addon_documentation(self, request: web.Request) -> str:
|
||||
"""Return documentation from add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
# Frontend can't handle error response here, need to return 200 and error as text for now
|
||||
try:
|
||||
addon = self._extract_addon(request)
|
||||
except APIError as err:
|
||||
return str(err)
|
||||
|
||||
if not addon.with_documentation:
|
||||
raise APIError(f"No documentation found for add-on {addon.slug}!")
|
||||
return f"No documentation found for add-on {addon.slug}!"
|
||||
|
||||
with addon.path_documentation.open("r") as documentation:
|
||||
return documentation.read()
|
||||
|
@@ -345,9 +345,6 @@ class Core(CoreSysAttributes):
|
||||
if self.state == CoreState.RUNNING:
|
||||
self.state = CoreState.SHUTDOWN
|
||||
|
||||
# Stop docker monitoring
|
||||
await self.sys_docker.unload()
|
||||
|
||||
# Shutdown Application Add-ons, using Home Assistant API
|
||||
await self.sys_addons.shutdown(AddonStartup.APPLICATION)
|
||||
|
||||
|
@@ -61,7 +61,8 @@ DBUS_OBJECT_RESOLVED = "/org/freedesktop/resolve1"
|
||||
DBUS_OBJECT_SETTINGS = "/org/freedesktop/NetworkManager/Settings"
|
||||
DBUS_OBJECT_SYSTEMD = "/org/freedesktop/systemd1"
|
||||
DBUS_OBJECT_TIMEDATE = "/org/freedesktop/timedate1"
|
||||
DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2/Manager"
|
||||
DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2"
|
||||
DBUS_OBJECT_UDISKS2_MANAGER = "/org/freedesktop/UDisks2/Manager"
|
||||
|
||||
DBUS_ATTR_ACTIVE_ACCESSPOINT = "ActiveAccessPoint"
|
||||
DBUS_ATTR_ACTIVE_CONNECTION = "ActiveConnection"
|
||||
|
@@ -17,7 +17,7 @@ from .rauc import Rauc
|
||||
from .resolved import Resolved
|
||||
from .systemd import Systemd
|
||||
from .timedate import TimeDate
|
||||
from .udisks2 import UDisks2
|
||||
from .udisks2 import UDisks2Manager
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -37,7 +37,7 @@ class DBusManager(CoreSysAttributes):
|
||||
self._agent: OSAgent = OSAgent()
|
||||
self._timedate: TimeDate = TimeDate()
|
||||
self._resolved: Resolved = Resolved()
|
||||
self._udisks2: UDisks2 = UDisks2()
|
||||
self._udisks2: UDisks2Manager = UDisks2Manager()
|
||||
self._bus: MessageBus | None = None
|
||||
|
||||
@property
|
||||
@@ -81,7 +81,7 @@ class DBusManager(CoreSysAttributes):
|
||||
return self._resolved
|
||||
|
||||
@property
|
||||
def udisks2(self) -> UDisks2:
|
||||
def udisks2(self) -> UDisks2Manager:
|
||||
"""Return the udisks2 interface."""
|
||||
return self._udisks2
|
||||
|
||||
|
@@ -15,12 +15,15 @@ from ...exceptions import (
|
||||
from ..const import (
|
||||
DBUS_ATTR_SUPPORTED_FILESYSTEMS,
|
||||
DBUS_ATTR_VERSION,
|
||||
DBUS_IFACE_BLOCK,
|
||||
DBUS_IFACE_DRIVE,
|
||||
DBUS_IFACE_UDISKS2_MANAGER,
|
||||
DBUS_NAME_UDISKS2,
|
||||
DBUS_OBJECT_BASE,
|
||||
DBUS_OBJECT_UDISKS2,
|
||||
DBUS_OBJECT_UDISKS2_MANAGER,
|
||||
)
|
||||
from ..interface import DBusInterfaceProxy, dbus_property
|
||||
from ..interface import DBusInterface, DBusInterfaceProxy, dbus_property
|
||||
from ..utils import dbus_connected
|
||||
from .block import UDisks2Block
|
||||
from .const import UDISKS2_DEFAULT_OPTIONS
|
||||
@@ -30,7 +33,15 @@ from .drive import UDisks2Drive
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class UDisks2(DBusInterfaceProxy):
|
||||
class UDisks2(DBusInterface):
|
||||
"""Handle D-Bus interface for UDisks2 root object."""
|
||||
|
||||
name: str = DBUS_NAME_UDISKS2
|
||||
bus_name: str = DBUS_NAME_UDISKS2
|
||||
object_path: str = DBUS_OBJECT_UDISKS2
|
||||
|
||||
|
||||
class UDisks2Manager(DBusInterfaceProxy):
|
||||
"""Handle D-Bus interface for UDisks2.
|
||||
|
||||
http://storaged.org/doc/udisks2-api/latest/
|
||||
@@ -38,22 +49,36 @@ class UDisks2(DBusInterfaceProxy):
|
||||
|
||||
name: str = DBUS_NAME_UDISKS2
|
||||
bus_name: str = DBUS_NAME_UDISKS2
|
||||
object_path: str = DBUS_OBJECT_UDISKS2
|
||||
object_path: str = DBUS_OBJECT_UDISKS2_MANAGER
|
||||
properties_interface: str = DBUS_IFACE_UDISKS2_MANAGER
|
||||
|
||||
_block_devices: dict[str, UDisks2Block] = {}
|
||||
_drives: dict[str, UDisks2Drive] = {}
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize object."""
|
||||
super().__init__()
|
||||
self.udisks2_object_manager = UDisks2()
|
||||
|
||||
async def connect(self, bus: MessageBus):
|
||||
"""Connect to D-Bus."""
|
||||
try:
|
||||
await super().connect(bus)
|
||||
await self.udisks2_object_manager.connect(bus)
|
||||
except DBusError:
|
||||
_LOGGER.warning("Can't connect to udisks2")
|
||||
except (DBusServiceUnkownError, DBusInterfaceError):
|
||||
_LOGGER.warning(
|
||||
"No udisks2 support on the host. Host control has been disabled."
|
||||
)
|
||||
else:
|
||||
# Register for signals on devices added/removed
|
||||
self.udisks2_object_manager.dbus.object_manager.on_interfaces_added(
|
||||
self._interfaces_added
|
||||
)
|
||||
self.udisks2_object_manager.dbus.object_manager.on_interfaces_removed(
|
||||
self._interfaces_removed
|
||||
)
|
||||
|
||||
@dbus_connected
|
||||
async def update(self, changed: dict[str, Any] | None = None) -> None:
|
||||
@@ -161,11 +186,47 @@ class UDisks2(DBusInterfaceProxy):
|
||||
]
|
||||
)
|
||||
|
||||
async def _interfaces_added(
|
||||
self, object_path: str, properties: dict[str, dict[str, Any]]
|
||||
) -> None:
|
||||
"""Interfaces added to a UDisks2 object."""
|
||||
if object_path in self._block_devices:
|
||||
await self._block_devices[object_path].update()
|
||||
return
|
||||
if object_path in self._drives:
|
||||
await self._drives[object_path].update()
|
||||
return
|
||||
|
||||
if DBUS_IFACE_BLOCK in properties:
|
||||
self._block_devices[object_path] = await UDisks2Block.new(
|
||||
object_path, self.dbus.bus
|
||||
)
|
||||
return
|
||||
|
||||
if DBUS_IFACE_DRIVE in properties:
|
||||
self._drives[object_path] = await UDisks2Drive.new(
|
||||
object_path, self.dbus.bus
|
||||
)
|
||||
|
||||
async def _interfaces_removed(
|
||||
self, object_path: str, interfaces: list[str]
|
||||
) -> None:
|
||||
"""Interfaces removed from a UDisks2 object."""
|
||||
if object_path in self._block_devices and DBUS_IFACE_BLOCK in interfaces:
|
||||
self._block_devices[object_path].shutdown()
|
||||
del self._block_devices[object_path]
|
||||
return
|
||||
|
||||
if object_path in self._drives and DBUS_IFACE_DRIVE in interfaces:
|
||||
self._drives[object_path].shutdown()
|
||||
del self._drives[object_path]
|
||||
|
||||
def shutdown(self) -> None:
|
||||
"""Shutdown the object and disconnect from D-Bus.
|
||||
|
||||
This method is irreversible.
|
||||
"""
|
||||
self.udisks2_object_manager.shutdown()
|
||||
for block_device in self.block_devices:
|
||||
block_device.shutdown()
|
||||
for drive in self.drives:
|
||||
|
@@ -103,7 +103,13 @@ class HardwareManager(CoreSysAttributes):
|
||||
# Exctract all devices
|
||||
for device in self._udev.list_devices():
|
||||
# Skip devices without mapping
|
||||
if not device.device_node or self.helper.hide_virtual_device(device):
|
||||
try:
|
||||
if not device.device_node or self.helper.hide_virtual_device(device):
|
||||
continue
|
||||
except UnicodeDecodeError as err:
|
||||
# Some udev properties have an unkown/different encoding. This is a general
|
||||
# problem with pyudev, see https://github.com/pyudev/pyudev/pull/230
|
||||
_LOGGER.warning("Ignoring udev device due to error: %s", err)
|
||||
continue
|
||||
self._devices[device.sys_name] = Device.import_udev(device)
|
||||
|
||||
|
@@ -367,6 +367,7 @@ class HomeAssistantCore(JobGroup):
|
||||
"""Restart Home Assistant Docker."""
|
||||
# Create safe mode marker file if necessary
|
||||
if safe_mode:
|
||||
_LOGGER.debug("Creating safe mode marker file.")
|
||||
await self.sys_run_in_executor(
|
||||
(self.sys_config.path_homeassistant / SAFE_MODE_FILENAME).touch
|
||||
)
|
||||
@@ -383,8 +384,15 @@ class HomeAssistantCore(JobGroup):
|
||||
limit=JobExecutionLimit.GROUP_ONCE,
|
||||
on_condition=HomeAssistantJobError,
|
||||
)
|
||||
async def rebuild(self) -> None:
|
||||
async def rebuild(self, *, safe_mode: bool = False) -> None:
|
||||
"""Rebuild Home Assistant Docker container."""
|
||||
# Create safe mode marker file if necessary
|
||||
if safe_mode:
|
||||
_LOGGER.debug("Creating safe mode marker file.")
|
||||
await self.sys_run_in_executor(
|
||||
(self.sys_config.path_homeassistant / SAFE_MODE_FILENAME).touch
|
||||
)
|
||||
|
||||
with suppress(DockerError):
|
||||
await self.instance.stop()
|
||||
await self.start()
|
||||
|
@@ -48,7 +48,7 @@ from ..utils import remove_folder
|
||||
from ..utils.common import FileConfiguration
|
||||
from ..utils.json import read_json_file, write_json_file
|
||||
from .api import HomeAssistantAPI
|
||||
from .const import ATTR_OVERRIDE_IMAGE, WSType
|
||||
from .const import ATTR_OVERRIDE_IMAGE, LANDINGPAGE, WSType
|
||||
from .core import HomeAssistantCore
|
||||
from .secrets import HomeAssistantSecrets
|
||||
from .validate import SCHEMA_HASS_CONFIG
|
||||
@@ -328,6 +328,7 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes):
|
||||
if (
|
||||
not self.sys_hardware.policy.is_match_cgroup(PolicyGroup.UART, device)
|
||||
or not self.version
|
||||
or self.version == LANDINGPAGE
|
||||
or self.version < "2021.9.0"
|
||||
):
|
||||
return
|
||||
|
@@ -1,14 +1,16 @@
|
||||
"""Home Assistant Operating-System DataDisk."""
|
||||
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Final
|
||||
from typing import Any, Final
|
||||
|
||||
from awesomeversion import AwesomeVersion
|
||||
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..dbus.const import DBUS_ATTR_ID_LABEL, DBUS_IFACE_BLOCK
|
||||
from ..dbus.udisks2.block import UDisks2Block
|
||||
from ..dbus.udisks2.const import FormatType
|
||||
from ..dbus.udisks2.drive import UDisks2Drive
|
||||
@@ -22,8 +24,12 @@ from ..exceptions import (
|
||||
)
|
||||
from ..jobs.const import JobCondition, JobExecutionLimit
|
||||
from ..jobs.decorator import Job
|
||||
from ..resolution.checks.disabled_data_disk import CheckDisabledDataDisk
|
||||
from ..resolution.checks.multiple_data_disks import CheckMultipleDataDisks
|
||||
from ..utils.sentry import capture_exception
|
||||
from .const import (
|
||||
FILESYSTEM_LABEL_DATA_DISK,
|
||||
FILESYSTEM_LABEL_DISABLED_DATA_DISK,
|
||||
PARTITION_NAME_EXTERNAL_DATA_DISK,
|
||||
PARTITION_NAME_OLD_EXTERNAL_DATA_DISK,
|
||||
)
|
||||
@@ -157,6 +163,16 @@ class DataDisk(CoreSysAttributes):
|
||||
|
||||
return available
|
||||
|
||||
@property
|
||||
def check_multiple_data_disks(self) -> CheckMultipleDataDisks:
|
||||
"""Resolution center check for multiple data disks."""
|
||||
return self.sys_resolution.check.get("multiple_data_disks")
|
||||
|
||||
@property
|
||||
def check_disabled_data_disk(self) -> CheckDisabledDataDisk:
|
||||
"""Resolution center check for disabled data disk."""
|
||||
return self.sys_resolution.check.get("disabled_data_disk")
|
||||
|
||||
def _get_block_devices_for_drive(self, drive: UDisks2Drive) -> list[UDisks2Block]:
|
||||
"""Get block devices for a drive."""
|
||||
return [
|
||||
@@ -172,6 +188,14 @@ class DataDisk(CoreSysAttributes):
|
||||
if self.sys_dbus.agent.version >= AwesomeVersion("1.2.0"):
|
||||
await self.sys_dbus.agent.datadisk.reload_device()
|
||||
|
||||
# Register for signals on devices added/removed
|
||||
self.sys_dbus.udisks2.udisks2_object_manager.dbus.object_manager.on_interfaces_added(
|
||||
self._udisks2_interface_added
|
||||
)
|
||||
self.sys_dbus.udisks2.udisks2_object_manager.dbus.object_manager.on_interfaces_removed(
|
||||
self._udisks2_interface_removed
|
||||
)
|
||||
|
||||
@Job(
|
||||
name="data_disk_migrate",
|
||||
conditions=[JobCondition.HAOS, JobCondition.OS_AGENT, JobCondition.HEALTHY],
|
||||
@@ -348,3 +372,54 @@ class DataDisk(CoreSysAttributes):
|
||||
"New data partition prepared on device %s", partition_block.device
|
||||
)
|
||||
return partition_block
|
||||
|
||||
async def _udisks2_interface_added(
|
||||
self, _: str, properties: dict[str, dict[str, Any]]
|
||||
):
|
||||
"""If a data disk is added, trigger the resolution check."""
|
||||
if (
|
||||
DBUS_IFACE_BLOCK not in properties
|
||||
or DBUS_ATTR_ID_LABEL not in properties[DBUS_IFACE_BLOCK]
|
||||
):
|
||||
return
|
||||
|
||||
if (
|
||||
properties[DBUS_IFACE_BLOCK][DBUS_ATTR_ID_LABEL]
|
||||
== FILESYSTEM_LABEL_DATA_DISK
|
||||
):
|
||||
check = self.check_multiple_data_disks
|
||||
elif (
|
||||
properties[DBUS_IFACE_BLOCK][DBUS_ATTR_ID_LABEL]
|
||||
== FILESYSTEM_LABEL_DISABLED_DATA_DISK
|
||||
):
|
||||
check = self.check_disabled_data_disk
|
||||
else:
|
||||
return
|
||||
|
||||
# Delay briefly before running check to allow data updates to occur
|
||||
await asyncio.sleep(0.1)
|
||||
await check()
|
||||
|
||||
async def _udisks2_interface_removed(self, _: str, interfaces: list[str]):
|
||||
"""If affected by a data disk issue, re-check on removal of a block device."""
|
||||
if DBUS_IFACE_BLOCK not in interfaces:
|
||||
return
|
||||
|
||||
if any(
|
||||
issue.type == self.check_multiple_data_disks.issue
|
||||
and issue.context == self.check_multiple_data_disks.context
|
||||
for issue in self.sys_resolution.issues
|
||||
):
|
||||
check = self.check_multiple_data_disks
|
||||
elif any(
|
||||
issue.type == self.check_disabled_data_disk.issue
|
||||
and issue.context == self.check_disabled_data_disk.context
|
||||
for issue in self.sys_resolution.issues
|
||||
):
|
||||
check = self.check_disabled_data_disk
|
||||
else:
|
||||
return
|
||||
|
||||
# Delay briefly before running check to allow data updates to occur
|
||||
await asyncio.sleep(0.1)
|
||||
await check()
|
||||
|
49
supervisor/resolution/checks/detached_addon_missing.py
Normal file
49
supervisor/resolution/checks/detached_addon_missing.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""Helpers to check for detached addons due to repo misisng."""
|
||||
|
||||
from ...const import CoreState
|
||||
from ...coresys import CoreSys
|
||||
from ..const import ContextType, IssueType
|
||||
from .base import CheckBase
|
||||
|
||||
|
||||
def setup(coresys: CoreSys) -> CheckBase:
|
||||
"""Check setup function."""
|
||||
return CheckDetachedAddonMissing(coresys)
|
||||
|
||||
|
||||
class CheckDetachedAddonMissing(CheckBase):
|
||||
"""CheckDetachedAddonMissing class for check."""
|
||||
|
||||
async def run_check(self) -> None:
|
||||
"""Run check if not affected by issue."""
|
||||
for addon in self.sys_addons.installed:
|
||||
if (
|
||||
addon.is_detached
|
||||
and addon.repository not in self.sys_store.repositories
|
||||
):
|
||||
self.sys_resolution.create_issue(
|
||||
IssueType.DETACHED_ADDON_MISSING,
|
||||
ContextType.ADDON,
|
||||
reference=addon.slug,
|
||||
)
|
||||
|
||||
async def approve_check(self, reference: str | None = None) -> bool:
|
||||
"""Approve check if it is affected by issue."""
|
||||
return (
|
||||
addon := self.sys_addons.get(reference, local_only=True)
|
||||
) and addon.is_detached
|
||||
|
||||
@property
|
||||
def issue(self) -> IssueType:
|
||||
"""Return a IssueType enum."""
|
||||
return IssueType.DETACHED_ADDON_MISSING
|
||||
|
||||
@property
|
||||
def context(self) -> ContextType:
|
||||
"""Return a ContextType enum."""
|
||||
return ContextType.ADDON
|
||||
|
||||
@property
|
||||
def states(self) -> list[CoreState]:
|
||||
"""Return a list of valid states when this check can run."""
|
||||
return [CoreState.SETUP]
|
47
supervisor/resolution/checks/detached_addon_removed.py
Normal file
47
supervisor/resolution/checks/detached_addon_removed.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""Helpers to check for detached addons due to removal from repo."""
|
||||
|
||||
from ...const import CoreState
|
||||
from ...coresys import CoreSys
|
||||
from ..const import ContextType, IssueType, SuggestionType
|
||||
from .base import CheckBase
|
||||
|
||||
|
||||
def setup(coresys: CoreSys) -> CheckBase:
|
||||
"""Check setup function."""
|
||||
return CheckDetachedAddonRemoved(coresys)
|
||||
|
||||
|
||||
class CheckDetachedAddonRemoved(CheckBase):
|
||||
"""CheckDetachedAddonRemoved class for check."""
|
||||
|
||||
async def run_check(self) -> None:
|
||||
"""Run check if not affected by issue."""
|
||||
for addon in self.sys_addons.installed:
|
||||
if addon.is_detached and addon.repository in self.sys_store.repositories:
|
||||
self.sys_resolution.create_issue(
|
||||
IssueType.DETACHED_ADDON_REMOVED,
|
||||
ContextType.ADDON,
|
||||
reference=addon.slug,
|
||||
suggestions=[SuggestionType.EXECUTE_REMOVE],
|
||||
)
|
||||
|
||||
async def approve_check(self, reference: str | None = None) -> bool:
|
||||
"""Approve check if it is affected by issue."""
|
||||
return (
|
||||
addon := self.sys_addons.get(reference, local_only=True)
|
||||
) and addon.is_detached
|
||||
|
||||
@property
|
||||
def issue(self) -> IssueType:
|
||||
"""Return a IssueType enum."""
|
||||
return IssueType.DETACHED_ADDON_REMOVED
|
||||
|
||||
@property
|
||||
def context(self) -> ContextType:
|
||||
"""Return a ContextType enum."""
|
||||
return ContextType.ADDON
|
||||
|
||||
@property
|
||||
def states(self) -> list[CoreState]:
|
||||
"""Return a list of valid states when this check can run."""
|
||||
return [CoreState.SETUP]
|
@@ -73,6 +73,8 @@ class IssueType(StrEnum):
|
||||
CORRUPT_DOCKER = "corrupt_docker"
|
||||
CORRUPT_REPOSITORY = "corrupt_repository"
|
||||
CORRUPT_FILESYSTEM = "corrupt_filesystem"
|
||||
DETACHED_ADDON_MISSING = "detached_addon_missing"
|
||||
DETACHED_ADDON_REMOVED = "detached_addon_removed"
|
||||
DISABLED_DATA_DISK = "disabled_data_disk"
|
||||
DNS_LOOP = "dns_loop"
|
||||
DNS_SERVER_FAILED = "dns_server_failed"
|
||||
|
52
supervisor/resolution/fixups/addon_execute_remove.py
Normal file
52
supervisor/resolution/fixups/addon_execute_remove.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""Helpers to fix addon issue by removing it."""
|
||||
import logging
|
||||
|
||||
from ...coresys import CoreSys
|
||||
from ...exceptions import AddonsError, ResolutionFixupError
|
||||
from ..const import ContextType, IssueType, SuggestionType
|
||||
from .base import FixupBase
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup(coresys: CoreSys) -> FixupBase:
|
||||
"""Check setup function."""
|
||||
return FixupAddonExecuteRemove(coresys)
|
||||
|
||||
|
||||
class FixupAddonExecuteRemove(FixupBase):
|
||||
"""Storage class for fixup."""
|
||||
|
||||
async def process_fixup(self, reference: str | None = None) -> None:
|
||||
"""Initialize the fixup class."""
|
||||
if not (addon := self.sys_addons.get(reference, local_only=True)):
|
||||
_LOGGER.info("Addon %s already removed", reference)
|
||||
return
|
||||
|
||||
# Remove addon
|
||||
_LOGGER.info("Remove addon: %s", reference)
|
||||
try:
|
||||
addon.uninstall()
|
||||
except AddonsError as err:
|
||||
_LOGGER.error("Could not remove %s due to %s", reference, err)
|
||||
raise ResolutionFixupError() from None
|
||||
|
||||
@property
|
||||
def suggestion(self) -> SuggestionType:
|
||||
"""Return a SuggestionType enum."""
|
||||
return SuggestionType.EXECUTE_REMOVE
|
||||
|
||||
@property
|
||||
def context(self) -> ContextType:
|
||||
"""Return a ContextType enum."""
|
||||
return ContextType.ADDON
|
||||
|
||||
@property
|
||||
def issues(self) -> list[IssueType]:
|
||||
"""Return a IssueType enum list."""
|
||||
return [IssueType.DETACHED_ADDON_REMOVED]
|
||||
|
||||
@property
|
||||
def auto(self) -> bool:
|
||||
"""Return if a fixup can be apply as auto fix."""
|
||||
return False
|
@@ -7,6 +7,7 @@ from ..const import ContextType, IssueType, SuggestionType
|
||||
from .base import FixupBase
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
MAX_AUTO_ATTEMPTS = 5
|
||||
|
||||
|
||||
def setup(coresys: CoreSys) -> FixupBase:
|
||||
@@ -17,6 +18,11 @@ def setup(coresys: CoreSys) -> FixupBase:
|
||||
class FixupAddonExecuteRepair(FixupBase):
|
||||
"""Storage class for fixup."""
|
||||
|
||||
def __init__(self, coresys: CoreSys) -> None:
|
||||
"""Initialize the add-on execute repair fixup class."""
|
||||
super().__init__(coresys)
|
||||
self.attempts = 0
|
||||
|
||||
async def process_fixup(self, reference: str | None = None) -> None:
|
||||
"""Pull the addons image."""
|
||||
addon = self.sys_addons.get(reference, local_only=True)
|
||||
@@ -34,6 +40,7 @@ class FixupAddonExecuteRepair(FixupBase):
|
||||
return
|
||||
|
||||
_LOGGER.info("Installing image for addon %s")
|
||||
self.attempts += 1
|
||||
await addon.instance.install(addon.version)
|
||||
|
||||
@property
|
||||
@@ -54,4 +61,4 @@ class FixupAddonExecuteRepair(FixupBase):
|
||||
@property
|
||||
def auto(self) -> bool:
|
||||
"""Return if a fixup can be apply as auto fix."""
|
||||
return True
|
||||
return self.attempts < MAX_AUTO_ATTEMPTS
|
||||
|
@@ -58,9 +58,9 @@ class FixupBase(ABC, CoreSysAttributes):
|
||||
"""Return a ContextType enum."""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def issues(self) -> list[IssueType]:
|
||||
"""Return a IssueType enum list."""
|
||||
return []
|
||||
|
||||
@property
|
||||
def auto(self) -> bool:
|
||||
|
@@ -2,7 +2,7 @@
|
||||
import logging
|
||||
|
||||
from ...coresys import CoreSys
|
||||
from ..const import ContextType, SuggestionType
|
||||
from ..const import ContextType, IssueType, SuggestionType
|
||||
from .base import FixupBase
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
@@ -21,6 +21,11 @@ class FixupSystemCreateFullBackup(FixupBase):
|
||||
_LOGGER.info("Creating a full backup")
|
||||
await self.sys_backups.do_backup_full()
|
||||
|
||||
@property
|
||||
def issues(self) -> list[IssueType]:
|
||||
"""Return a IssueType enum list."""
|
||||
return [IssueType.NO_CURRENT_BACKUP]
|
||||
|
||||
@property
|
||||
def suggestion(self) -> SuggestionType:
|
||||
"""Return a SuggestionType enum."""
|
||||
|
@@ -37,6 +37,7 @@ from .sentry import capture_exception
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
DBUS_INTERFACE_OBJECT_MANAGER: str = "org.freedesktop.DBus.ObjectManager"
|
||||
DBUS_INTERFACE_PROPERTIES: str = "org.freedesktop.DBus.Properties"
|
||||
DBUS_METHOD_GETALL: str = "org.freedesktop.DBus.Properties.GetAll"
|
||||
|
||||
@@ -196,6 +197,13 @@ class DBus:
|
||||
return None
|
||||
return DBusCallWrapper(self, DBUS_INTERFACE_PROPERTIES)
|
||||
|
||||
@property
|
||||
def object_manager(self) -> DBusCallWrapper | None:
|
||||
"""Get object manager proxy interface."""
|
||||
if DBUS_INTERFACE_OBJECT_MANAGER not in self._proxies:
|
||||
return None
|
||||
return DBusCallWrapper(self, DBUS_INTERFACE_OBJECT_MANAGER)
|
||||
|
||||
async def get_properties(self, interface: str) -> dict[str, Any]:
|
||||
"""Read all properties from interface."""
|
||||
if not self.properties:
|
||||
|
@@ -4,6 +4,8 @@ import asyncio
|
||||
from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
from aiohttp.test_utils import TestClient
|
||||
from awesomeversion import AwesomeVersion
|
||||
import pytest
|
||||
|
||||
from supervisor.addons.addon import Addon
|
||||
from supervisor.addons.build import AddonBuild
|
||||
@@ -285,3 +287,37 @@ async def test_api_addon_uninstall_remove_config(
|
||||
assert resp.status == 200
|
||||
assert not coresys.addons.get("local_example", local_only=True)
|
||||
assert not test_folder.exists()
|
||||
|
||||
|
||||
async def test_api_update_available_validates_version(
|
||||
api_client: TestClient,
|
||||
coresys: CoreSys,
|
||||
install_addon_example: Addon,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
tmp_supervisor_data,
|
||||
path_extern,
|
||||
):
|
||||
"""Test update available field is only true if user can update to latest version."""
|
||||
install_addon_example.data["ingress"] = False
|
||||
install_addon_example.data_store["version"] = "1.3.0"
|
||||
caplog.clear()
|
||||
|
||||
resp = await api_client.get("/addons/local_example/info")
|
||||
assert resp.status == 200
|
||||
result = await resp.json()
|
||||
assert result["data"]["version"] == "1.2.0"
|
||||
assert result["data"]["version_latest"] == "1.3.0"
|
||||
assert result["data"]["update_available"] is True
|
||||
|
||||
# If new version can't be installed due to HA version, then no update is available
|
||||
coresys.homeassistant.version = AwesomeVersion("2024.04.0")
|
||||
install_addon_example.data_store["homeassistant"] = "2024.06.0"
|
||||
|
||||
resp = await api_client.get("/addons/local_example/info")
|
||||
assert resp.status == 200
|
||||
result = await resp.json()
|
||||
assert result["data"]["version"] == "1.2.0"
|
||||
assert result["data"]["version_latest"] == "1.3.0"
|
||||
assert result["data"]["update_available"] is False
|
||||
|
||||
assert "Add-on local_example not supported" not in caplog.text
|
||||
|
@@ -4,6 +4,7 @@ from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from aiohttp.test_utils import TestClient
|
||||
from awesomeversion import AwesomeVersion
|
||||
import pytest
|
||||
|
||||
from supervisor.coresys import CoreSys
|
||||
@@ -115,3 +116,29 @@ async def test_api_restart(
|
||||
|
||||
assert container.restart.call_count == 2
|
||||
assert safe_mode_marker.exists()
|
||||
|
||||
|
||||
async def test_api_rebuild(
|
||||
api_client: TestClient,
|
||||
coresys: CoreSys,
|
||||
container: MagicMock,
|
||||
tmp_supervisor_data: Path,
|
||||
path_extern,
|
||||
):
|
||||
"""Test rebuilding homeassistant."""
|
||||
coresys.homeassistant.version = AwesomeVersion("2023.09.0")
|
||||
safe_mode_marker = tmp_supervisor_data / "homeassistant" / "safe-mode"
|
||||
|
||||
with patch.object(HomeAssistantCore, "_block_till_run"):
|
||||
await api_client.post("/homeassistant/rebuild")
|
||||
|
||||
assert container.remove.call_count == 2
|
||||
container.start.assert_called_once()
|
||||
assert not safe_mode_marker.exists()
|
||||
|
||||
with patch.object(HomeAssistantCore, "_block_till_run"):
|
||||
await api_client.post("/homeassistant/rebuild", json={"safe_mode": True})
|
||||
|
||||
assert container.remove.call_count == 4
|
||||
assert container.start.call_count == 2
|
||||
assert safe_mode_marker.exists()
|
||||
|
@@ -1,6 +1,7 @@
|
||||
"""Test Store API."""
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
from aiohttp.test_utils import TestClient
|
||||
@@ -8,6 +9,7 @@ import pytest
|
||||
|
||||
from supervisor.addons.addon import Addon
|
||||
from supervisor.arch import CpuArch
|
||||
from supervisor.config import CoreConfig
|
||||
from supervisor.const import AddonState
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.docker.addon import DockerAddon
|
||||
@@ -188,3 +190,91 @@ async def test_api_store_update_healthcheck(
|
||||
assert resp.status == 200
|
||||
|
||||
await _container_events_task
|
||||
|
||||
|
||||
@pytest.mark.parametrize("resource", ["store/addons", "addons"])
|
||||
async def test_api_store_addons_no_changelog(
|
||||
api_client: TestClient, coresys: CoreSys, store_addon: AddonStore, resource: str
|
||||
):
|
||||
"""Test /store/addons/{addon}/changelog REST API.
|
||||
|
||||
Currently the frontend expects a valid body even in the error case. Make sure that is
|
||||
what the API returns.
|
||||
"""
|
||||
assert store_addon.with_changelog is False
|
||||
resp = await api_client.get(f"/{resource}/{store_addon.slug}/changelog")
|
||||
assert resp.status == 200
|
||||
result = await resp.text()
|
||||
assert result == "No changelog found for add-on test_store_addon!"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("resource", ["store/addons", "addons"])
|
||||
async def test_api_detached_addon_changelog(
|
||||
api_client: TestClient,
|
||||
coresys: CoreSys,
|
||||
install_addon_ssh: Addon,
|
||||
tmp_supervisor_data: Path,
|
||||
resource: str,
|
||||
):
|
||||
"""Test /store/addons/{addon}/changelog for an detached addon.
|
||||
|
||||
Currently the frontend expects a valid body even in the error case. Make sure that is
|
||||
what the API returns.
|
||||
"""
|
||||
(addons_dir := tmp_supervisor_data / "addons" / "local").mkdir()
|
||||
with patch.object(
|
||||
CoreConfig, "path_addons_local", new=PropertyMock(return_value=addons_dir)
|
||||
):
|
||||
await coresys.store.load()
|
||||
|
||||
assert install_addon_ssh.is_detached is True
|
||||
assert install_addon_ssh.with_changelog is False
|
||||
|
||||
resp = await api_client.get(f"/{resource}/{install_addon_ssh.slug}/changelog")
|
||||
assert resp.status == 200
|
||||
result = await resp.text()
|
||||
assert result == "Addon local_ssh with version latest does not exist in the store"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("resource", ["store/addons", "addons"])
|
||||
async def test_api_store_addons_no_documentation(
|
||||
api_client: TestClient, coresys: CoreSys, store_addon: AddonStore, resource: str
|
||||
):
|
||||
"""Test /store/addons/{addon}/documentation REST API.
|
||||
|
||||
Currently the frontend expects a valid body even in the error case. Make sure that is
|
||||
what the API returns.
|
||||
"""
|
||||
assert store_addon.with_documentation is False
|
||||
resp = await api_client.get(f"/{resource}/{store_addon.slug}/documentation")
|
||||
assert resp.status == 200
|
||||
result = await resp.text()
|
||||
assert result == "No documentation found for add-on test_store_addon!"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("resource", ["store/addons", "addons"])
|
||||
async def test_api_detached_addon_documentation(
|
||||
api_client: TestClient,
|
||||
coresys: CoreSys,
|
||||
install_addon_ssh: Addon,
|
||||
tmp_supervisor_data: Path,
|
||||
resource: str,
|
||||
):
|
||||
"""Test /store/addons/{addon}/changelog for an detached addon.
|
||||
|
||||
Currently the frontend expects a valid body even in the error case. Make sure that is
|
||||
what the API returns.
|
||||
"""
|
||||
(addons_dir := tmp_supervisor_data / "addons" / "local").mkdir()
|
||||
with patch.object(
|
||||
CoreConfig, "path_addons_local", new=PropertyMock(return_value=addons_dir)
|
||||
):
|
||||
await coresys.store.load()
|
||||
|
||||
assert install_addon_ssh.is_detached is True
|
||||
assert install_addon_ssh.with_documentation is False
|
||||
|
||||
resp = await api_client.get(f"/{resource}/{install_addon_ssh.slug}/documentation")
|
||||
assert resp.status == 200
|
||||
result = await resp.text()
|
||||
assert result == "Addon local_ssh with version latest does not exist in the store"
|
||||
|
@@ -6,7 +6,12 @@ from aiohttp.test_utils import TestClient
|
||||
import pytest
|
||||
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.exceptions import StoreGitError, StoreNotFound
|
||||
from supervisor.exceptions import (
|
||||
HassioError,
|
||||
HostNotSupportedError,
|
||||
StoreGitError,
|
||||
StoreNotFound,
|
||||
)
|
||||
from supervisor.store.repository import Repository
|
||||
|
||||
from tests.api import common_test_api_advanced_logs
|
||||
@@ -160,7 +165,7 @@ async def test_api_supervisor_fallback(
|
||||
api_client: TestClient, journald_logs: MagicMock, docker_logs: MagicMock
|
||||
):
|
||||
"""Check that supervisor logs read from container logs if reading from journald gateway fails badly."""
|
||||
journald_logs.side_effect = OSError("Something bad happened!")
|
||||
journald_logs.side_effect = HassioError("Something bad happened!")
|
||||
|
||||
with patch("supervisor.api._LOGGER.exception") as logger:
|
||||
resp = await api_client.get("/supervisor/logs")
|
||||
@@ -176,6 +181,40 @@ async def test_api_supervisor_fallback(
|
||||
b"\x1b[36m22-10-11 14:04:23 DEBUG (MainThread) [supervisor.utils.dbus] D-Bus call - org.freedesktop.DBus.Properties.call_get_all on /io/hass/os/AppArmor\x1b[0m",
|
||||
]
|
||||
|
||||
journald_logs.reset_mock()
|
||||
|
||||
# also check generic Python error
|
||||
journald_logs.side_effect = OSError("Something bad happened!")
|
||||
|
||||
with patch("supervisor.api._LOGGER.exception") as logger:
|
||||
resp = await api_client.get("/supervisor/logs")
|
||||
logger.assert_called_once_with(
|
||||
"Failed to get supervisor logs using advanced_logs API"
|
||||
)
|
||||
assert resp.status == 200
|
||||
assert resp.content_type == "text/plain"
|
||||
|
||||
|
||||
async def test_api_supervisor_fallback_log_capture(
|
||||
api_client: TestClient, journald_logs: MagicMock, docker_logs: MagicMock
|
||||
):
|
||||
"""Check that Sentry log capture is executed only for unexpected errors."""
|
||||
journald_logs.side_effect = HostNotSupportedError(
|
||||
"No systemd-journal-gatewayd Unix socket available!"
|
||||
)
|
||||
|
||||
with patch("supervisor.api.capture_exception") as capture_exception:
|
||||
await api_client.get("/supervisor/logs")
|
||||
capture_exception.assert_not_called()
|
||||
|
||||
journald_logs.reset_mock()
|
||||
|
||||
journald_logs.side_effect = HassioError("Something bad happened!")
|
||||
|
||||
with patch("supervisor.api.capture_exception") as capture_exception:
|
||||
await api_client.get("/supervisor/logs")
|
||||
capture_exception.assert_called_once()
|
||||
|
||||
|
||||
async def test_api_supervisor_reload(api_client: TestClient):
|
||||
"""Test supervisor reload."""
|
||||
|
@@ -1750,3 +1750,40 @@ async def test_reload_error(
|
||||
|
||||
assert "Could not list backups" in caplog.text
|
||||
assert coresys.core.healthy is healthy_expected
|
||||
|
||||
|
||||
async def test_monitoring_after_full_restore(
|
||||
coresys: CoreSys, full_backup_mock, install_addon_ssh, container
|
||||
):
|
||||
"""Test monitoring of addon state still works after full restore."""
|
||||
coresys.core.state = CoreState.RUNNING
|
||||
coresys.hardware.disk.get_disk_free_space = lambda x: 5000
|
||||
coresys.homeassistant.core.start = AsyncMock(return_value=None)
|
||||
coresys.homeassistant.core.stop = AsyncMock(return_value=None)
|
||||
coresys.homeassistant.core.update = AsyncMock(return_value=None)
|
||||
|
||||
manager = BackupManager(coresys)
|
||||
|
||||
backup_instance = full_backup_mock.return_value
|
||||
assert await manager.do_restore_full(backup_instance)
|
||||
|
||||
backup_instance.restore_addons.assert_called_once_with([TEST_ADDON_SLUG])
|
||||
assert coresys.core.state == CoreState.RUNNING
|
||||
coresys.docker.unload.assert_not_called()
|
||||
|
||||
|
||||
async def test_monitoring_after_partial_restore(
|
||||
coresys: CoreSys, partial_backup_mock, install_addon_ssh, container
|
||||
):
|
||||
"""Test monitoring of addon state still works after full restore."""
|
||||
coresys.core.state = CoreState.RUNNING
|
||||
coresys.hardware.disk.get_disk_free_space = lambda x: 5000
|
||||
|
||||
manager = BackupManager(coresys)
|
||||
|
||||
backup_instance = partial_backup_mock.return_value
|
||||
assert await manager.do_restore_partial(backup_instance, addons=[TEST_ADDON_SLUG])
|
||||
|
||||
backup_instance.restore_addons.assert_called_once_with([TEST_ADDON_SLUG])
|
||||
assert coresys.core.state == CoreState.RUNNING
|
||||
coresys.docker.unload.assert_not_called()
|
||||
|
@@ -229,6 +229,7 @@ async def fixture_udisks2_services(
|
||||
],
|
||||
"udisks2_loop": None,
|
||||
"udisks2_manager": None,
|
||||
"udisks2": None,
|
||||
"udisks2_partition_table": [
|
||||
"/org/freedesktop/UDisks2/block_devices/mmcblk1",
|
||||
"/org/freedesktop/UDisks2/block_devices/sda",
|
||||
|
@@ -1,5 +1,6 @@
|
||||
"""Test UDisks2 Manager interface."""
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
|
||||
from awesomeversion import AwesomeVersion
|
||||
@@ -7,13 +8,14 @@ from dbus_fast import Variant
|
||||
from dbus_fast.aio.message_bus import MessageBus
|
||||
import pytest
|
||||
|
||||
from supervisor.dbus.udisks2 import UDisks2
|
||||
from supervisor.dbus.udisks2 import UDisks2Manager
|
||||
from supervisor.dbus.udisks2.const import PartitionTableType
|
||||
from supervisor.dbus.udisks2.data import DeviceSpecification
|
||||
from supervisor.exceptions import DBusNotConnectedError, DBusObjectError
|
||||
|
||||
from tests.common import mock_dbus_services
|
||||
from tests.dbus_service_mocks.base import DBusServiceMock
|
||||
from tests.dbus_service_mocks.udisks2 import UDisks2 as UDisks2Service
|
||||
from tests.dbus_service_mocks.udisks2_manager import (
|
||||
UDisks2Manager as UDisks2ManagerService,
|
||||
)
|
||||
@@ -27,12 +29,20 @@ async def fixture_udisks2_manager_service(
|
||||
yield udisks2_services["udisks2_manager"]
|
||||
|
||||
|
||||
@pytest.fixture(name="udisks2_service")
|
||||
async def fixture_udisks2_service(
|
||||
udisks2_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
|
||||
) -> UDisks2Service:
|
||||
"""Mock UDisks2 base service."""
|
||||
yield udisks2_services["udisks2"]
|
||||
|
||||
|
||||
async def test_udisks2_manager_info(
|
||||
udisks2_manager_service: UDisks2ManagerService, dbus_session_bus: MessageBus
|
||||
):
|
||||
"""Test udisks2 manager dbus connection."""
|
||||
udisks2_manager_service.GetBlockDevices.calls.clear()
|
||||
udisks2 = UDisks2()
|
||||
udisks2 = UDisks2Manager()
|
||||
|
||||
assert udisks2.supported_filesystems is None
|
||||
|
||||
@@ -95,6 +105,7 @@ async def test_update_checks_devices_and_drives(dbus_session_bus: MessageBus):
|
||||
"""Test update rechecks block devices and drives correctly."""
|
||||
mocked = await mock_dbus_services(
|
||||
{
|
||||
"udisks2": None,
|
||||
"udisks2_manager": None,
|
||||
"udisks2_block": [
|
||||
"/org/freedesktop/UDisks2/block_devices/sda",
|
||||
@@ -115,7 +126,7 @@ async def test_update_checks_devices_and_drives(dbus_session_bus: MessageBus):
|
||||
"/org/freedesktop/UDisks2/block_devices/sdb",
|
||||
]
|
||||
|
||||
udisks2 = UDisks2()
|
||||
udisks2 = UDisks2Manager()
|
||||
await udisks2.connect(dbus_session_bus)
|
||||
|
||||
assert len(udisks2.block_devices) == 3
|
||||
@@ -214,7 +225,7 @@ async def test_get_block_device(
|
||||
udisks2_manager_service: UDisks2ManagerService, dbus_session_bus: MessageBus
|
||||
):
|
||||
"""Test get block device by object path."""
|
||||
udisks2 = UDisks2()
|
||||
udisks2 = UDisks2Manager()
|
||||
|
||||
with pytest.raises(DBusNotConnectedError):
|
||||
udisks2.get_block_device("/org/freedesktop/UDisks2/block_devices/sda1")
|
||||
@@ -234,7 +245,7 @@ async def test_get_drive(
|
||||
udisks2_manager_service: UDisks2ManagerService, dbus_session_bus: MessageBus
|
||||
):
|
||||
"""Test get drive by object path."""
|
||||
udisks2 = UDisks2()
|
||||
udisks2 = UDisks2Manager()
|
||||
|
||||
with pytest.raises(DBusNotConnectedError):
|
||||
udisks2.get_drive("/org/freedesktop/UDisks2/drives/BJTD4R_0x97cde291")
|
||||
@@ -253,7 +264,7 @@ async def test_resolve_device(
|
||||
):
|
||||
"""Test resolve device."""
|
||||
udisks2_manager_service.ResolveDevice.calls.clear()
|
||||
udisks2 = UDisks2()
|
||||
udisks2 = UDisks2Manager()
|
||||
|
||||
with pytest.raises(DBusNotConnectedError):
|
||||
await udisks2.resolve_device(DeviceSpecification(path=Path("/dev/sda1")))
|
||||
@@ -269,3 +280,52 @@ async def test_resolve_device(
|
||||
{"auth.no_user_interaction": Variant("b", True)},
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
async def test_block_devices_add_remove_signals(
|
||||
udisks2_service: UDisks2Service, dbus_session_bus: MessageBus
|
||||
):
|
||||
"""Test signals processed for added and removed block devices."""
|
||||
udisks2 = UDisks2Manager()
|
||||
await udisks2.connect(dbus_session_bus)
|
||||
|
||||
assert any(
|
||||
device
|
||||
for device in udisks2.block_devices
|
||||
if device.object_path == "/org/freedesktop/UDisks2/block_devices/zram1"
|
||||
)
|
||||
udisks2_service.InterfacesRemoved(
|
||||
"/org/freedesktop/UDisks2/block_devices/zram1",
|
||||
["org.freedesktop.UDisks2.Block"],
|
||||
)
|
||||
await udisks2_service.ping()
|
||||
|
||||
assert not any(
|
||||
device
|
||||
for device in udisks2.block_devices
|
||||
if device.object_path == "/org/freedesktop/UDisks2/block_devices/zram1"
|
||||
)
|
||||
|
||||
udisks2_service.InterfacesAdded(
|
||||
"/org/freedesktop/UDisks2/block_devices/zram1",
|
||||
{
|
||||
"org.freedesktop.UDisks2.Block": {
|
||||
"Device": Variant("ay", b"/dev/zram1"),
|
||||
"PreferredDevice": Variant("ay", b"/dev/zram1"),
|
||||
"DeviceNumber": Variant("t", 64769),
|
||||
"Id": Variant("s", ""),
|
||||
"IdUsage": Variant("s", ""),
|
||||
"IdType": Variant("s", ""),
|
||||
"IdVersion": Variant("s", ""),
|
||||
"IdLabel": Variant("s", ""),
|
||||
"IdUUID": Variant("s", ""),
|
||||
}
|
||||
},
|
||||
)
|
||||
await udisks2_service.ping()
|
||||
await asyncio.sleep(0.1)
|
||||
assert any(
|
||||
device
|
||||
for device in udisks2.block_devices
|
||||
if device.object_path == "/org/freedesktop/UDisks2/block_devices/zram1"
|
||||
)
|
||||
|
41
tests/dbus_service_mocks/udisks2.py
Normal file
41
tests/dbus_service_mocks/udisks2.py
Normal file
@@ -0,0 +1,41 @@
|
||||
"""Mock of base UDisks2 service."""
|
||||
|
||||
from dbus_fast import Variant
|
||||
from dbus_fast.service import signal
|
||||
|
||||
from .base import DBusServiceMock, dbus_method
|
||||
|
||||
BUS_NAME = "org.freedesktop.UDisks2"
|
||||
|
||||
|
||||
def setup(object_path: str | None = None) -> DBusServiceMock:
|
||||
"""Create dbus mock object."""
|
||||
return UDisks2()
|
||||
|
||||
|
||||
class UDisks2(DBusServiceMock):
|
||||
"""UDisks2 base object mock.
|
||||
|
||||
gdbus introspect --system --dest org.freedesktop.UDisks2 --object-path /org/freedesktop/UDisks2
|
||||
"""
|
||||
|
||||
interface = "org.freedesktop.DBus.ObjectManager"
|
||||
object_path = "/org/freedesktop/UDisks2"
|
||||
response_get_managed_objects: dict[str, dict[str, dict[str, Variant]]] = {}
|
||||
|
||||
@dbus_method()
|
||||
def GetManagedObjects(self) -> "a{oa{sa{sv}}}":
|
||||
"""Do GetManagedObjects method."""
|
||||
return self.response_get_managed_objects
|
||||
|
||||
@signal()
|
||||
def InterfacesAdded(
|
||||
self, object_path: str, interfaces_and_properties: dict[str, dict[str, Variant]]
|
||||
) -> "oa{sa{sv}}":
|
||||
"""Signal interfaces added."""
|
||||
return [object_path, interfaces_and_properties]
|
||||
|
||||
@signal()
|
||||
def InterfacesRemoved(self, object_path: str, interfaces: list[str]) -> "oas":
|
||||
"""Signal interfaces removed."""
|
||||
return [object_path, interfaces]
|
@@ -1,4 +1,6 @@
|
||||
"""Test OS API."""
|
||||
|
||||
import asyncio
|
||||
from dataclasses import replace
|
||||
from pathlib import PosixPath
|
||||
from unittest.mock import patch
|
||||
@@ -6,16 +8,20 @@ from unittest.mock import patch
|
||||
from dbus_fast import DBusError, ErrorType, Variant
|
||||
import pytest
|
||||
|
||||
from supervisor.const import CoreState
|
||||
from supervisor.core import Core
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.exceptions import HassOSDataDiskError, HassOSError
|
||||
from supervisor.os.data_disk import Disk
|
||||
from supervisor.resolution.const import ContextType, IssueType
|
||||
from supervisor.resolution.data import Issue
|
||||
|
||||
from tests.common import mock_dbus_services
|
||||
from tests.dbus_service_mocks.agent_datadisk import DataDisk as DataDiskService
|
||||
from tests.dbus_service_mocks.agent_system import System as SystemService
|
||||
from tests.dbus_service_mocks.base import DBusServiceMock
|
||||
from tests.dbus_service_mocks.logind import Logind as LogindService
|
||||
from tests.dbus_service_mocks.udisks2 import UDisks2 as UDisks2Service
|
||||
from tests.dbus_service_mocks.udisks2_block import Block as BlockService
|
||||
from tests.dbus_service_mocks.udisks2_filesystem import Filesystem as FilesystemService
|
||||
from tests.dbus_service_mocks.udisks2_partition import Partition as PartitionService
|
||||
@@ -313,3 +319,107 @@ async def test_datadisk_wipe_errors(
|
||||
|
||||
assert system_service.ScheduleWipeDevice.calls == [()]
|
||||
assert logind_service.Reboot.calls == [(False,)]
|
||||
|
||||
|
||||
async def test_multiple_datadisk_add_remove_signals(
|
||||
coresys: CoreSys,
|
||||
udisks2_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
|
||||
os_available,
|
||||
):
|
||||
"""Test multiple data disk issue created/removed on signal."""
|
||||
udisks2_service: UDisks2Service = udisks2_services["udisks2"]
|
||||
sdb1_block: BlockService = udisks2_services["udisks2_block"][
|
||||
"/org/freedesktop/UDisks2/block_devices/sdb1"
|
||||
]
|
||||
|
||||
await coresys.os.datadisk.load()
|
||||
coresys.core.state = CoreState.RUNNING
|
||||
|
||||
assert coresys.resolution.issues == []
|
||||
assert coresys.resolution.suggestions == []
|
||||
|
||||
sdb1_block.fixture = replace(sdb1_block.fixture, IdLabel="hassos-data")
|
||||
udisks2_service.InterfacesAdded(
|
||||
"/org/freedesktop/UDisks2/block_devices/sdb1",
|
||||
{
|
||||
"org.freedesktop.UDisks2.Block": {
|
||||
"Device": Variant("ay", b"/dev/sdb1"),
|
||||
"PreferredDevice": Variant("ay", b"/dev/sdb1"),
|
||||
"DeviceNumber": Variant("t", 2065),
|
||||
"Id": Variant("s", ""),
|
||||
"IdUsage": Variant("s", ""),
|
||||
"IdType": Variant("s", ""),
|
||||
"IdVersion": Variant("s", ""),
|
||||
"IdLabel": Variant("s", "hassos-data"),
|
||||
"IdUUID": Variant("s", ""),
|
||||
}
|
||||
},
|
||||
)
|
||||
await udisks2_service.ping()
|
||||
await asyncio.sleep(0.2)
|
||||
|
||||
assert (
|
||||
Issue(IssueType.MULTIPLE_DATA_DISKS, ContextType.SYSTEM, reference="/dev/sdb1")
|
||||
in coresys.resolution.issues
|
||||
)
|
||||
|
||||
udisks2_service.InterfacesRemoved(
|
||||
"/org/freedesktop/UDisks2/block_devices/sdb1",
|
||||
["org.freedesktop.UDisks2.Block", "org.freedesktop.UDisks2.Filesystem"],
|
||||
)
|
||||
await udisks2_service.ping()
|
||||
await asyncio.sleep(0.2)
|
||||
|
||||
assert coresys.resolution.issues == []
|
||||
|
||||
|
||||
async def test_disabled_datadisk_add_remove_signals(
|
||||
coresys: CoreSys,
|
||||
udisks2_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
|
||||
os_available,
|
||||
):
|
||||
"""Test disabled data disk issue created/removed on signal."""
|
||||
udisks2_service: UDisks2Service = udisks2_services["udisks2"]
|
||||
sdb1_block: BlockService = udisks2_services["udisks2_block"][
|
||||
"/org/freedesktop/UDisks2/block_devices/sdb1"
|
||||
]
|
||||
|
||||
await coresys.os.datadisk.load()
|
||||
coresys.core.state = CoreState.RUNNING
|
||||
|
||||
assert coresys.resolution.issues == []
|
||||
assert coresys.resolution.suggestions == []
|
||||
|
||||
sdb1_block.fixture = replace(sdb1_block.fixture, IdLabel="hassos-data-dis")
|
||||
udisks2_service.InterfacesAdded(
|
||||
"/org/freedesktop/UDisks2/block_devices/sdb1",
|
||||
{
|
||||
"org.freedesktop.UDisks2.Block": {
|
||||
"Device": Variant("ay", b"/dev/sdb1"),
|
||||
"PreferredDevice": Variant("ay", b"/dev/sdb1"),
|
||||
"DeviceNumber": Variant("t", 2065),
|
||||
"Id": Variant("s", ""),
|
||||
"IdUsage": Variant("s", ""),
|
||||
"IdType": Variant("s", ""),
|
||||
"IdVersion": Variant("s", ""),
|
||||
"IdLabel": Variant("s", "hassos-data-dis"),
|
||||
"IdUUID": Variant("s", ""),
|
||||
}
|
||||
},
|
||||
)
|
||||
await udisks2_service.ping()
|
||||
await asyncio.sleep(0.2)
|
||||
|
||||
assert (
|
||||
Issue(IssueType.DISABLED_DATA_DISK, ContextType.SYSTEM, reference="/dev/sdb1")
|
||||
in coresys.resolution.issues
|
||||
)
|
||||
|
||||
udisks2_service.InterfacesRemoved(
|
||||
"/org/freedesktop/UDisks2/block_devices/sdb1",
|
||||
["org.freedesktop.UDisks2.Block", "org.freedesktop.UDisks2.Filesystem"],
|
||||
)
|
||||
await udisks2_service.ping()
|
||||
await asyncio.sleep(0.2)
|
||||
|
||||
assert coresys.resolution.issues == []
|
||||
|
86
tests/resolution/check/test_check_detached_addon_missing.py
Normal file
86
tests/resolution/check/test_check_detached_addon_missing.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""Test check for detached addons due to repo missing."""
|
||||
from unittest.mock import patch
|
||||
|
||||
from supervisor.addons.addon import Addon
|
||||
from supervisor.const import CoreState
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.resolution.checks.detached_addon_missing import (
|
||||
CheckDetachedAddonMissing,
|
||||
)
|
||||
from supervisor.resolution.const import ContextType, IssueType
|
||||
|
||||
|
||||
async def test_base(coresys: CoreSys):
    """Verify basic attributes of the detached-addon-missing check."""
    check = CheckDetachedAddonMissing(coresys)

    # The check must be enabled by default and expose its canonical slug.
    assert check.enabled
    assert check.slug == "detached_addon_missing"
|
||||
|
||||
|
||||
async def test_check(coresys: CoreSys, install_addon_ssh: Addon):
    """Check raises an issue only for addons whose store repo is gone."""
    check = CheckDetachedAddonMissing(coresys)
    coresys.core.state = CoreState.SETUP

    # With a healthy install the check reports nothing.
    await check()
    assert len(coresys.resolution.issues) == 0

    # Pretend the test addon was installed from a now non-existent store.
    install_addon_ssh.slug = "abc123_ssh"
    coresys.addons.data.system["abc123_ssh"] = coresys.addons.data.system["local_ssh"]
    coresys.addons.local["abc123_ssh"] = coresys.addons.local["local_ssh"]
    install_addon_ssh.data["repository"] = "abc123"

    await check()

    assert len(coresys.resolution.issues) == 1
    issue = coresys.resolution.issues[0]
    assert issue.type is IssueType.DETACHED_ADDON_MISSING
    assert issue.context is ContextType.ADDON
    assert issue.reference == install_addon_ssh.slug
    # No suggestion is attached for a missing-store detachment.
    assert len(coresys.resolution.suggestions) == 0
|
||||
|
||||
|
||||
async def test_approve(coresys: CoreSys, install_addon_ssh: Addon):
    """Approve-check only confirms issues for truly detached addons."""
    check = CheckDetachedAddonMissing(coresys)
    coresys.core.state = CoreState.SETUP

    # Addon still belongs to a known store: issue should not be approved.
    approved = await check.approve_check(reference=install_addon_ssh.slug)
    assert approved is False

    # Pretend the test addon was installed from a now non-existent store.
    install_addon_ssh.slug = "abc123_ssh"
    coresys.addons.data.system["abc123_ssh"] = coresys.addons.data.system["local_ssh"]
    coresys.addons.local["abc123_ssh"] = coresys.addons.local["local_ssh"]
    install_addon_ssh.data["repository"] = "abc123"

    # Store is gone now: the existing issue should be kept.
    approved = await check.approve_check(reference=install_addon_ssh.slug)
    assert approved is True
|
||||
|
||||
|
||||
async def test_did_run(coresys: CoreSys):
    """The check only executes in its declared core states."""
    check_instance = CheckDetachedAddonMissing(coresys)
    run_states = check_instance.states
    assert run_states == [CoreState.SETUP]

    skip_states = [state for state in CoreState if state not in run_states]
    assert skip_states

    with patch.object(
        CheckDetachedAddonMissing, "run_check", return_value=None
    ) as run_check:
        # States in which the check must execute exactly once.
        for state in run_states:
            coresys.core.state = state
            await check_instance()
            run_check.assert_called_once()
            run_check.reset_mock()

        # Every other state must be a no-op.
        for state in skip_states:
            coresys.core.state = state
            await check_instance()
            run_check.assert_not_called()
            run_check.reset_mock()
|
97
tests/resolution/check/test_check_detached_addon_removed.py
Normal file
97
tests/resolution/check/test_check_detached_addon_removed.py
Normal file
@@ -0,0 +1,97 @@
|
||||
"""Test check for detached addons due to removal from repo."""
|
||||
from pathlib import Path
|
||||
from unittest.mock import PropertyMock, patch
|
||||
|
||||
from supervisor.addons.addon import Addon
|
||||
from supervisor.config import CoreConfig
|
||||
from supervisor.const import CoreState
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.resolution.checks.detached_addon_removed import (
|
||||
CheckDetachedAddonRemoved,
|
||||
)
|
||||
from supervisor.resolution.const import ContextType, IssueType, SuggestionType
|
||||
|
||||
|
||||
async def test_base(coresys: CoreSys):
    """Verify basic attributes of the detached-addon-removed check."""
    check = CheckDetachedAddonRemoved(coresys)

    # The check must be enabled by default and expose its canonical slug.
    assert check.enabled
    assert check.slug == "detached_addon_removed"
|
||||
|
||||
|
||||
async def test_check(
    coresys: CoreSys, install_addon_ssh: Addon, tmp_supervisor_data: Path
):
    """Check raises issue + remove suggestion for addons removed from repo."""
    check = CheckDetachedAddonRemoved(coresys)
    coresys.core.state = CoreState.SETUP

    # Nothing detached yet: no issues and no suggestions.
    await check()
    assert len(coresys.resolution.issues) == 0
    assert len(coresys.resolution.suggestions) == 0

    # Reload the store from an empty local addons dir so the installed
    # addon no longer exists in any repository.
    addons_dir = tmp_supervisor_data / "addons" / "local"
    addons_dir.mkdir()
    with patch.object(
        CoreConfig, "path_addons_local", new=PropertyMock(return_value=addons_dir)
    ):
        await coresys.store.load()

    await check()

    assert len(coresys.resolution.issues) == 1
    issue = coresys.resolution.issues[0]
    assert issue.type is IssueType.DETACHED_ADDON_REMOVED
    assert issue.context is ContextType.ADDON
    assert issue.reference == install_addon_ssh.slug

    assert len(coresys.resolution.suggestions) == 1
    suggestion = coresys.resolution.suggestions[0]
    assert suggestion.type is SuggestionType.EXECUTE_REMOVE
    assert suggestion.context is ContextType.ADDON
    assert suggestion.reference == install_addon_ssh.slug
|
||||
|
||||
|
||||
async def test_approve(
    coresys: CoreSys, install_addon_ssh: Addon, tmp_supervisor_data: Path
):
    """Approve-check only confirms issues for addons removed from repo."""
    check = CheckDetachedAddonRemoved(coresys)
    coresys.core.state = CoreState.SETUP

    # Addon is still present in the store: do not approve.
    approved = await check.approve_check(reference=install_addon_ssh.slug)
    assert approved is False

    # Reload the store from an empty local addons dir so the installed
    # addon no longer exists in any repository.
    addons_dir = tmp_supervisor_data / "addons" / "local"
    addons_dir.mkdir()
    with patch.object(
        CoreConfig, "path_addons_local", new=PropertyMock(return_value=addons_dir)
    ):
        await coresys.store.load()

    # Addon has vanished from its repo: keep the issue.
    approved = await check.approve_check(reference=install_addon_ssh.slug)
    assert approved is True
|
||||
|
||||
|
||||
async def test_did_run(coresys: CoreSys):
    """The check only executes in its declared core states."""
    check_instance = CheckDetachedAddonRemoved(coresys)
    run_states = check_instance.states
    assert run_states == [CoreState.SETUP]

    skip_states = [state for state in CoreState if state not in run_states]
    assert skip_states

    with patch.object(
        CheckDetachedAddonRemoved, "run_check", return_value=None
    ) as run_check:
        # States in which the check must execute exactly once.
        for state in run_states:
            coresys.core.state = state
            await check_instance()
            run_check.assert_called_once()
            run_check.reset_mock()

        # Every other state must be a no-op.
        for state in skip_states:
            coresys.core.state = state
            await check_instance()
            run_check.assert_not_called()
            run_check.reset_mock()
|
34
tests/resolution/fixup/test_addon_execute_remove.py
Normal file
34
tests/resolution/fixup/test_addon_execute_remove.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""Test evaluation base."""
|
||||
from unittest.mock import patch
|
||||
|
||||
from supervisor.addons.addon import Addon
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.resolution.const import ContextType, IssueType, SuggestionType
|
||||
from supervisor.resolution.data import Issue, Suggestion
|
||||
from supervisor.resolution.fixups.addon_execute_remove import FixupAddonExecuteRemove
|
||||
|
||||
|
||||
async def test_fixup(coresys: CoreSys, install_addon_ssh: Addon):
    """Fixup uninstalls the detached addon and clears issue/suggestion."""
    fixup = FixupAddonExecuteRemove(coresys)

    # This fixup must never run automatically.
    assert fixup.auto is False

    coresys.resolution.suggestions = Suggestion(
        SuggestionType.EXECUTE_REMOVE,
        ContextType.ADDON,
        reference=install_addon_ssh.slug,
    )
    coresys.resolution.issues = Issue(
        IssueType.DETACHED_ADDON_REMOVED,
        ContextType.ADDON,
        reference=install_addon_ssh.slug,
    )

    # Applying the fixup should trigger an uninstall of the addon.
    with patch.object(Addon, "uninstall") as uninstall:
        await fixup()

        assert uninstall.called

    # Both the suggestion and its issue are resolved afterwards.
    assert len(coresys.resolution.suggestions) == 0
    assert len(coresys.resolution.issues) == 0
|
@@ -3,12 +3,14 @@
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from docker.errors import NotFound
|
||||
import pytest
|
||||
|
||||
from supervisor.addons.addon import Addon
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.docker.addon import DockerAddon
|
||||
from supervisor.docker.interface import DockerInterface
|
||||
from supervisor.docker.manager import DockerAPI
|
||||
from supervisor.exceptions import DockerError
|
||||
from supervisor.resolution.const import ContextType, IssueType, SuggestionType
|
||||
from supervisor.resolution.fixups.addon_execute_repair import FixupAddonExecuteRepair
|
||||
|
||||
@@ -35,6 +37,30 @@ async def test_fixup(docker: DockerAPI, coresys: CoreSys, install_addon_ssh: Add
|
||||
assert not coresys.resolution.suggestions
|
||||
|
||||
|
||||
async def test_fixup_max_auto_attempts(
    docker: DockerAPI, coresys: CoreSys, install_addon_ssh: Addon
):
    """Fixup stops being auto-applied after 5 failures."""
    docker.images.get.side_effect = NotFound("missing")
    install_addon_ssh.data["image"] = "test_image"

    fixup = FixupAddonExecuteRepair(coresys)

    coresys.resolution.create_issue(
        IssueType.MISSING_IMAGE,
        ContextType.ADDON,
        reference="local_ssh",
        suggestions=[SuggestionType.EXECUTE_REPAIR],
    )

    # Each failed install attempt counts against the auto-apply budget.
    with patch.object(DockerInterface, "install", side_effect=DockerError):
        for _attempt in range(5):
            assert fixup.auto is True
            with pytest.raises(DockerError):
                await fixup()

    # After five failures the fixup must no longer auto-apply.
    assert fixup.auto is False
|
||||
|
||||
|
||||
async def test_fixup_no_addon(coresys: CoreSys):
|
||||
"""Test fixup dismisses if addon is missing."""
|
||||
addon_execute_repair = FixupAddonExecuteRepair(coresys)
|
||||
|
Reference in New Issue
Block a user