diff --git a/supervisor/api/backups.py b/supervisor/api/backups.py index 7f0b9075c..0b7e04476 100644 --- a/supervisor/api/backups.py +++ b/supervisor/api/backups.py @@ -36,7 +36,6 @@ from ..const import ( ATTR_LOCATION, ATTR_NAME, ATTR_PASSWORD, - ATTR_PATH, ATTR_PROTECTED, ATTR_REPOSITORIES, ATTR_SIZE, @@ -156,8 +155,8 @@ class APIBackups(CoreSysAttributes): """Make location attributes dictionary.""" return { loc if loc else LOCATION_LOCAL: { - ATTR_PROTECTED: backup.all_locations[loc][ATTR_PROTECTED], - ATTR_SIZE_BYTES: backup.all_locations[loc][ATTR_SIZE_BYTES], + ATTR_PROTECTED: backup.all_locations[loc].protected, + ATTR_SIZE_BYTES: backup.all_locations[loc].size_bytes, } for loc in backup.locations } @@ -262,7 +261,7 @@ class APIBackups(CoreSysAttributes): def _location_to_mount(self, location: str | None) -> LOCATION_TYPE: """Convert a single location to a mount if possible.""" if not location or location == LOCATION_CLOUD_BACKUP: - return location + return cast(LOCATION_TYPE, location) mount = self.sys_mounts.get(location) if mount.usage != MountUsage.BACKUP: @@ -474,7 +473,7 @@ class APIBackups(CoreSysAttributes): raise APIError(f"Backup {backup.slug} is not in location {location}") _LOGGER.info("Downloading backup %s", backup.slug) - filename = backup.all_locations[location][ATTR_PATH] + filename = backup.all_locations[location].path # If the file is missing, return 404 and trigger reload of location if not await self.sys_run_in_executor(filename.is_file): self.sys_create_task(self.sys_backups.reload(location)) @@ -512,7 +511,7 @@ class APIBackups(CoreSysAttributes): location = locations.pop(0) if location and location != LOCATION_CLOUD_BACKUP: - tmp_path = cast(Mount, location).local_where or tmp_path + tmp_path = cast(Mount, location).local_where filename: str | None = None if ATTR_FILENAME in request.query: diff --git a/supervisor/api/os.py b/supervisor/api/os.py index b565d3c31..c7592c7dc 100644 --- a/supervisor/api/os.py +++ b/supervisor/api/os.py @@ -228,7 +228,11 @@ class APIOS(CoreSysAttributes): @api_process async def config_swap_info(self, request: web.Request) -> dict[str, Any]: """Get swap settings.""" - if not self.coresys.os.available or self.coresys.os.version < "15.0": + if ( + not self.coresys.os.available + or not self.coresys.os.version + or self.coresys.os.version < "15.0" + ): raise APINotFound( "Home Assistant OS 15.0 or newer required for swap settings" ) @@ -241,7 +245,11 @@ class APIOS(CoreSysAttributes): @api_process async def config_swap_options(self, request: web.Request) -> None: """Update swap settings.""" - if not self.coresys.os.available or self.coresys.os.version < "15.0": + if ( + not self.coresys.os.available + or not self.coresys.os.version + or self.coresys.os.version < "15.0" + ): raise APINotFound( "Home Assistant OS 15.0 or newer required for swap settings" ) diff --git a/supervisor/api/proxy.py b/supervisor/api/proxy.py index 26e85bdef..69e095e9a 100644 --- a/supervisor/api/proxy.py +++ b/supervisor/api/proxy.py @@ -6,7 +6,7 @@ from contextlib import asynccontextmanager import logging import aiohttp -from aiohttp import WSMessageTypeError, web +from aiohttp import WSCloseCode, WSMessageTypeError, web from aiohttp.client_exceptions import ClientConnectorError from aiohttp.client_ws import ClientWebSocketResponse from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE @@ -205,7 +205,9 @@ class APIProxy(CoreSysAttributes): logger.warning( "Error WebSocket message received while proxying: %r", msg.data ) - await 
target.close(code=source.close_code) + await target.close( + code=source.close_code or WSCloseCode.INTERNAL_ERROR + ) case _: logger.warning( "Cannot proxy WebSocket message of unsupported type: %r", diff --git a/supervisor/backups/backup.py b/supervisor/backups/backup.py index e3a665f8f..43842945a 100644 --- a/supervisor/backups/backup.py +++ b/supervisor/backups/backup.py @@ -5,6 +5,7 @@ from collections import defaultdict from collections.abc import AsyncGenerator, Awaitable from contextlib import asynccontextmanager from copy import deepcopy +from dataclasses import dataclass from datetime import timedelta import io import json @@ -14,7 +15,7 @@ import tarfile from tarfile import TarFile from tempfile import TemporaryDirectory import time -from typing import Any, Self +from typing import Any, Self, cast from awesomeversion import AwesomeVersion, AwesomeVersionCompareException from cryptography.hazmat.backends import default_backend @@ -35,11 +36,9 @@ from ..const import ( ATTR_FOLDERS, ATTR_HOMEASSISTANT, ATTR_NAME, - ATTR_PATH, ATTR_PROTECTED, ATTR_REPOSITORIES, ATTR_SIZE, - ATTR_SIZE_BYTES, ATTR_SLUG, ATTR_SUPERVISOR_VERSION, ATTR_TYPE, @@ -69,6 +68,15 @@ from .validate import SCHEMA_BACKUP _LOGGER: logging.Logger = logging.getLogger(__name__) +@dataclass(slots=True) +class BackupLocation: + """Backup location metadata.""" + + path: Path + protected: bool + size_bytes: int + + def location_sort_key(value: str | None) -> str: """Sort locations, None is always first else alphabetical.""" return value if value else "" @@ -91,16 +99,16 @@ class Backup(JobGroup): coresys, JOB_GROUP_BACKUP.format_map(defaultdict(str, slug=slug)), slug ) self._data: dict[str, Any] = data or {ATTR_SLUG: slug} - self._tmp: TemporaryDirectory = None + self._tmp: TemporaryDirectory | None = None self._outer_secure_tarfile: SecureTarFile | None = None self._key: bytes | None = None self._aes: Cipher | None = None - self._locations: dict[str | None, dict[str, Path | bool]] = { - location: { - ATTR_PATH: tar_file, - ATTR_PROTECTED: data.get(ATTR_PROTECTED, False) if data else False, - ATTR_SIZE_BYTES: size_bytes, - } + self._locations: dict[str | None, BackupLocation] = { + location: BackupLocation( + path=tar_file, + protected=data.get(ATTR_PROTECTED, False) if data else False, + size_bytes=size_bytes, + ) } @property @@ -131,7 +139,7 @@ class Backup(JobGroup): @property def protected(self) -> bool: """Return backup date.""" - return self._locations[self.location][ATTR_PROTECTED] + return self._locations[self.location].protected @property def compressed(self) -> bool: @@ -208,7 +216,7 @@ class Backup(JobGroup): return self.locations[0] @property - def all_locations(self) -> dict[str | None, dict[str, Path | bool]]: + def all_locations(self) -> dict[str | None, BackupLocation]: """Return all locations this backup was found in.""" return self._locations @@ -234,7 +242,7 @@ class Backup(JobGroup): @property def size_bytes(self) -> int: """Return backup size in bytes.""" - return self._locations[self.location][ATTR_SIZE_BYTES] + return self._locations[self.location].size_bytes @property def is_new(self) -> bool: @@ -244,7 +252,7 @@ class Backup(JobGroup): @property def tarfile(self) -> Path: """Return path to backup tarfile.""" - return self._locations[self.location][ATTR_PATH] + return self._locations[self.location].path @property def is_current(self) -> bool: @@ -296,7 +304,7 @@ class Backup(JobGroup): # In case of conflict we always ignore the ones from the first one. 
But log them to let the user know if conflict := { - loc: val[ATTR_PATH] + loc: val.path for loc, val in self.all_locations.items() if loc in backup.all_locations and backup.all_locations[loc] != val }: @@ -334,7 +342,7 @@ class Backup(JobGroup): self._init_password(password) self._data[ATTR_PROTECTED] = True self._data[ATTR_CRYPTO] = CRYPTO_AES128 - self._locations[self.location][ATTR_PROTECTED] = True + self._locations[self.location].protected = True if not compressed: self._data[ATTR_COMPRESSED] = False @@ -361,7 +369,7 @@ class Backup(JobGroup): Checks if we can access the backup file and decrypt if necessary. """ - backup_file: Path = self.all_locations[location][ATTR_PATH] + backup_file: Path = self.all_locations[location].path def _validate_file() -> None: ending = f".tar{'.gz' if self.compressed else ''}" @@ -416,6 +424,9 @@ class Backup(JobGroup): json_file = backup.extractfile("./snapshot.json") else: json_file = backup.extractfile("./backup.json") + + if not json_file: + raise BackupInvalidError("Metadata file cannot be read") return size_bytes, json_file.read() # read backup.json @@ -424,7 +435,7 @@ class Backup(JobGroup): except FileNotFoundError: _LOGGER.error("No tarfile located at %s", self.tarfile) return False - except (tarfile.TarError, KeyError) as err: + except (BackupInvalidError, tarfile.TarError, KeyError) as err: _LOGGER.error("Can't read backup tarfile %s: %s", self.tarfile, err) return False @@ -447,8 +458,8 @@ class Backup(JobGroup): return False if self._data[ATTR_PROTECTED]: - self._locations[self.location][ATTR_PROTECTED] = True - self._locations[self.location][ATTR_SIZE_BYTES] = size_bytes + self._locations[self.location].protected = True + self._locations[self.location].size_bytes = size_bytes return True @@ -456,7 +467,7 @@ class Backup(JobGroup): async def create(self) -> AsyncGenerator[None]: """Create new backup file.""" - def _open_outer_tarfile(): + def _open_outer_tarfile() -> tuple[SecureTarFile, tarfile.TarFile]: """Create and open outer tarfile.""" if self.tarfile.is_file(): raise BackupFileExistError( @@ -485,20 +496,22 @@ class Backup(JobGroup): return _outer_secure_tarfile, _outer_tarfile - def _close_outer_tarfile() -> int: - """Close outer tarfile.""" - self._outer_secure_tarfile.close() - return self.tarfile.stat().st_size - - self._outer_secure_tarfile, outer_tarfile = await self.sys_run_in_executor( + outer_secure_tarfile, outer_tarfile = await self.sys_run_in_executor( _open_outer_tarfile ) + self._outer_secure_tarfile = outer_secure_tarfile + + def _close_outer_tarfile() -> int: + """Close outer tarfile.""" + outer_secure_tarfile.close() + return self.tarfile.stat().st_size + try: yield finally: await self._create_cleanup(outer_tarfile) size_bytes = await self.sys_run_in_executor(_close_outer_tarfile) - self._locations[self.location][ATTR_SIZE_BYTES] = size_bytes + self._locations[self.location].size_bytes = size_bytes self._outer_secure_tarfile = None @asynccontextmanager @@ -513,7 +526,7 @@ class Backup(JobGroup): backup_tarfile = ( self.tarfile if location == DEFAULT - else self.all_locations[location][ATTR_PATH] + else self.all_locations[cast(str | None, location)].path ) # extract an existing backup @@ -579,6 +592,10 @@ class Backup(JobGroup): async def _addon_save(self, addon: Addon) -> asyncio.Task | None: """Store an add-on into backup.""" self.sys_jobs.current.reference = addon.slug + if not self._outer_secure_tarfile: + raise RuntimeError( + "Cannot backup components without initializing backup tar" + ) tar_name = 
f"{addon.slug}.tar{'.gz' if self.compressed else ''}" @@ -610,7 +627,7 @@ class Backup(JobGroup): return start_task @Job(name="backup_store_addons", cleanup=False) - async def store_addons(self, addon_list: list[str]) -> list[asyncio.Task]: + async def store_addons(self, addon_list: list[Addon]) -> list[asyncio.Task]: """Add a list of add-ons into backup. For each addon that needs to be started after backup, returns a Task which @@ -631,6 +648,8 @@ class Backup(JobGroup): async def _addon_restore(self, addon_slug: str) -> asyncio.Task | None: """Restore an add-on from backup.""" self.sys_jobs.current.reference = addon_slug + if not self._tmp: + raise RuntimeError("Cannot restore components without opening backup tar") tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}" addon_file = SecureTarFile( @@ -696,6 +715,12 @@ class Backup(JobGroup): async def _folder_save(self, name: str): """Take backup of a folder.""" self.sys_jobs.current.reference = name + if not self._outer_secure_tarfile: + raise RuntimeError( + "Cannot backup components without initializing backup tar" + ) + + outer_secure_tarfile = self._outer_secure_tarfile slug_name = name.replace("/", "_") tar_name = f"{slug_name}.tar{'.gz' if self.compressed else ''}" origin_dir = Path(self.sys_config.path_supervisor, name) @@ -725,7 +750,7 @@ class Backup(JobGroup): return False - with self._outer_secure_tarfile.create_inner_tar( + with outer_secure_tarfile.create_inner_tar( f"./{tar_name}", gzip=self.compressed, key=self._key, @@ -759,6 +784,8 @@ class Backup(JobGroup): async def _folder_restore(self, name: str) -> None: """Restore a folder.""" self.sys_jobs.current.reference = name + if not self._tmp: + raise RuntimeError("Cannot restore components without opening backup tar") slug_name = name.replace("/", "_") tar_name = Path( @@ -767,7 +794,7 @@ class Backup(JobGroup): origin_dir = Path(self.sys_config.path_supervisor, name) # Perform a restore - def _restore() -> bool: + def _restore() -> None: # Check if exists inside backup if not tar_name.exists(): raise BackupInvalidError( @@ -795,7 +822,6 @@ class Backup(JobGroup): raise BackupError( f"Can't restore folder {name}: {err}", _LOGGER.warning ) from err - return True # Unmount any mounts within folder bind_mounts = [ @@ -808,7 +834,7 @@ class Backup(JobGroup): await asyncio.gather(*[bind_mount.unmount() for bind_mount in bind_mounts]) try: - return await self.sys_run_in_executor(_restore) + await self.sys_run_in_executor(_restore) finally: if bind_mounts: await asyncio.gather( @@ -832,6 +858,11 @@ class Backup(JobGroup): @Job(name="backup_store_homeassistant", cleanup=False) async def store_homeassistant(self, exclude_database: bool = False): """Backup Home Assistant Core configuration folder.""" + if not self._outer_secure_tarfile: + raise RuntimeError( + "Cannot backup components without initializing backup tar" + ) + self._data[ATTR_HOMEASSISTANT] = { ATTR_VERSION: self.sys_homeassistant.version, ATTR_EXCLUDE_DATABASE: exclude_database, @@ -855,6 +886,9 @@ class Backup(JobGroup): @Job(name="backup_restore_homeassistant", cleanup=False) async def restore_homeassistant(self) -> Awaitable[None]: """Restore Home Assistant Core configuration folder.""" + if not self._tmp: + raise RuntimeError("Cannot restore components without opening backup tar") + await self.sys_homeassistant.core.stop(remove_container=True) # Restore Home Assistant Core config directory diff --git a/supervisor/backups/const.py b/supervisor/backups/const.py index b0cd396a1..d8d69bba5 100644 --- 
a/supervisor/backups/const.py +++ b/supervisor/backups/const.py @@ -9,7 +9,7 @@ BUF_SIZE = 2**20 * 4 # 4MB DEFAULT_FREEZE_TIMEOUT = 600 LOCATION_CLOUD_BACKUP = ".cloud_backup" -LOCATION_TYPE = Mount | Literal[LOCATION_CLOUD_BACKUP] | None +LOCATION_TYPE = Mount | Literal[".cloud_backup"] | None class BackupType(StrEnum): diff --git a/supervisor/backups/manager.py b/supervisor/backups/manager.py index c0904bfaa..7cbf0558e 100644 --- a/supervisor/backups/manager.py +++ b/supervisor/backups/manager.py @@ -8,17 +8,16 @@ import errno import logging from pathlib import Path from shutil import copy +from typing import cast from ..addons.addon import Addon from ..const import ( ATTR_DAYS_UNTIL_STALE, - ATTR_PATH, - ATTR_PROTECTED, - ATTR_SIZE_BYTES, FILE_HASSIO_BACKUPS, FOLDER_HOMEASSISTANT, CoreState, ) +from ..coresys import CoreSys from ..dbus.const import UnitActiveState from ..exceptions import ( BackupDataDiskBadMessageError, @@ -37,7 +36,7 @@ from ..utils.common import FileConfiguration from ..utils.dt import utcnow from ..utils.sentinel import DEFAULT from ..utils.sentry import async_capture_exception -from .backup import Backup +from .backup import Backup, BackupLocation from .const import ( DEFAULT_FREEZE_TIMEOUT, LOCATION_CLOUD_BACKUP, @@ -58,7 +57,7 @@ JOB_PARTIAL_RESTORE = "backup_manager_partial_restore" class BackupManager(FileConfiguration, JobGroup): """Manage backups.""" - def __init__(self, coresys): + def __init__(self, coresys: CoreSys): """Initialize a backup manager.""" super().__init__(FILE_HASSIO_BACKUPS, SCHEMA_BACKUPS_CONFIG) super(FileConfiguration, self).__init__(coresys, JOB_GROUP_BACKUP_MANAGER) @@ -69,7 +68,7 @@ class BackupManager(FileConfiguration, JobGroup): @property def list_backups(self) -> list[Backup]: """Return a list of all backup objects.""" - return self._backups.values() + return list(self._backups.values()) @property def days_until_stale(self) -> int: @@ -90,7 +89,7 @@ class BackupManager(FileConfiguration, JobGroup): } | { mount.name: mount.local_where for mount in self.sys_mounts.backup_mounts - if mount.state == UnitActiveState.ACTIVE + if mount.state == UnitActiveState.ACTIVE and mount.local_where } @property @@ -103,7 +102,7 @@ class BackupManager(FileConfiguration, JobGroup): return job.uuid return None - def get(self, slug: str) -> Backup: + def get(self, slug: str) -> Backup | None: """Return backup object.""" return self._backups.get(slug) @@ -119,8 +118,7 @@ class BackupManager(FileConfiguration, JobGroup): location = self.sys_mounts.default_backup_mount if location: - location_mount: Mount = location - return location_mount.local_where + return cast(Mount, location).local_where return self.sys_config.path_backup @@ -129,13 +127,14 @@ class BackupManager(FileConfiguration, JobGroup): if location == DEFAULT and self.sys_mounts.default_backup_mount: location = self.sys_mounts.default_backup_mount - if location not in (DEFAULT, LOCATION_CLOUD_BACKUP, None): - location_mount: Mount = location - if not await location_mount.is_mounted(): - raise BackupMountDownError( - f"{location_mount.name} is down, cannot back-up to it", - _LOGGER.error, - ) + if ( + location not in (DEFAULT, LOCATION_CLOUD_BACKUP, None) + and not await (location_mount := cast(Mount, location)).is_mounted() + ): + raise BackupMountDownError( + f"{location_mount.name} is down, cannot back-up to it", + _LOGGER.error, + ) def _get_location_name( self, @@ -143,13 +142,13 @@ class BackupManager(FileConfiguration, JobGroup): ) -> str | None: """Get name of location (or None for local 
backup folder).""" if location == LOCATION_CLOUD_BACKUP: - return location + return cast(str, location) if location == DEFAULT and self.sys_mounts.default_backup_mount: location = self.sys_mounts.default_backup_mount if location: - return location.name + return cast(Mount, location).name return None def _change_stage( @@ -161,7 +160,7 @@ class BackupManager(FileConfiguration, JobGroup): Must be called from an existing backup/restore job. """ - job_name = self.sys_jobs.current.name + job_name = cast(str, self.sys_jobs.current.name) if "restore" in job_name: action = "Restore" elif "freeze" in job_name: @@ -237,12 +236,9 @@ class BackupManager(FileConfiguration, JobGroup): return backup - def load(self) -> Awaitable[None]: - """Load exists backups data. - - Return a coroutine. - """ - return self.reload() + async def load(self) -> None: + """Load exists backups data.""" + await self.reload() async def reload(self, location: str | None | type[DEFAULT] = DEFAULT) -> bool: """Load exists backups.""" @@ -278,10 +274,12 @@ class BackupManager(FileConfiguration, JobGroup): return False + # This is just so we don't have to cast repeatedly. Variable will only be used when location is not DEFAULT + location_name = "" if location == DEFAULT else cast(str | None, location) locations = ( self.backup_locations if location == DEFAULT - else {location: self.backup_locations[location]} + else {location_name: self.backup_locations[location_name]} ) tasks = [ self.sys_create_task(_load_backup(_location, tar_file)) @@ -311,9 +309,9 @@ class BackupManager(FileConfiguration, JobGroup): err, ) - elif location in backup.all_locations: + elif location_name in backup.all_locations: if len(backup.all_locations) > 1: - del backup.all_locations[location] + del backup.all_locations[location_name] else: del self._backups[backup.slug] @@ -336,7 +334,7 @@ class BackupManager(FileConfiguration, JobGroup): else list(backup.all_locations.keys()) ) for location in targets: - backup_tarfile = backup.all_locations[location][ATTR_PATH] + backup_tarfile = backup.all_locations[location].path try: await self.sys_run_in_executor(backup_tarfile.unlink) del backup.all_locations[location] @@ -370,7 +368,7 @@ class BackupManager(FileConfiguration, JobGroup): all_new_locations: dict[str | None, Path] = {} - def copy_to_additional_locations() -> dict[str | None, Path]: + def copy_to_additional_locations() -> None: """Copy backup file to additional locations.""" nonlocal all_new_locations for location in locations: @@ -380,7 +378,7 @@ class BackupManager(FileConfiguration, JobGroup): copy(backup.tarfile, self.sys_config.path_core_backup) ) elif location: - location_mount: Mount = location + location_mount = cast(Mount, location) if not location_mount.local_where.is_mount(): raise BackupMountDownError( f"{location_mount.name} is down, cannot copy to it", @@ -413,11 +411,11 @@ class BackupManager(FileConfiguration, JobGroup): finally: backup.all_locations.update( { - loc: { - ATTR_PATH: path, - ATTR_PROTECTED: backup.protected, - ATTR_SIZE_BYTES: backup.size_bytes, - } + loc: BackupLocation( + path=path, + protected=backup.protected, + size_bytes=backup.size_bytes, + ) for loc, path in all_new_locations.items() } ) @@ -591,13 +589,13 @@ class BackupManager(FileConfiguration, JobGroup): self, {JobCondition.FREE_SPACE}, "BackupManager.do_backup_full" ) - backup = self._create_backup( + new_backup = self._create_backup( name, filename, BackupType.FULL, password, compressed, location, extra ) - _LOGGER.info("Creating new full backup with slug 
%s", backup.slug) + _LOGGER.info("Creating new full backup with slug %s", new_backup.slug) backup = await self._do_backup( - backup, + new_backup, self.sys_addons.installed, ALL_FOLDERS, True, @@ -652,21 +650,21 @@ class BackupManager(FileConfiguration, JobGroup): if len(addons) == 0 and len(folders) == 0 and not homeassistant: _LOGGER.error("Nothing to create backup for") - backup = self._create_backup( + new_backup = self._create_backup( name, filename, BackupType.PARTIAL, password, compressed, location, extra ) - _LOGGER.info("Creating new partial backup with slug %s", backup.slug) + _LOGGER.info("Creating new partial backup with slug %s", new_backup.slug) addon_list = [] for addon_slug in addons: addon = self.sys_addons.get(addon_slug) if addon and addon.is_installed: - addon_list.append(addon) + addon_list.append(cast(Addon, addon)) continue _LOGGER.warning("Add-on %s not found/installed", addon_slug) backup = await self._do_backup( - backup, + new_backup, addon_list, folders, homeassistant, @@ -772,13 +770,15 @@ class BackupManager(FileConfiguration, JobGroup): f"Backup {backup.slug} does not exist in {location}", _LOGGER.error ) - location = location if location != DEFAULT else backup.location - if backup.all_locations[location][ATTR_PROTECTED]: + location_name = ( + cast(str | None, location) if location != DEFAULT else backup.location + ) + if backup.all_locations[location_name].protected: backup.set_password(password) else: backup.set_password(None) - await backup.validate_backup(location) + await backup.validate_backup(location_name) @Job( name=JOB_FULL_RESTORE, @@ -857,7 +857,7 @@ class BackupManager(FileConfiguration, JobGroup): backup: Backup, homeassistant: bool = False, addons: list[str] | None = None, - folders: list[Path] | None = None, + folders: list[str] | None = None, password: str | None = None, location: str | None | type[DEFAULT] = DEFAULT, ) -> bool: diff --git a/supervisor/dbus/agent/__init__.py b/supervisor/dbus/agent/__init__.py index 54a5f406e..155b393ea 100644 --- a/supervisor/dbus/agent/__init__.py +++ b/supervisor/dbus/agent/__init__.py @@ -89,9 +89,10 @@ class OSAgent(DBusInterfaceProxy): """Return if diagnostics is enabled on OS-Agent.""" return self.properties[DBUS_ATTR_DIAGNOSTICS] + @dbus_connected def set_diagnostics(self, value: bool) -> Awaitable[None]: """Enable or disable OS-Agent diagnostics.""" - return self.dbus.set_diagnostics(value) + return self.connected_dbus.set("diagnostics", value) @property def all(self) -> list[DBusInterface]: diff --git a/supervisor/dbus/agent/apparmor.py b/supervisor/dbus/agent/apparmor.py index ffeafddf9..b74a451e3 100644 --- a/supervisor/dbus/agent/apparmor.py +++ b/supervisor/dbus/agent/apparmor.py @@ -30,11 +30,13 @@ class AppArmor(DBusInterfaceProxy): @dbus_connected async def load_profile(self, profile: Path, cache: Path) -> None: """Load/Update AppArmor profile.""" - await self.dbus.AppArmor.call_load_profile(profile.as_posix(), cache.as_posix()) + await self.connected_dbus.AppArmor.call( + "load_profile", profile.as_posix(), cache.as_posix() + ) @dbus_connected async def unload_profile(self, profile: Path, cache: Path) -> None: """Remove AppArmor profile.""" - await self.dbus.AppArmor.call_unload_profile( - profile.as_posix(), cache.as_posix() + await self.connected_dbus.AppArmor.call( + "unload_profile", profile.as_posix(), cache.as_posix() ) diff --git a/supervisor/dbus/agent/boards/__init__.py b/supervisor/dbus/agent/boards/__init__.py index 35650fec5..83fec88e3 100644 --- 
a/supervisor/dbus/agent/boards/__init__.py +++ b/supervisor/dbus/agent/boards/__init__.py @@ -1,6 +1,7 @@ """Board management for OS Agent.""" import logging +from typing import cast from dbus_fast.aio.message_bus import MessageBus @@ -47,7 +48,7 @@ class BoardManager(DBusInterfaceProxy): if self.board != BOARD_NAME_GREEN: raise BoardInvalidError("Green board is not in use", _LOGGER.error) - return self._board_proxy + return cast(Green, self._board_proxy) @property def supervised(self) -> Supervised: @@ -55,7 +56,7 @@ class BoardManager(DBusInterfaceProxy): if self.board != BOARD_NAME_SUPERVISED: raise BoardInvalidError("Supervised board is not in use", _LOGGER.error) - return self._board_proxy + return cast(Supervised, self._board_proxy) @property def yellow(self) -> Yellow: @@ -63,7 +64,7 @@ class BoardManager(DBusInterfaceProxy): if self.board != BOARD_NAME_YELLOW: raise BoardInvalidError("Yellow board is not in use", _LOGGER.error) - return self._board_proxy + return cast(Yellow, self._board_proxy) async def connect(self, bus: MessageBus) -> None: """Connect to D-Bus.""" diff --git a/supervisor/dbus/agent/boards/green.py b/supervisor/dbus/agent/boards/green.py index 360782bc6..855c01b39 100644 --- a/supervisor/dbus/agent/boards/green.py +++ b/supervisor/dbus/agent/boards/green.py @@ -8,6 +8,7 @@ from dbus_fast.aio.message_bus import MessageBus from ....const import ATTR_ACTIVITY_LED, ATTR_POWER_LED, ATTR_USER_LED from ...const import DBUS_ATTR_ACTIVITY_LED, DBUS_ATTR_POWER_LED, DBUS_ATTR_USER_LED from ...interface import dbus_property +from ...utils import dbus_connected from .const import BOARD_NAME_GREEN from .interface import BoardProxy from .validate import SCHEMA_GREEN_BOARD @@ -26,10 +27,11 @@ class Green(BoardProxy): """Get activity LED enabled.""" return self.properties[DBUS_ATTR_ACTIVITY_LED] + @dbus_connected def set_activity_led(self, enabled: bool) -> Awaitable[None]: """Enable/disable activity LED.""" self._data[ATTR_ACTIVITY_LED] = enabled - return self.dbus.Boards.Green.set_activity_led(enabled) + return self.connected_dbus.Boards.Green.set("activity_led", enabled) @property @dbus_property @@ -37,10 +39,11 @@ class Green(BoardProxy): """Get power LED enabled.""" return self.properties[DBUS_ATTR_POWER_LED] + @dbus_connected def set_power_led(self, enabled: bool) -> Awaitable[None]: """Enable/disable power LED.""" self._data[ATTR_POWER_LED] = enabled - return self.dbus.Boards.Green.set_power_led(enabled) + return self.connected_dbus.Boards.Green.set("power_led", enabled) @property @dbus_property @@ -48,10 +51,11 @@ class Green(BoardProxy): """Get user LED enabled.""" return self.properties[DBUS_ATTR_USER_LED] + @dbus_connected def set_user_led(self, enabled: bool) -> Awaitable[None]: """Enable/disable disk LED.""" self._data[ATTR_USER_LED] = enabled - return self.dbus.Boards.Green.set_user_led(enabled) + return self.connected_dbus.Boards.Green.set("user_led", enabled) async def connect(self, bus: MessageBus) -> None: """Connect to D-Bus.""" diff --git a/supervisor/dbus/agent/boards/interface.py b/supervisor/dbus/agent/boards/interface.py index ab87d60c6..d44c47fe2 100644 --- a/supervisor/dbus/agent/boards/interface.py +++ b/supervisor/dbus/agent/boards/interface.py @@ -14,16 +14,25 @@ class BoardProxy(FileConfiguration, DBusInterfaceProxy): bus_name: str = DBUS_NAME_HAOS - def __init__(self, name: str, file_schema: Schema | None = None) -> None: + def __init__(self, board_name: str, file_schema: Schema | None = None) -> None: """Initialize properties.""" + 
self._board_name: str = board_name + self._object_path: str = f"{DBUS_OBJECT_HAOS_BOARDS}/{board_name}" + self._properties_interface: str = f"{DBUS_IFACE_HAOS_BOARDS}.{board_name}" super().__init__(FILE_HASSIO_BOARD, file_schema or SCHEMA_BASE_BOARD) super(FileConfiguration, self).__init__() - self._name: str = name - self.object_path: str = f"{DBUS_OBJECT_HAOS_BOARDS}/{name}" - self.properties_interface: str = f"{DBUS_IFACE_HAOS_BOARDS}.{name}" + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path @property - def name(self) -> str: - """Get name.""" - return self._name + def properties_interface(self) -> str: + """Primary interface of object to get property values from.""" + return self._properties_interface + + @property + def board_name(self) -> str: + """Get board name.""" + return self._board_name diff --git a/supervisor/dbus/agent/boards/yellow.py b/supervisor/dbus/agent/boards/yellow.py index dae9dc641..a492a4187 100644 --- a/supervisor/dbus/agent/boards/yellow.py +++ b/supervisor/dbus/agent/boards/yellow.py @@ -8,6 +8,7 @@ from dbus_fast.aio.message_bus import MessageBus from ....const import ATTR_DISK_LED, ATTR_HEARTBEAT_LED, ATTR_POWER_LED from ...const import DBUS_ATTR_DISK_LED, DBUS_ATTR_HEARTBEAT_LED, DBUS_ATTR_POWER_LED from ...interface import dbus_property +from ...utils import dbus_connected from .const import BOARD_NAME_YELLOW from .interface import BoardProxy from .validate import SCHEMA_YELLOW_BOARD @@ -26,10 +27,11 @@ class Yellow(BoardProxy): """Get heartbeat LED enabled.""" return self.properties[DBUS_ATTR_HEARTBEAT_LED] + @dbus_connected def set_heartbeat_led(self, enabled: bool) -> Awaitable[None]: """Enable/disable heartbeat LED.""" self._data[ATTR_HEARTBEAT_LED] = enabled - return self.dbus.Boards.Yellow.set_heartbeat_led(enabled) + return self.connected_dbus.Boards.Yellow.set("heartbeat_led", enabled) @property @dbus_property @@ -37,10 +39,11 @@ class Yellow(BoardProxy): """Get power LED enabled.""" return self.properties[DBUS_ATTR_POWER_LED] + @dbus_connected def set_power_led(self, enabled: bool) -> Awaitable[None]: """Enable/disable power LED.""" self._data[ATTR_POWER_LED] = enabled - return self.dbus.Boards.Yellow.set_power_led(enabled) + return self.connected_dbus.Boards.Yellow.set("power_led", enabled) @property @dbus_property @@ -48,10 +51,11 @@ class Yellow(BoardProxy): """Get disk LED enabled.""" return self.properties[DBUS_ATTR_DISK_LED] + @dbus_connected def set_disk_led(self, enabled: bool) -> Awaitable[None]: """Enable/disable disk LED.""" self._data[ATTR_DISK_LED] = enabled - return self.dbus.Boards.Yellow.set_disk_led(enabled) + return self.connected_dbus.Boards.Yellow.set("disk_led", enabled) async def connect(self, bus: MessageBus) -> None: """Connect to D-Bus.""" diff --git a/supervisor/dbus/agent/cgroup.py b/supervisor/dbus/agent/cgroup.py index 1114ab8f8..f00cda898 100644 --- a/supervisor/dbus/agent/cgroup.py +++ b/supervisor/dbus/agent/cgroup.py @@ -14,4 +14,6 @@ class CGroup(DBusInterface): @dbus_connected async def add_devices_allowed(self, container_id: str, permission: str) -> None: """Update cgroup devices and add new devices.""" - await self.dbus.CGroup.call_add_devices_allowed(container_id, permission) + await self.connected_dbus.CGroup.call( + "add_devices_allowed", container_id, permission + ) diff --git a/supervisor/dbus/agent/datadisk.py b/supervisor/dbus/agent/datadisk.py index a6d679cc6..45351e0c0 100644 --- a/supervisor/dbus/agent/datadisk.py +++ 
b/supervisor/dbus/agent/datadisk.py @@ -28,14 +28,14 @@ class DataDisk(DBusInterfaceProxy): @dbus_connected async def change_device(self, device: Path) -> None: """Migrate data disk to a new device.""" - await self.dbus.DataDisk.call_change_device(device.as_posix()) + await self.connected_dbus.DataDisk.call("change_device", device.as_posix()) @dbus_connected async def reload_device(self) -> None: """Reload device data.""" - await self.dbus.DataDisk.call_reload_device() + await self.connected_dbus.DataDisk.call("reload_device") @dbus_connected async def mark_data_move(self) -> None: """Create marker to signal to do data disk migration next reboot.""" - await self.dbus.DataDisk.call_mark_data_move() + await self.connected_dbus.DataDisk.call("mark_data_move") diff --git a/supervisor/dbus/agent/swap.py b/supervisor/dbus/agent/swap.py index faacd95f2..544bbb77a 100644 --- a/supervisor/dbus/agent/swap.py +++ b/supervisor/dbus/agent/swap.py @@ -27,7 +27,7 @@ class Swap(DBusInterfaceProxy): def set_swap_size(self, size: str) -> Awaitable[None]: """Set swap size.""" - return self.dbus.Config.Swap.set_swap_size(size) + return self.connected_dbus.Config.Swap.set("swap_size", size) @property @dbus_property @@ -37,4 +37,4 @@ class Swap(DBusInterfaceProxy): def set_swappiness(self, swappiness: int) -> Awaitable[None]: """Set swappiness.""" - return self.dbus.Config.Swap.set_swappiness(swappiness) + return self.connected_dbus.Config.Swap.set("swappiness", swappiness) diff --git a/supervisor/dbus/agent/system.py b/supervisor/dbus/agent/system.py index ac758b0f2..3dd8b42bc 100644 --- a/supervisor/dbus/agent/system.py +++ b/supervisor/dbus/agent/system.py @@ -14,4 +14,4 @@ class System(DBusInterface): @dbus_connected async def schedule_wipe_device(self) -> bool: """Schedule a factory reset on next system boot.""" - return await self.dbus.System.call_schedule_wipe_device() + return await self.connected_dbus.System.call("schedule_wipe_device") diff --git a/supervisor/dbus/hostname.py b/supervisor/dbus/hostname.py index 2f13fa157..9b864f62a 100644 --- a/supervisor/dbus/hostname.py +++ b/supervisor/dbus/hostname.py @@ -84,4 +84,4 @@ class Hostname(DBusInterfaceProxy): @dbus_connected async def set_static_hostname(self, hostname: str) -> None: """Change local hostname.""" - await self.dbus.call_set_static_hostname(hostname, False) + await self.connected_dbus.call("set_static_hostname", hostname, False) diff --git a/supervisor/dbus/interface.py b/supervisor/dbus/interface.py index 2b46882aa..22b26e383 100644 --- a/supervisor/dbus/interface.py +++ b/supervisor/dbus/interface.py @@ -1,13 +1,13 @@ """Interface class for D-Bus wrappers.""" -from abc import ABC +from abc import ABC, abstractmethod from collections.abc import Callable from functools import wraps from typing import Any from dbus_fast.aio.message_bus import MessageBus -from supervisor.exceptions import DBusInterfaceError +from supervisor.exceptions import DBusInterfaceError, DBusNotConnectedError from ..utils.dbus import DBus from .utils import dbus_connected @@ -31,10 +31,18 @@ class DBusInterface(ABC): dbus: DBus | None = None name: str | None = None - bus_name: str | None = None - object_path: str | None = None _shutdown: bool = False + @property + @abstractmethod + def bus_name(self) -> str: + """Bus name for dbus object.""" + + @property + @abstractmethod + def object_path(self) -> str: + """Object path for dbus object.""" + @property def is_connected(self) -> bool: """Return True, if they is connected to D-Bus.""" @@ -45,6 +53,13 @@ class 
DBusInterface(ABC): """Return True, if the object has been shutdown.""" return self._shutdown + @property + def connected_dbus(self) -> DBus: + """Return dbus object. Raise if not connected.""" + if not self.dbus: + raise DBusNotConnectedError() + return self.dbus + async def connect(self, bus: MessageBus) -> None: """Connect to D-Bus.""" await self.initialize(await DBus.connect(bus, self.bus_name, self.object_path)) @@ -67,7 +82,7 @@ class DBusInterface(ABC): def disconnect(self) -> None: """Disconnect from D-Bus.""" if self.is_connected: - self.dbus.disconnect() + self.connected_dbus.disconnect() self.dbus = None def shutdown(self) -> None: @@ -79,17 +94,20 @@ class DBusInterface(ABC): self.disconnect() -class DBusInterfaceProxy(DBusInterface): +class DBusInterfaceProxy(DBusInterface, ABC): """Handle D-Bus interface proxy.""" - properties_interface: str | None = None - properties: dict[str, Any] | None = None sync_properties: bool = True _sync_properties_callback: Callable | None = None - def __init__(self): + def __init__(self) -> None: """Initialize properties.""" - self.properties = {} + self.properties: dict[str, Any] = {} + + @property + @abstractmethod + def properties_interface(self) -> str: + """Primary interface of object to get property values from.""" async def connect(self, bus: MessageBus) -> None: """Connect to D-Bus.""" @@ -99,7 +117,7 @@ class DBusInterfaceProxy(DBusInterface): """Initialize object with already connected dbus object.""" await super().initialize(connected_dbus) - if not self.dbus.properties: + if not self.connected_dbus.properties: self.disconnect() raise DBusInterfaceError( f"D-Bus object {self.object_path} is not usable, introspection is missing required properties interface" @@ -107,13 +125,13 @@ class DBusInterfaceProxy(DBusInterface): await self.update() if self.sync_properties and self.is_connected: - self._sync_properties_callback = self.dbus.sync_property_changes( + self._sync_properties_callback = self.connected_dbus.sync_property_changes( self.properties_interface, self.update ) def stop_sync_property_changes(self) -> None: """Stop syncing property changes to object.""" - if not self._sync_properties_callback: + if not self._sync_properties_callback or not self.dbus: return self.dbus.stop_sync_property_changes(self._sync_properties_callback) @@ -125,4 +143,6 @@ class DBusInterfaceProxy(DBusInterface): if changed and self.properties: self.properties.update(changed) else: - self.properties = await self.dbus.get_properties(self.properties_interface) + self.properties = await self.connected_dbus.get_properties( + self.properties_interface + ) diff --git a/supervisor/dbus/logind.py b/supervisor/dbus/logind.py index c50b38f15..c3a1834f4 100644 --- a/supervisor/dbus/logind.py +++ b/supervisor/dbus/logind.py @@ -35,9 +35,9 @@ class Logind(DBusInterface): @dbus_connected async def reboot(self) -> None: """Reboot host computer.""" - await self.dbus.Manager.call_reboot(False) + await self.connected_dbus.Manager.call("reboot", False) @dbus_connected async def power_off(self) -> None: """Power off host computer.""" - await self.dbus.Manager.call_power_off(False) + await self.connected_dbus.Manager.call("power_off", False) diff --git a/supervisor/dbus/manager.py b/supervisor/dbus/manager.py index 11474620a..4ff8f3b6e 100644 --- a/supervisor/dbus/manager.py +++ b/supervisor/dbus/manager.py @@ -115,7 +115,9 @@ class DBusManager(CoreSysAttributes): return try: - self._bus = await MessageBus(bus_type=BusType.SYSTEM).connect() + self._bus = connected_bus = await 
MessageBus( + bus_type=BusType.SYSTEM + ).connect() except Exception as err: raise DBusFatalError( "Cannot connect to system D-Bus. Disabled any kind of host control!" @@ -124,17 +126,17 @@ class DBusManager(CoreSysAttributes): _LOGGER.info("Connected to system D-Bus.") errors = await asyncio.gather( - *[dbus.connect(self.bus) for dbus in self.all], return_exceptions=True + *[dbus.connect(connected_bus) for dbus in self.all], return_exceptions=True ) - for err in errors: - if err: - dbus = self.all[errors.index(err)] + for error in errors: + if error: + dbus = self.all[errors.index(error)] _LOGGER.warning( "Can't load dbus interface %s %s: %s", dbus.name, dbus.object_path, - err, + error, ) self.sys_host.supported_features.cache_clear() diff --git a/supervisor/dbus/network/__init__.py b/supervisor/dbus/network/__init__.py index 502a759b8..ac2ff0f8c 100644 --- a/supervisor/dbus/network/__init__.py +++ b/supervisor/dbus/network/__init__.py @@ -1,7 +1,7 @@ """Network Manager implementation for DBUS.""" import logging -from typing import Any +from typing import Any, cast from awesomeversion import AwesomeVersion, AwesomeVersionException from dbus_fast.aio.message_bus import MessageBus @@ -106,11 +106,11 @@ class NetworkManager(DBusInterfaceProxy): self, connection_object: str, device_object: str ) -> NetworkConnection: """Activate a connction on a device.""" - obj_active_con = await self.dbus.call_activate_connection( - connection_object, device_object, DBUS_OBJECT_BASE + obj_active_con = await self.connected_dbus.call( + "activate_connection", connection_object, device_object, DBUS_OBJECT_BASE ) active_con = NetworkConnection(obj_active_con) - await active_con.connect(self.dbus.bus) + await active_con.connect(self.connected_dbus.bus) return active_con @dbus_connected @@ -121,21 +121,22 @@ class NetworkManager(DBusInterfaceProxy): ( _, obj_active_con, - ) = await self.dbus.call_add_and_activate_connection( - settings, device_object, DBUS_OBJECT_BASE + ) = await self.connected_dbus.call( + "add_and_activate_connection", settings, device_object, DBUS_OBJECT_BASE ) active_con = NetworkConnection(obj_active_con) - await active_con.connect(self.dbus.bus) - return active_con.settings, active_con + await active_con.connect(self.connected_dbus.bus) + # Settings were provided so settings will not be None here or call would've failed + return cast(NetworkSetting, active_con.settings), active_con @dbus_connected async def check_connectivity(self, *, force: bool = False) -> ConnectivityState: """Check the connectivity of the host.""" if force: - return await self.dbus.call_check_connectivity() + return await self.connected_dbus.call("check_connectivity") else: - return await self.dbus.get_connectivity() + return await self.connected_dbus.get("connectivity") async def connect(self, bus: MessageBus) -> None: """Connect to system's D-Bus.""" @@ -160,9 +161,10 @@ class NetworkManager(DBusInterfaceProxy): self.dns.disconnect() self.settings.disconnect() + @dbus_connected async def _validate_version(self) -> None: """Validate Version of NetworkManager.""" - self.properties = await self.dbus.get_properties(DBUS_IFACE_NM) + self.properties = await self.connected_dbus.get_properties(DBUS_IFACE_NM) try: if self.version >= MINIMAL_VERSION: @@ -206,7 +208,7 @@ class NetworkManager(DBusInterfaceProxy): # Connect to interface try: - await interface.connect(self.dbus.bus) + await interface.connect(self.connected_dbus.bus) except (DBusFatalError, DBusInterfaceError) as err: # Docker creates and deletes interfaces quite 
often, sometimes # this causes a race condition: A device disappears while we diff --git a/supervisor/dbus/network/accesspoint.py b/supervisor/dbus/network/accesspoint.py index 5ad4c30af..4ec782b59 100644 --- a/supervisor/dbus/network/accesspoint.py +++ b/supervisor/dbus/network/accesspoint.py @@ -25,9 +25,13 @@ class NetworkWirelessAP(DBusInterfaceProxy): def __init__(self, object_path: str) -> None: """Initialize NetworkWireless AP object.""" + self._object_path: str = object_path super().__init__() - self.object_path: str = object_path + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path @property @dbus_property diff --git a/supervisor/dbus/network/connection.py b/supervisor/dbus/network/connection.py index c43aa4001..aca98997f 100644 --- a/supervisor/dbus/network/connection.py +++ b/supervisor/dbus/network/connection.py @@ -35,14 +35,17 @@ class NetworkConnection(DBusInterfaceProxy): def __init__(self, object_path: str) -> None: """Initialize NetworkConnection object.""" - super().__init__() - - self.object_path: str = object_path - + self._object_path: str = object_path self._ipv4: IpConfiguration | None = None self._ipv6: IpConfiguration | None = None self._state_flags: set[ConnectionStateFlags] = {ConnectionStateFlags.NONE} self._settings: NetworkSetting | None = None + super().__init__() + + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path @property @dbus_property @@ -134,7 +137,7 @@ class NetworkConnection(DBusInterfaceProxy): await self.ipv4.update() elif self.properties[DBUS_ATTR_IP4CONFIG] != DBUS_OBJECT_BASE: self.ipv4 = IpConfiguration(self.properties[DBUS_ATTR_IP4CONFIG]) - await self.ipv4.connect(self.dbus.bus) + await self.ipv4.connect(self.connected_dbus.bus) else: self.ipv4 = None @@ -148,7 +151,7 @@ class NetworkConnection(DBusInterfaceProxy): await self.ipv6.update() elif self.properties[DBUS_ATTR_IP6CONFIG] != DBUS_OBJECT_BASE: self.ipv6 = IpConfiguration(self.properties[DBUS_ATTR_IP6CONFIG], False) - await self.ipv6.connect(self.dbus.bus) + await self.ipv6.connect(self.connected_dbus.bus) else: self.ipv6 = None @@ -162,7 +165,7 @@ class NetworkConnection(DBusInterfaceProxy): await self.settings.reload() elif self.properties[DBUS_ATTR_CONNECTION] != DBUS_OBJECT_BASE: self.settings = NetworkSetting(self.properties[DBUS_ATTR_CONNECTION]) - await self.settings.connect(self.dbus.bus) + await self.settings.connect(self.connected_dbus.bus) else: self.settings = None diff --git a/supervisor/dbus/network/interface.py b/supervisor/dbus/network/interface.py index 20fe41547..34ca45f88 100644 --- a/supervisor/dbus/network/interface.py +++ b/supervisor/dbus/network/interface.py @@ -36,14 +36,16 @@ class NetworkInterface(DBusInterfaceProxy): def __init__(self, object_path: str) -> None: """Initialize NetworkConnection object.""" - super().__init__() - - self.object_path: str = object_path - + self._object_path: str = object_path self.primary: bool = False - self._connection: NetworkConnection | None = None self._wireless: NetworkWireless | None = None + super().__init__() + + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path @property @dbus_property @@ -130,7 +132,9 @@ class NetworkInterface(DBusInterfaceProxy): self.sync_properties = self.managed if self.sync_properties and self.is_connected: - self.dbus.sync_property_changes(self.properties_interface, self.update) + self.connected_dbus.sync_property_changes( + 
self.properties_interface, self.update + ) @dbus_connected async def update(self, changed: dict[str, Any] | None = None) -> None: @@ -157,7 +161,7 @@ class NetworkInterface(DBusInterfaceProxy): self.connection = NetworkConnection( self.properties[DBUS_ATTR_ACTIVE_CONNECTION] ) - await self.connection.connect(self.dbus.bus) + await self.connection.connect(self.connected_dbus.bus) else: self.connection = None @@ -169,7 +173,7 @@ class NetworkInterface(DBusInterfaceProxy): await self.wireless.update() else: self.wireless = NetworkWireless(self.object_path) - await self.wireless.connect(self.dbus.bus) + await self.wireless.connect(self.connected_dbus.bus) def shutdown(self) -> None: """Shutdown the object and disconnect from D-Bus. diff --git a/supervisor/dbus/network/ip_configuration.py b/supervisor/dbus/network/ip_configuration.py index 6a17ab198..6eff81bc2 100644 --- a/supervisor/dbus/network/ip_configuration.py +++ b/supervisor/dbus/network/ip_configuration.py @@ -29,13 +29,22 @@ class IpConfiguration(DBusInterfaceProxy): def __init__(self, object_path: str, ip4: bool = True) -> None: """Initialize properties.""" - super().__init__() - self._ip4: bool = ip4 - self.object_path: str = object_path - self.properties_interface: str = ( + self._object_path: str = object_path + self._properties_interface: str = ( DBUS_IFACE_IP4CONFIG if ip4 else DBUS_IFACE_IP6CONFIG ) + super().__init__() + + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path + + @property + def properties_interface(self) -> str: + """Primary interface of object to get property values from.""" + return self._properties_interface @property @dbus_property diff --git a/supervisor/dbus/network/setting/__init__.py b/supervisor/dbus/network/setting/__init__.py index 5bf1dbad2..707cdf342 100644 --- a/supervisor/dbus/network/setting/__init__.py +++ b/supervisor/dbus/network/setting/__init__.py @@ -79,7 +79,7 @@ def _merge_settings_attribute( new_settings: dict[str, dict[str, Variant]], attribute: str, *, - ignore_current_value: list[str] = None, + ignore_current_value: list[str] | None = None, ) -> None: """Merge settings attribute if present.""" if attribute in new_settings: @@ -103,7 +103,7 @@ class NetworkSetting(DBusInterface): def __init__(self, object_path: str) -> None: """Initialize NetworkConnection object.""" - self.object_path: str = object_path + self._object_path: str = object_path self._connection: ConnectionProperties | None = None self._wireless: WirelessProperties | None = None @@ -113,6 +113,12 @@ class NetworkSetting(DBusInterface): self._ipv4: IpProperties | None = None self._ipv6: IpProperties | None = None self._match: MatchProperties | None = None + super().__init__() + + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path @property def connection(self) -> ConnectionProperties | None: @@ -157,14 +163,16 @@ class NetworkSetting(DBusInterface): @dbus_connected async def get_settings(self) -> dict[str, Any]: """Return connection settings.""" - return await self.dbus.Settings.Connection.call_get_settings() + return await self.connected_dbus.Settings.Connection.call("get_settings") @dbus_connected async def update(self, settings: dict[str, dict[str, Variant]]) -> None: """Update connection settings.""" new_settings: dict[ str, dict[str, Variant] - ] = await self.dbus.Settings.Connection.call_get_settings(unpack_variants=False) + ] = await self.connected_dbus.Settings.Connection.call( + "get_settings", 
unpack_variants=False + ) _merge_settings_attribute( new_settings, @@ -192,19 +200,19 @@ class NetworkSetting(DBusInterface): ) _merge_settings_attribute(new_settings, settings, CONF_ATTR_MATCH) - await self.dbus.Settings.Connection.call_update(new_settings) + await self.connected_dbus.Settings.Connection.call("update", new_settings) @dbus_connected async def delete(self) -> None: """Delete connection settings.""" - await self.dbus.Settings.Connection.call_delete() + await self.connected_dbus.Settings.Connection.call("delete") async def connect(self, bus: MessageBus) -> None: """Get connection information.""" await super().connect(bus) await self.reload() - self.dbus.Settings.Connection.on_updated(self.reload) + self.connected_dbus.Settings.Connection.on("updated", self.reload) @dbus_connected async def reload(self): diff --git a/supervisor/dbus/network/setting/generate.py b/supervisor/dbus/network/setting/generate.py index 554c5b230..bbd7b50f2 100644 --- a/supervisor/dbus/network/setting/generate.py +++ b/supervisor/dbus/network/setting/generate.py @@ -3,11 +3,12 @@ from __future__ import annotations import socket -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast from uuid import uuid4 from dbus_fast import Variant +from ....host.configuration import VlanConfig from ....host.const import InterfaceMethod, InterfaceType from .. import NetworkManager from . import ( @@ -140,12 +141,15 @@ def get_connection_from_interface( uuid: str | None = None, ) -> dict[str, dict[str, Variant]]: """Generate message argument for network interface update.""" + # Simple input check to ensure it is safe to cast this for type checker + if interface.type == InterfaceType.VLAN and not interface.vlan: + raise ValueError("Interface has type vlan but no vlan config!") # Generate/Update ID/name if not name or not name.startswith("Supervisor"): name = f"Supervisor {interface.name}" if interface.type == InterfaceType.VLAN: - name = f"{name}.{interface.vlan.id}" + name = f"{name}.{cast(VlanConfig, interface.vlan).id}" if interface.type == InterfaceType.ETHERNET: iftype = "802-3-ethernet" @@ -186,14 +190,14 @@ def get_connection_from_interface( CONF_ATTR_802_ETHERNET_ASSIGNED_MAC: Variant("s", "preserve") } elif interface.type == "vlan": - parent = interface.vlan.interface + parent = cast(VlanConfig, interface.vlan).interface if parent in network_manager and ( parent_connection := network_manager.get(parent).connection ): parent = parent_connection.uuid conn[CONF_ATTR_VLAN] = { - CONF_ATTR_VLAN_ID: Variant("u", interface.vlan.id), + CONF_ATTR_VLAN_ID: Variant("u", cast(VlanConfig, interface.vlan).id), CONF_ATTR_VLAN_PARENT: Variant("s", parent), } elif interface.type == InterfaceType.WIRELESS: diff --git a/supervisor/dbus/network/settings.py b/supervisor/dbus/network/settings.py index 48b080514..bc57ff7f5 100644 --- a/supervisor/dbus/network/settings.py +++ b/supervisor/dbus/network/settings.py @@ -37,12 +37,14 @@ class NetworkManagerSettings(DBusInterface): @dbus_connected async def add_connection(self, settings: Any) -> NetworkSetting: """Add new connection.""" - obj_con_setting = await self.dbus.Settings.call_add_connection(settings) + obj_con_setting = await self.connected_dbus.Settings.call( + "add_connection", settings + ) con_setting = NetworkSetting(obj_con_setting) - await con_setting.connect(self.dbus.bus) + await con_setting.connect(self.connected_dbus.bus) return con_setting @dbus_connected async def reload_connections(self) -> bool: """Reload all local connection files.""" - return 
await self.dbus.Settings.call_reload_connections() + return await self.connected_dbus.Settings.call("reload_connections") diff --git a/supervisor/dbus/network/wireless.py b/supervisor/dbus/network/wireless.py index 711aa2688..1f6319fda 100644 --- a/supervisor/dbus/network/wireless.py +++ b/supervisor/dbus/network/wireless.py @@ -29,11 +29,14 @@ class NetworkWireless(DBusInterfaceProxy): def __init__(self, object_path: str) -> None: """Initialize NetworkConnection object.""" + self._object_path: str = object_path + self._active: NetworkWirelessAP | None = None super().__init__() - self.object_path: str = object_path - - self._active: NetworkWirelessAP | None = None + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path @property @dbus_property @@ -57,22 +60,26 @@ class NetworkWireless(DBusInterfaceProxy): @dbus_connected async def request_scan(self) -> None: """Request a new AP scan.""" - await self.dbus.Device.Wireless.call_request_scan({}) + await self.connected_dbus.Device.Wireless.call("request_scan", {}) @dbus_connected async def get_all_accesspoints(self) -> list[NetworkWirelessAP]: """Return a list of all access points path.""" - accesspoints_data = await self.dbus.Device.Wireless.call_get_all_access_points() + accesspoints_data = await self.connected_dbus.Device.Wireless.call( + "get_all_access_points" + ) accesspoints = [NetworkWirelessAP(ap_obj) for ap_obj in accesspoints_data] for err in await asyncio.gather( - *[ap.connect(self.dbus.bus) for ap in accesspoints], return_exceptions=True + *[ap.connect(self.connected_dbus.bus) for ap in accesspoints], + return_exceptions=True, ): if err: _LOGGER.warning("Can't process an AP: %s", err) return accesspoints + @dbus_connected async def update(self, changed: dict[str, Any] | None = None) -> None: """Update properties via D-Bus.""" await super().update(changed) @@ -90,6 +97,6 @@ class NetworkWireless(DBusInterfaceProxy): self.active = NetworkWirelessAP( self.properties[DBUS_ATTR_ACTIVE_ACCESSPOINT] ) - await self.active.connect(self.dbus.bus) + await self.active.connect(self.connected_dbus.bus) else: self.active = None diff --git a/supervisor/dbus/rauc.py b/supervisor/dbus/rauc.py index ab90ef0b7..643987aac 100644 --- a/supervisor/dbus/rauc.py +++ b/supervisor/dbus/rauc.py @@ -56,7 +56,7 @@ class Rauc(DBusInterfaceProxy): object_path: str = DBUS_OBJECT_BASE properties_interface: str = DBUS_IFACE_RAUC_INSTALLER - def __init__(self): + def __init__(self) -> None: """Initialize Properties.""" super().__init__() @@ -104,22 +104,22 @@ class Rauc(DBusInterfaceProxy): @dbus_connected async def install(self, raucb_file) -> None: """Install rauc bundle file.""" - await self.dbus.Installer.call_install(str(raucb_file)) + await self.connected_dbus.Installer.call("install", str(raucb_file)) @dbus_connected async def get_slot_status(self) -> list[tuple[str, SlotStatusDataType]]: """Get slot status.""" - return await self.dbus.Installer.call_get_slot_status() + return await self.connected_dbus.Installer.call("get_slot_status") @dbus_connected def signal_completed(self) -> DBusSignalWrapper: """Return a signal wrapper for completed signal.""" - return self.dbus.signal(DBUS_SIGNAL_RAUC_INSTALLER_COMPLETED) + return self.connected_dbus.signal(DBUS_SIGNAL_RAUC_INSTALLER_COMPLETED) @dbus_connected async def mark(self, state: RaucState, slot_identifier: str) -> tuple[str, str]: """Get slot status.""" - return await self.dbus.Installer.call_mark(state, slot_identifier) + return await 
self.connected_dbus.Installer.call("mark", state, slot_identifier) @dbus_connected async def update(self, changed: dict[str, Any] | None = None) -> None: diff --git a/supervisor/dbus/systemd.py b/supervisor/dbus/systemd.py index 1ff2ed1bc..284458f27 100644 --- a/supervisor/dbus/systemd.py +++ b/supervisor/dbus/systemd.py @@ -60,17 +60,22 @@ class SystemdUnit(DBusInterface): def __init__(self, object_path: str) -> None: """Initialize object.""" super().__init__() - self.object_path = object_path + self._object_path = object_path + + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path @dbus_connected async def get_active_state(self) -> UnitActiveState: """Get active state of the unit.""" - return await self.dbus.Unit.get_active_state() + return await self.connected_dbus.Unit.get("active_state") @dbus_connected def properties_changed(self) -> DBusSignalWrapper: """Return signal wrapper for properties changed.""" - return self.dbus.signal(DBUS_SIGNAL_PROPERTIES_CHANGED) + return self.connected_dbus.signal(DBUS_SIGNAL_PROPERTIES_CHANGED) class Systemd(DBusInterfaceProxy): @@ -124,64 +129,66 @@ class Systemd(DBusInterfaceProxy): @dbus_connected async def reboot(self) -> None: """Reboot host computer.""" - await self.dbus.Manager.call_reboot() + await self.connected_dbus.Manager.call("reboot") @dbus_connected async def power_off(self) -> None: """Power off host computer.""" - await self.dbus.Manager.call_power_off() + await self.connected_dbus.Manager.call("power_off") @dbus_connected @systemd_errors async def start_unit(self, unit: str, mode: StartUnitMode) -> str: """Start a systemd service unit. Returns object path of job.""" - return await self.dbus.Manager.call_start_unit(unit, mode) + return await self.connected_dbus.Manager.call("start_unit", unit, mode) @dbus_connected @systemd_errors async def stop_unit(self, unit: str, mode: StopUnitMode) -> str: """Stop a systemd service unit. Returns object path of job.""" - return await self.dbus.Manager.call_stop_unit(unit, mode) + return await self.connected_dbus.Manager.call("stop_unit", unit, mode) @dbus_connected @systemd_errors async def reload_unit(self, unit: str, mode: StartUnitMode) -> str: """Reload a systemd service unit. Returns object path of job.""" - return await self.dbus.Manager.call_reload_or_restart_unit(unit, mode) + return await self.connected_dbus.Manager.call( + "reload_or_restart_unit", unit, mode + ) @dbus_connected @systemd_errors async def restart_unit(self, unit: str, mode: StartUnitMode) -> str: """Restart a systemd service unit. Returns object path of job.""" - return await self.dbus.Manager.call_restart_unit(unit, mode) + return await self.connected_dbus.Manager.call("restart_unit", unit, mode) @dbus_connected async def list_units( self, ) -> list[tuple[str, str, str, str, str, str, str, int, str, str]]: """Return a list of available systemd services.""" - return await self.dbus.Manager.call_list_units() + return await self.connected_dbus.Manager.call("list_units") @dbus_connected async def start_transient_unit( self, unit: str, mode: StartUnitMode, properties: list[tuple[str, Variant]] ) -> str: """Start a transient unit which is released when stopped or on reboot. 
Returns object path of job.""" - return await self.dbus.Manager.call_start_transient_unit( - unit, mode, properties, [] + return await self.connected_dbus.Manager.call( + "start_transient_unit", unit, mode, properties, [] ) @dbus_connected @systemd_errors async def reset_failed_unit(self, unit: str) -> None: """Reset the failed state of a unit.""" - await self.dbus.Manager.call_reset_failed_unit(unit) + await self.connected_dbus.Manager.call("reset_failed_unit", unit) @dbus_connected @systemd_errors async def get_unit(self, unit: str) -> SystemdUnit: """Return systemd unit for unit name.""" - obj_path = await self.dbus.Manager.call_get_unit(unit) - unit = SystemdUnit(obj_path) - await unit.connect(self.dbus.bus) - return unit + obj_path = await self.connected_dbus.Manager.call("get_unit", unit) + systemd_unit = SystemdUnit(obj_path) + await systemd_unit.connect(self.connected_dbus.bus) + return systemd_unit diff --git a/supervisor/dbus/timedate.py b/supervisor/dbus/timedate.py index 0ebe5a5d5..cfee27a0d 100644 --- a/supervisor/dbus/timedate.py +++ b/supervisor/dbus/timedate.py @@ -35,7 +35,7 @@ class TimeDate(DBusInterfaceProxy): object_path: str = DBUS_OBJECT_TIMEDATE properties_interface: str = DBUS_IFACE_TIMEDATE - def __init__(self): + def __init__(self) -> None: """Initialize object.""" super().__init__() self._timezone_tzinfo: tzinfo | None = None @@ -97,9 +97,11 @@ class TimeDate(DBusInterfaceProxy): @dbus_connected async def set_time(self, utc: datetime) -> None: """Set time & date on host as UTC.""" - await self.dbus.call_set_time(int(utc.timestamp() * 1000000), False, False) + await self.connected_dbus.call( + "set_time", int(utc.timestamp() * 1000000), False, False + ) @dbus_connected async def set_ntp(self, use_ntp: bool) -> None: """Turn NTP on or off.""" - await self.dbus.call_set_ntp(use_ntp, False) + await self.connected_dbus.call("set_ntp", use_ntp, False) diff --git a/supervisor/dbus/udisks2/__init__.py b/supervisor/dbus/udisks2/__init__.py index 61ba7539a..53d7ef02a 100644 --- a/supervisor/dbus/udisks2/__init__.py +++ b/supervisor/dbus/udisks2/__init__.py @@ -74,11 +74,11 @@ class UDisks2Manager(DBusInterfaceProxy): ) else: # Register for signals on devices added/removed - self.udisks2_object_manager.dbus.object_manager.on_interfaces_added( - self._interfaces_added + self.udisks2_object_manager.dbus.object_manager.on( + "interfaces_added", self._interfaces_added ) - self.udisks2_object_manager.dbus.object_manager.on_interfaces_removed( - self._interfaces_removed + self.udisks2_object_manager.dbus.object_manager.on( + "interfaces_removed", self._interfaces_removed ) @dbus_connected @@ -91,8 +91,8 @@ class UDisks2Manager(DBusInterfaceProxy): if not changed: # Cache block devices - block_devices = await self.dbus.Manager.call_get_block_devices( - UDISKS2_DEFAULT_OPTIONS + block_devices = await self.connected_dbus.Manager.call( + "get_block_devices", UDISKS2_DEFAULT_OPTIONS ) unchanged_blocks = self._block_devices.keys() & set(block_devices) @@ -102,7 +102,7 @@ class UDisks2Manager(DBusInterfaceProxy): self._block_devices = { device: self._block_devices[device] if device in unchanged_blocks - else await UDisks2Block.new(device, self.dbus.bus) + else await UDisks2Block.new(device, self.connected_dbus.bus) for device in block_devices } @@ -128,7 +128,7 @@ class UDisks2Manager(DBusInterfaceProxy): self._drives = { drive: self._drives[drive] if drive in self._drives - else await UDisks2Drive.new(drive, self.dbus.bus) + else await UDisks2Drive.new(drive, self.connected_dbus.bus) 
for drive in drives } @@ -180,13 +180,14 @@ class UDisks2Manager(DBusInterfaceProxy): """Return list of device object paths for specification.""" return await asyncio.gather( *[ - UDisks2Block.new(path, self.dbus.bus, sync_properties=False) - for path in await self.dbus.Manager.call_resolve_device( - devspec.to_dict(), UDISKS2_DEFAULT_OPTIONS + UDisks2Block.new(path, self.connected_dbus.bus, sync_properties=False) + for path in await self.connected_dbus.Manager.call( + "resolve_device", devspec.to_dict(), UDISKS2_DEFAULT_OPTIONS ) ] ) + @dbus_connected async def _interfaces_added( self, object_path: str, properties: dict[str, dict[str, Any]] ) -> None: @@ -200,13 +201,13 @@ class UDisks2Manager(DBusInterfaceProxy): if DBUS_IFACE_BLOCK in properties: self._block_devices[object_path] = await UDisks2Block.new( - object_path, self.dbus.bus + object_path, self.connected_dbus.bus ) return if DBUS_IFACE_DRIVE in properties: self._drives[object_path] = await UDisks2Drive.new( - object_path, self.dbus.bus + object_path, self.connected_dbus.bus ) async def _interfaces_removed( diff --git a/supervisor/dbus/udisks2/block.py b/supervisor/dbus/udisks2/block.py index 77a68fd27..a99ba9e1c 100644 --- a/supervisor/dbus/udisks2/block.py +++ b/supervisor/dbus/udisks2/block.py @@ -60,7 +60,7 @@ class UDisks2Block(DBusInterfaceProxy): def __init__(self, object_path: str, *, sync_properties: bool = True) -> None: """Initialize object.""" - self.object_path = object_path + self._object_path = object_path self.sync_properties = sync_properties super().__init__() @@ -78,6 +78,11 @@ class UDisks2Block(DBusInterfaceProxy): await obj.connect(bus) return obj + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path + @property def filesystem(self) -> UDisks2Filesystem | None: """Filesystem interface if block device is one.""" @@ -212,48 +217,54 @@ class UDisks2Block(DBusInterfaceProxy): @dbus_connected async def check_type(self) -> None: """Check if type of block device has changed and adjust interfaces if so.""" - introspection = await self.dbus.introspect() + introspection = await self.connected_dbus.introspect() interfaces = {intr.name for intr in introspection.interfaces} # If interfaces changed, update the proxy from introspection and reload interfaces - if interfaces != set(self.dbus.proxies.keys()): - await self.dbus.init_proxy(introspection=introspection) + if interfaces != set(self.connected_dbus.proxies.keys()): + await self.connected_dbus.init_proxy(introspection=introspection) await self._reload_interfaces() @dbus_connected async def _reload_interfaces(self) -> None: """Reload interfaces from introspection as necessary.""" # Check if block device is a filesystem - if not self.filesystem and DBUS_IFACE_FILESYSTEM in self.dbus.proxies: + if not self.filesystem and DBUS_IFACE_FILESYSTEM in self.connected_dbus.proxies: self._filesystem = UDisks2Filesystem( self.object_path, sync_properties=self.sync_properties ) - await self._filesystem.initialize(self.dbus) + await self._filesystem.initialize(self.connected_dbus) - elif self.filesystem and DBUS_IFACE_FILESYSTEM not in self.dbus.proxies: + elif ( + self.filesystem and DBUS_IFACE_FILESYSTEM not in self.connected_dbus.proxies + ): self.filesystem.stop_sync_property_changes() self._filesystem = None # Check if block device is a partition - if not self.partition and DBUS_IFACE_PARTITION in self.dbus.proxies: + if not self.partition and DBUS_IFACE_PARTITION in self.connected_dbus.proxies: self._partition = 
UDisks2Partition( self.object_path, sync_properties=self.sync_properties ) - await self._partition.initialize(self.dbus) + await self._partition.initialize(self.connected_dbus) - elif self.partition and DBUS_IFACE_PARTITION not in self.dbus.proxies: + elif self.partition and DBUS_IFACE_PARTITION not in self.connected_dbus.proxies: self.partition.stop_sync_property_changes() self._partition = None # Check if block device is a partition table - if not self.partition_table and DBUS_IFACE_PARTITION_TABLE in self.dbus.proxies: + if ( + not self.partition_table + and DBUS_IFACE_PARTITION_TABLE in self.connected_dbus.proxies + ): self._partition_table = UDisks2PartitionTable( self.object_path, sync_properties=self.sync_properties ) - await self._partition_table.initialize(self.dbus) + await self._partition_table.initialize(self.connected_dbus) elif ( - self.partition_table and DBUS_IFACE_PARTITION_TABLE not in self.dbus.proxies + self.partition_table + and DBUS_IFACE_PARTITION_TABLE not in self.connected_dbus.proxies ): self.partition_table.stop_sync_property_changes() self._partition_table = None @@ -263,5 +274,7 @@ class UDisks2Block(DBusInterfaceProxy): self, type_: FormatType = FormatType.GPT, options: FormatOptions | None = None ) -> None: """Format block device.""" - options = options.to_dict() if options else {} - await self.dbus.Block.call_format(type_, options | UDISKS2_DEFAULT_OPTIONS) + format_options = options.to_dict() if options else {} + await self.connected_dbus.Block.call( + "format", type_, format_options | UDISKS2_DEFAULT_OPTIONS + ) diff --git a/supervisor/dbus/udisks2/data.py b/supervisor/dbus/udisks2/data.py index 054f3feb5..402ebfbcc 100644 --- a/supervisor/dbus/udisks2/data.py +++ b/supervisor/dbus/udisks2/data.py @@ -1,7 +1,6 @@ """Data for UDisks2.""" from dataclasses import dataclass -from inspect import get_annotations from pathlib import Path from typing import Any, NotRequired, TypedDict @@ -23,41 +22,6 @@ def _optional_variant(signature: str, value: Any | None) -> Variant | None: return Variant(signature, value) if value is not None else None -UDisks2StandardOptionsDataType = TypedDict( - "UDisks2StandardOptionsDataType", - {"auth.no_user_interaction": NotRequired[bool]}, -) - - -@dataclass(slots=True) -class UDisks2StandardOptions: - """UDisks2 standard options. 
- - http://storaged.org/doc/udisks2-api/latest/udisks-std-options.html - """ - - auth_no_user_interaction: bool | None = None - - @staticmethod - def from_dict(data: UDisks2StandardOptionsDataType) -> "UDisks2StandardOptions": - """Create UDisks2StandardOptions from dict.""" - return UDisks2StandardOptions( - auth_no_user_interaction=data.get("auth.no_user_interaction"), - ) - - def to_dict(self) -> dict[str, Variant]: - """Return dict representation.""" - data = { - "auth.no_user_interaction": _optional_variant( - "b", self.auth_no_user_interaction - ), - } - return {k: v for k, v in data.items() if v} - - -_udisks2_standard_options_annotations = get_annotations(UDisks2StandardOptionsDataType) - - class DeviceSpecificationDataType(TypedDict, total=False): """Device specification data type.""" @@ -81,7 +45,7 @@ class DeviceSpecification: def from_dict(data: DeviceSpecificationDataType) -> "DeviceSpecification": """Create DeviceSpecification from dict.""" return DeviceSpecification( - path=Path(data.get("path")), + path=Path(data["path"]) if "path" in data else None, label=data.get("label"), uuid=data.get("uuid"), ) @@ -109,13 +73,14 @@ FormatOptionsDataType = TypedDict( "dry-run-first": NotRequired[bool], "no-discard": NotRequired[bool], "tear-down": NotRequired[bool], - } - | _udisks2_standard_options_annotations, + # UDisks2 standard options + "auth.no_user_interaction": NotRequired[bool], + }, ) @dataclass(slots=True) -class FormatOptions(UDisks2StandardOptions): +class FormatOptions: """Options for formatting a block device. http://storaged.org/doc/udisks2-api/latest/gdbus-org.freedesktop.UDisks2.Block.html#gdbus-method-org-freedesktop-UDisks2-Block.Format @@ -131,6 +96,8 @@ class FormatOptions(UDisks2StandardOptions): dry_run_first: bool | None = None no_discard: bool | None = None tear_down: bool | None = None + # UDisks2 standard options + auth_no_user_interaction: bool | None = None @staticmethod def from_dict(data: FormatOptionsDataType) -> "FormatOptions": @@ -146,7 +113,7 @@ class FormatOptions(UDisks2StandardOptions): encrypt_type=EncryptType(data["encrypt.type"]) if "encrypt.type" in data else None, - erase=EncryptType(data["erase"]) if "erase" in data else None, + erase=EraseMode(data["erase"]) if "erase" in data else None, update_partition_type=data.get("update-partition-type"), no_block=data.get("no-block"), dry_run_first=data.get("dry-run-first"), @@ -188,13 +155,14 @@ MountOptionsDataType = TypedDict( { "fstype": NotRequired[str], "options": NotRequired[str], - } - | _udisks2_standard_options_annotations, + # UDisks2 standard options + "auth.no_user_interaction": NotRequired[bool], + }, ) @dataclass(slots=True) -class MountOptions(UDisks2StandardOptions): +class MountOptions: """Filesystem mount options. 
http://storaged.org/doc/udisks2-api/latest/gdbus-org.freedesktop.UDisks2.Filesystem.html#gdbus-method-org-freedesktop-UDisks2-Filesystem.Mount @@ -202,6 +170,8 @@ class MountOptions(UDisks2StandardOptions): fstype: str | None = None options: list[str] | None = None + # UDisks2 standard options + auth_no_user_interaction: bool | None = None @staticmethod def from_dict(data: MountOptionsDataType) -> "MountOptions": @@ -227,22 +197,25 @@ class MountOptions(UDisks2StandardOptions): UnmountOptionsDataType = TypedDict( - "UnountOptionsDataType", + "UnmountOptionsDataType", { "force": NotRequired[bool], - } - | _udisks2_standard_options_annotations, + # UDisks2 standard options + "auth.no_user_interaction": NotRequired[bool], + }, ) @dataclass(slots=True) -class UnmountOptions(UDisks2StandardOptions): +class UnmountOptions: """Filesystem unmount options. http://storaged.org/doc/udisks2-api/latest/gdbus-org.freedesktop.UDisks2.Filesystem.html#gdbus-method-org-freedesktop-UDisks2-Filesystem.Unmount """ force: bool | None = None + # UDisks2 standard options + auth_no_user_interaction: bool | None = None @staticmethod def from_dict(data: UnmountOptionsDataType) -> "UnmountOptions": @@ -267,18 +240,24 @@ class UnmountOptions(UDisks2StandardOptions): CreatePartitionOptionsDataType = TypedDict( "CreatePartitionOptionsDataType", - {"partition-type": NotRequired[str]} | _udisks2_standard_options_annotations, + { + "partition-type": NotRequired[str], + # UDisks2 standard options + "auth.no_user_interaction": NotRequired[bool], + }, ) @dataclass(slots=True) -class CreatePartitionOptions(UDisks2StandardOptions): +class CreatePartitionOptions: """Create partition options. http://storaged.org/doc/udisks2-api/latest/gdbus-org.freedesktop.UDisks2.PartitionTable.html#gdbus-method-org-freedesktop-UDisks2-PartitionTable.CreatePartition """ partition_type: str | None = None + # UDisks2 standard options + auth_no_user_interaction: bool | None = None @staticmethod def from_dict(data: CreatePartitionOptionsDataType) -> "CreatePartitionOptions": @@ -303,18 +282,24 @@ class CreatePartitionOptions(UDisks2StandardOptions): DeletePartitionOptionsDataType = TypedDict( "DeletePartitionOptionsDataType", - {"tear-down": NotRequired[bool]} | _udisks2_standard_options_annotations, + { + "tear-down": NotRequired[bool], + # UDisks2 standard options + "auth.no_user_interaction": NotRequired[bool], + }, ) @dataclass(slots=True) -class DeletePartitionOptions(UDisks2StandardOptions): +class DeletePartitionOptions: """Delete partition options. 
http://storaged.org/doc/udisks2-api/latest/gdbus-org.freedesktop.UDisks2.Partition.html#gdbus-method-org-freedesktop-UDisks2-Partition.Delete """ tear_down: bool | None = None + # UDisks2 standard options + auth_no_user_interaction: bool | None = None @staticmethod def from_dict(data: DeletePartitionOptionsDataType) -> "DeletePartitionOptions": diff --git a/supervisor/dbus/udisks2/drive.py b/supervisor/dbus/udisks2/drive.py index 4ae02078b..e81488a47 100644 --- a/supervisor/dbus/udisks2/drive.py +++ b/supervisor/dbus/udisks2/drive.py @@ -37,7 +37,7 @@ class UDisks2Drive(DBusInterfaceProxy): def __init__(self, object_path: str) -> None: """Initialize object.""" - self.object_path = object_path + self._object_path = object_path super().__init__() @staticmethod @@ -47,6 +47,11 @@ class UDisks2Drive(DBusInterfaceProxy): await obj.connect(bus) return obj + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path + @property @dbus_property def vendor(self) -> str: @@ -124,4 +129,4 @@ class UDisks2Drive(DBusInterfaceProxy): @dbus_connected async def eject(self) -> None: """Eject media from drive.""" - await self.dbus.Drive.call_eject(UDISKS2_DEFAULT_OPTIONS) + await self.connected_dbus.Drive.call("eject", UDISKS2_DEFAULT_OPTIONS) diff --git a/supervisor/dbus/udisks2/filesystem.py b/supervisor/dbus/udisks2/filesystem.py index c3f0d3e9c..018f737b4 100644 --- a/supervisor/dbus/udisks2/filesystem.py +++ b/supervisor/dbus/udisks2/filesystem.py @@ -26,10 +26,15 @@ class UDisks2Filesystem(DBusInterfaceProxy): def __init__(self, object_path: str, *, sync_properties: bool = True) -> None: """Initialize object.""" - self.object_path = object_path + self._object_path = object_path self.sync_properties = sync_properties super().__init__() + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path + @property @dbus_property def mount_points(self) -> list[Path]: @@ -53,26 +58,36 @@ class UDisks2Filesystem(DBusInterfaceProxy): if not overridden in /etc/fstab. Therefore unclear if this can be useful to supervisor. http://storaged.org/doc/udisks2-api/latest/gdbus-org.freedesktop.UDisks2.Filesystem.html#gdbus-method-org-freedesktop-UDisks2-Filesystem.Mount """ - options = options.to_dict() if options else {} - return await self.dbus.Filesystem.call_mount(options | UDISKS2_DEFAULT_OPTIONS) + mount_options = options.to_dict() if options else {} + return await self.connected_dbus.Filesystem.call( + "mount", mount_options | UDISKS2_DEFAULT_OPTIONS + ) @dbus_connected async def unmount(self, options: UnmountOptions | None = None) -> None: """Unmount filesystem.""" - options = options.to_dict() if options else {} - await self.dbus.Filesystem.call_unmount(options | UDISKS2_DEFAULT_OPTIONS) + unmount_options = options.to_dict() if options else {} + await self.connected_dbus.Filesystem.call( + "unmount", unmount_options | UDISKS2_DEFAULT_OPTIONS + ) @dbus_connected async def set_label(self, label: str) -> None: """Set filesystem label.""" - await self.dbus.Filesystem.call_set_label(label, UDISKS2_DEFAULT_OPTIONS) + await self.connected_dbus.Filesystem.call( + "set_label", label, UDISKS2_DEFAULT_OPTIONS + ) @dbus_connected async def check(self) -> bool: """Check filesystem for consistency. 
Returns true if it passed.""" - return await self.dbus.Filesystem.call_check(UDISKS2_DEFAULT_OPTIONS) + return await self.connected_dbus.Filesystem.call( + "check", UDISKS2_DEFAULT_OPTIONS + ) @dbus_connected async def repair(self) -> bool: """Attempt to repair filesystem. Returns true if repair was successful.""" - return await self.dbus.Filesystem.call_repair(UDISKS2_DEFAULT_OPTIONS) + return await self.connected_dbus.Filesystem.call( + "repair", UDISKS2_DEFAULT_OPTIONS + ) diff --git a/supervisor/dbus/udisks2/partition.py b/supervisor/dbus/udisks2/partition.py index 462d3e34c..0b8ca1268 100644 --- a/supervisor/dbus/udisks2/partition.py +++ b/supervisor/dbus/udisks2/partition.py @@ -29,10 +29,15 @@ class UDisks2Partition(DBusInterfaceProxy): def __init__(self, object_path: str, *, sync_properties: bool = True) -> None: """Initialize object.""" - self.object_path = object_path + self._object_path = object_path self.sync_properties = sync_properties super().__init__() + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path + @property @dbus_property def number(self) -> int: @@ -86,12 +91,16 @@ class UDisks2Partition(DBusInterfaceProxy): for GPT type tables or a hexadecimal number for dos type tables. Can also use empty string and let UDisks2 choose a default based on partition table and OS. """ - await self.dbus.Partition.call_set_type(type_, UDISKS2_DEFAULT_OPTIONS) + await self.connected_dbus.Partition.call( + "set_type", type_, UDISKS2_DEFAULT_OPTIONS + ) @dbus_connected async def set_name(self, name: str) -> None: """Set the name/label of the partition.""" - await self.dbus.Partition.call_set_name(name, UDISKS2_DEFAULT_OPTIONS) + await self.connected_dbus.Partition.call( + "set_name", name, UDISKS2_DEFAULT_OPTIONS + ) @dbus_connected async def resize(self, size: int = 0) -> None: @@ -100,10 +109,14 @@ class UDisks2Partition(DBusInterfaceProxy): Position/offset cannot be changed, only size. May be slightly bigger then requested. Raises error if allocation fails. """ - await self.dbus.Partition.call_resize(size, UDISKS2_DEFAULT_OPTIONS) + await self.connected_dbus.Partition.call( + "resize", size, UDISKS2_DEFAULT_OPTIONS + ) @dbus_connected async def delete(self, options: DeletePartitionOptions | None = None) -> None: """Delete the partition.""" - options = options.to_dict() if options else {} - return await self.dbus.Partition.call_delete(options | UDISKS2_DEFAULT_OPTIONS) + delete_options = options.to_dict() if options else {} + return await self.connected_dbus.Partition.call( + "delete", delete_options | UDISKS2_DEFAULT_OPTIONS + ) diff --git a/supervisor/dbus/udisks2/partition_table.py b/supervisor/dbus/udisks2/partition_table.py index ad4390bd8..905b1874b 100644 --- a/supervisor/dbus/udisks2/partition_table.py +++ b/supervisor/dbus/udisks2/partition_table.py @@ -24,10 +24,15 @@ class UDisks2PartitionTable(DBusInterfaceProxy): def __init__(self, object_path: str, *, sync_properties: bool = True) -> None: """Initialize object.""" - self.object_path = object_path + self._object_path = object_path self.sync_properties = sync_properties super().__init__() + @property + def object_path(self) -> str: + """Object path for dbus object.""" + return self._object_path + @property @dbus_property def partitions(self) -> list[str]: @@ -59,7 +64,12 @@ class UDisks2PartitionTable(DBusInterfaceProxy): and let UDisks2 choose a default based on partition table and OS. Provide return value with UDisks2Block.new. 
        Or UDisks2.get_block_device after UDisks2.update.
        """
-        options = options.to_dict() if options else {}
-        return await self.dbus.PartitionTable.call_create_partition(
-            offset, size, type_, name, options | UDISKS2_DEFAULT_OPTIONS
+        partition_options = options.to_dict() if options else {}
+        return await self.connected_dbus.PartitionTable.call(
+            "create_partition",
+            offset,
+            size,
+            type_,
+            name,
+            partition_options | UDISKS2_DEFAULT_OPTIONS,
         )
diff --git a/supervisor/mounts/mount.py b/supervisor/mounts/mount.py
index 59b7a7665..d941b306e 100644
--- a/supervisor/mounts/mount.py
+++ b/supervisor/mounts/mount.py
@@ -153,16 +153,9 @@ class Mount(CoreSysAttributes, ABC):
         return self._state
 
     @cached_property
-    def local_where(self) -> Path | None:
-        """Return where this is mounted within supervisor container.
-
-        This returns none if 'where' is not within supervisor's host data directory.
-        """
-        return (
-            self.sys_config.extern_to_local_path(self.where)
-            if self.where.is_relative_to(self.sys_config.path_extern_supervisor)
-            else None
-        )
+    def local_where(self) -> Path:
+        """Return where this is mounted within supervisor container."""
+        return self.sys_config.extern_to_local_path(self.where)
 
     @property
     def container_where(self) -> PurePath | None:
@@ -276,27 +269,25 @@ class Mount(CoreSysAttributes, ABC):
     async def mount(self) -> None:
         """Mount using systemd."""
 
-        # If supervisor can see where it will mount, ensure there's an empty folder there
-        if self.local_where:
-            def ensure_empty_folder() -> None:
-                if not self.local_where.exists():
-                    _LOGGER.info(
-                        "Creating folder for mount: %s", self.local_where.as_posix()
-                    )
-                    self.local_where.mkdir(parents=True)
-                elif not self.local_where.is_dir():
-                    raise MountInvalidError(
-                        f"Cannot mount {self.name} at {self.local_where.as_posix()} as it is not a directory",
-                        _LOGGER.error,
-                    )
-                elif any(self.local_where.iterdir()):
-                    raise MountInvalidError(
-                        f"Cannot mount {self.name} at {self.local_where.as_posix()} because it is not empty",
-                        _LOGGER.error,
-                    )
+        def ensure_empty_folder() -> None:
+            if not self.local_where.exists():
+                _LOGGER.info(
+                    "Creating folder for mount: %s", self.local_where.as_posix()
+                )
+                self.local_where.mkdir(parents=True)
+            elif not self.local_where.is_dir():
+                raise MountInvalidError(
+                    f"Cannot mount {self.name} at {self.local_where.as_posix()} as it is not a directory",
+                    _LOGGER.error,
+                )
+            elif any(self.local_where.iterdir()):
+                raise MountInvalidError(
+                    f"Cannot mount {self.name} at {self.local_where.as_posix()} because it is not empty",
+                    _LOGGER.error,
+                )
 
-            await self.sys_run_in_executor(ensure_empty_folder)
+        await self.sys_run_in_executor(ensure_empty_folder)
 
         try:
             options = (
@@ -542,6 +533,9 @@ class BindMount(Mount):
         self, coresys: CoreSys, data: MountData, *, where: PurePath | None = None
     ) -> None:
         """Initialize object."""
+        if where and not where.is_relative_to(coresys.config.path_extern_supervisor):
+            raise ValueError("Path must be within Supervisor's host data directory!")
+
         super().__init__(coresys, data)
         self._where = where
 
diff --git a/supervisor/utils/dbus.py b/supervisor/utils/dbus.py
index 6284ddd2c..649bc531b 100644
--- a/supervisor/utils/dbus.py
+++ b/supervisor/utils/dbus.py
@@ -5,7 +5,7 @@ from __future__ import annotations
 import asyncio
 from collections.abc import Awaitable, Callable, Coroutine
 import logging
-from typing import Any
+from typing import Any, cast
 
 from dbus_fast import (
     ErrorType,
@@ -305,9 +305,34 @@ class DBus:
         else:
             self._signal_monitors[interface][dbus_name].append(callback)
 
+    @property
+    def _call_wrapper(self) -> DBusCallWrapper:
+        """Get dbus call wrapper for current dbus object."""
+        return DBusCallWrapper(self, self.bus_name)
+
     def __getattr__(self, name: str) -> DBusCallWrapper:
         """Map to dbus method."""
-        return getattr(DBusCallWrapper(self, self.bus_name), name)
+        return getattr(self._call_wrapper, name)
+
+    def call(self, name: str, *args, unpack_variants: bool = True) -> Awaitable[Any]:
+        """Call a dbus method."""
+        return self._call_wrapper.call(name, *args, unpack_variants=unpack_variants)
+
+    def get(self, name: str, *, unpack_variants: bool = True) -> Awaitable[Any]:
+        """Get a dbus property value."""
+        return self._call_wrapper.get(name, unpack_variants=unpack_variants)
+
+    def set(self, name: str, value: Any) -> Awaitable[None]:
+        """Set a dbus property."""
+        return self._call_wrapper.set(name, value)
+
+    def on(self, name: str, callback: Callable) -> None:
+        """Add listener for a signal."""
+        self._call_wrapper.on(name, callback)
+
+    def off(self, name: str, callback: Callable) -> None:
+        """Remove listener for a signal."""
+        self._call_wrapper.off(name, callback)
 
 
 class DBusCallWrapper:
@@ -324,7 +349,9 @@ class DBusCallWrapper:
             _LOGGER.error("D-Bus method %s not exists!", self.interface)
             raise DBusInterfaceMethodError()
 
-    def __getattr__(self, name: str) -> Awaitable | Callable:
+    def _dbus_action(
+        self, name: str
+    ) -> DBusCallWrapper | Callable[..., Awaitable[Any]] | Callable[[Callable], None]:
         """Map to dbus method."""
         if not self._proxy:
             return DBusCallWrapper(self.dbus, f"{self.interface}.{name}")
@@ -409,6 +436,36 @@ class DBusCallWrapper:
         # Didn't reach the dbus call yet, just happened to hit another interface. Return a wrapper
         return DBusCallWrapper(self.dbus, f"{self.interface}.{name}")
 
+    def __getattr__(self, name: str) -> DBusCallWrapper:
+        """Map to a dbus method."""
+        return cast(DBusCallWrapper, self._dbus_action(name))
+
+    def call(self, name: str, *args, unpack_variants: bool = True) -> Awaitable[Any]:
+        """Call a dbus method."""
+        return cast(Callable[..., Awaitable[Any]], self._dbus_action(f"call_{name}"))(
+            *args, unpack_variants=unpack_variants
+        )
+
+    def get(self, name: str, *, unpack_variants: bool = True) -> Awaitable[Any]:
+        """Get a dbus property value."""
+        return cast(Callable[[bool], Awaitable[Any]], self._dbus_action(f"get_{name}"))(
+            unpack_variants=unpack_variants
+        )
+
+    def set(self, name: str, value: Any) -> Awaitable[None]:
+        """Set a dbus property."""
+        return cast(Callable[[Any], Awaitable[Any]], self._dbus_action(f"set_{name}"))(
+            value
+        )
+
+    def on(self, name: str, callback: Callable) -> None:
+        """Add listener for a signal."""
+        cast(Callable[[Callable], None], self._dbus_action(f"on_{name}"))(callback)
+
+    def off(self, name: str, callback: Callable) -> None:
+        """Remove listener for a signal."""
+        cast(Callable[[Callable], None], self._dbus_action(f"off_{name}"))(callback)
+
 
 class DBusSignalWrapper:
     """Wrapper for D-Bus Signal."""
diff --git a/tests/api/test_backups.py b/tests/api/test_backups.py
index adc7cb4cd..6317f4a22 100644
--- a/tests/api/test_backups.py
+++ b/tests/api/test_backups.py
@@ -12,7 +12,7 @@ from awesomeversion import AwesomeVersion
 import pytest
 
 from supervisor.addons.addon import Addon
-from supervisor.backups.backup import Backup
+from supervisor.backups.backup import Backup, BackupLocation
 from supervisor.const import CoreState
 from supervisor.coresys import CoreSys
 from supervisor.docker.manager import
DockerAPI @@ -505,7 +505,9 @@ async def test_restore_immediate_errors( with ( patch.object( - Backup, "all_locations", new={None: {"path": None, "protected": True}} + Backup, + "all_locations", + new={None: BackupLocation(path=Path("/"), protected=True, size_bytes=0)}, ), patch.object( Backup, @@ -586,7 +588,9 @@ async def test_cloud_backup_core_only(api_client: TestClient, mock_full_backup: # pylint: disable-next=protected-access mock_full_backup._locations = { - ".cloud_backup": {"path": None, "protected": False, "size_bytes": 10240} + ".cloud_backup": BackupLocation( + path=Path("/"), protected=False, size_bytes=10240 + ) } assert mock_full_backup.location == ".cloud_backup" @@ -672,8 +676,10 @@ async def test_backup_to_multiple_locations( assert orig_backup.exists() assert copy_backup.exists() assert coresys.backups.get(slug).all_locations == { - None: {"path": orig_backup, "protected": False, "size_bytes": 10240}, - ".cloud_backup": {"path": copy_backup, "protected": False, "size_bytes": 10240}, + None: BackupLocation(path=orig_backup, protected=False, size_bytes=10240), + ".cloud_backup": BackupLocation( + path=copy_backup, protected=False, size_bytes=10240 + ), } assert coresys.backups.get(slug).location is None @@ -709,7 +715,7 @@ async def test_backup_to_multiple_locations_error_on_copy( orig_backup = coresys.config.path_backup / f"{slug}.tar" assert await coresys.run_in_executor(orig_backup.exists) assert coresys.backups.get(slug).all_locations == { - None: {"path": orig_backup, "protected": False, "size_bytes": 10240}, + None: BackupLocation(path=orig_backup, protected=False, size_bytes=10240), } assert coresys.backups.get(slug).location is None @@ -783,8 +789,10 @@ async def test_upload_to_multiple_locations( assert orig_backup.exists() assert copy_backup.exists() assert coresys.backups.get("7fed74c8").all_locations == { - None: {"path": orig_backup, "protected": False, "size_bytes": 10240}, - ".cloud_backup": {"path": copy_backup, "protected": False, "size_bytes": 10240}, + None: BackupLocation(path=orig_backup, protected=False, size_bytes=10240), + ".cloud_backup": BackupLocation( + path=copy_backup, protected=False, size_bytes=10240 + ), } assert coresys.backups.get("7fed74c8").location is None @@ -798,7 +806,7 @@ async def test_upload_duplicate_backup_new_location( orig_backup = Path(copy(backup_file, coresys.config.path_backup)) await coresys.backups.reload() assert coresys.backups.get("7fed74c8").all_locations == { - None: {"path": orig_backup, "protected": False, "size_bytes": 10240} + None: BackupLocation(path=orig_backup, protected=False, size_bytes=10240), } with backup_file.open("rb") as file, MultipartWriter("form-data") as mp: @@ -815,8 +823,10 @@ async def test_upload_duplicate_backup_new_location( assert orig_backup.exists() assert copy_backup.exists() assert coresys.backups.get("7fed74c8").all_locations == { - None: {"path": orig_backup, "protected": False, "size_bytes": 10240}, - ".cloud_backup": {"path": copy_backup, "protected": False, "size_bytes": 10240}, + None: BackupLocation(path=orig_backup, protected=False, size_bytes=10240), + ".cloud_backup": BackupLocation( + path=copy_backup, protected=False, size_bytes=10240 + ), } assert coresys.backups.get("7fed74c8").location is None @@ -853,7 +863,7 @@ async def test_upload_with_filename( orig_backup = coresys.config.path_backup / filename assert orig_backup.exists() assert coresys.backups.get("7fed74c8").all_locations == { - None: {"path": orig_backup, "protected": False, "size_bytes": 10240} + None: 
BackupLocation(path=orig_backup, protected=False, size_bytes=10240), } assert coresys.backups.get("7fed74c8").location is None @@ -886,8 +896,10 @@ async def test_remove_backup_from_location(api_client: TestClient, coresys: Core await coresys.backups.reload() assert (backup := coresys.backups.get("7fed74c8")) assert backup.all_locations == { - None: {"path": location_1, "protected": False, "size_bytes": 10240}, - ".cloud_backup": {"path": location_2, "protected": False, "size_bytes": 10240}, + None: BackupLocation(path=location_1, protected=False, size_bytes=10240), + ".cloud_backup": BackupLocation( + path=location_2, protected=False, size_bytes=10240 + ), } resp = await api_client.delete( @@ -899,7 +911,7 @@ async def test_remove_backup_from_location(api_client: TestClient, coresys: Core assert not location_2.exists() assert coresys.backups.get("7fed74c8") assert backup.all_locations == { - None: {"path": location_1, "protected": False, "size_bytes": 10240} + None: BackupLocation(path=location_1, protected=False, size_bytes=10240), } @@ -912,7 +924,7 @@ async def test_remove_backup_file_not_found(api_client: TestClient, coresys: Cor await coresys.backups.reload() assert (backup := coresys.backups.get("7fed74c8")) assert backup.all_locations == { - None: {"path": location, "protected": False, "size_bytes": 10240}, + None: BackupLocation(path=location, protected=False, size_bytes=10240), } location.unlink() @@ -940,8 +952,10 @@ async def test_download_backup_from_location( await coresys.backups.reload() assert (backup := coresys.backups.get("7fed74c8")) assert backup.all_locations == { - None: {"path": location_1, "protected": False, "size_bytes": 10240}, - ".cloud_backup": {"path": location_2, "protected": False, "size_bytes": 10240}, + None: BackupLocation(path=location_1, protected=False, size_bytes=10240), + ".cloud_backup": BackupLocation( + path=location_2, protected=False, size_bytes=10240 + ), } # The use case of this is user might want to pick a particular mount if one is flaky @@ -1019,7 +1033,7 @@ async def test_restore_backup_from_location( # The use case of this is user might want to pick a particular mount if one is flaky # To simulate this, remove the file from one location and show one works and the other doesn't assert backup.location is None - (backup_local_path := backup.all_locations[None]["path"]).unlink() + (backup_local_path := backup.all_locations[None].path).unlink() test_file.unlink() resp = await api_client.post( @@ -1055,12 +1069,12 @@ async def test_restore_backup_unencrypted_after_encrypted( backup = coresys.backups.get("d9c48f8b") assert backup.all_locations == { - None: {"path": Path(enc_tar), "protected": True, "size_bytes": 10240}, - ".cloud_backup": { - "path": Path(unc_tar), - "protected": False, - "size_bytes": 10240, - }, + None: BackupLocation(path=Path(enc_tar), protected=True, size_bytes=10240), + ".cloud_backup": BackupLocation( + path=Path(unc_tar), + protected=False, + size_bytes=10240, + ), } # pylint: disable=fixme @@ -1173,12 +1187,12 @@ async def test_backup_mixed_encryption(api_client: TestClient, coresys: CoreSys) backup = coresys.backups.get("d9c48f8b") assert backup.all_locations == { - None: {"path": Path(enc_tar), "protected": True, "size_bytes": 10240}, - ".cloud_backup": { - "path": Path(unc_tar), - "protected": False, - "size_bytes": 10240, - }, + None: BackupLocation(path=Path(enc_tar), protected=True, size_bytes=10240), + ".cloud_backup": BackupLocation( + path=Path(unc_tar), + protected=False, + size_bytes=10240, + ), } resp = 
await api_client.get("/backups") diff --git a/tests/backups/conftest.py b/tests/backups/conftest.py index 7fa2910f5..9683f85b9 100644 --- a/tests/backups/conftest.py +++ b/tests/backups/conftest.py @@ -1,9 +1,11 @@ """Mock test.""" +from pathlib import Path from unittest.mock import AsyncMock, MagicMock, patch import pytest +from supervisor.backups.backup import BackupLocation from supervisor.backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE, BackupType from supervisor.backups.validate import ALL_FOLDERS from supervisor.coresys import CoreSys @@ -41,7 +43,9 @@ def partial_backup_mock(backup_mock): backup_instance.addon_list = [TEST_ADDON_SLUG] backup_instance.supervisor_version = "9999.09.9.dev9999" backup_instance.location = None - backup_instance.all_locations = {None: {"protected": False}} + backup_instance.all_locations = { + None: BackupLocation(path=Path("/"), protected=False, size_bytes=0) + } backup_instance.validate_backup = AsyncMock() yield backup_mock @@ -55,7 +59,9 @@ def full_backup_mock(backup_mock): backup_instance.addon_list = [TEST_ADDON_SLUG] backup_instance.supervisor_version = "9999.09.9.dev9999" backup_instance.location = None - backup_instance.all_locations = {None: {"protected": False}} + backup_instance.all_locations = { + None: BackupLocation(path=Path("/"), protected=False, size_bytes=0) + } backup_instance.validate_backup = AsyncMock() yield backup_mock diff --git a/tests/backups/test_backup.py b/tests/backups/test_backup.py index 10e8ca7c0..f6fd9d082 100644 --- a/tests/backups/test_backup.py +++ b/tests/backups/test_backup.py @@ -9,7 +9,7 @@ from unittest.mock import MagicMock, patch import pytest -from supervisor.backups.backup import Backup +from supervisor.backups.backup import Backup, BackupLocation from supervisor.backups.const import BackupType from supervisor.coresys import CoreSys from supervisor.exceptions import ( @@ -86,7 +86,7 @@ async def test_consolidate_conflict_varied_encryption( in caplog.text ) assert enc_backup.all_locations == { - None: {"path": unc_tar, "protected": False, "size_bytes": 10240} + None: BackupLocation(path=unc_tar, protected=False, size_bytes=10240), } @@ -112,8 +112,8 @@ async def test_consolidate( not in caplog.text ) assert enc_backup.all_locations == { - None: {"path": enc_tar, "protected": True, "size_bytes": 10240}, - "backup_test": {"path": unc_tar, "protected": False, "size_bytes": 10240}, + None: BackupLocation(path=enc_tar, protected=True, size_bytes=10240), + "backup_test": BackupLocation(path=unc_tar, protected=False, size_bytes=10240), } diff --git a/tests/backups/test_manager.py b/tests/backups/test_manager.py index d04175032..be2db815c 100644 --- a/tests/backups/test_manager.py +++ b/tests/backups/test_manager.py @@ -14,7 +14,7 @@ import pytest from supervisor.addons.addon import Addon from supervisor.addons.const import AddonBackupMode from supervisor.addons.model import AddonModel -from supervisor.backups.backup import Backup +from supervisor.backups.backup import Backup, BackupLocation from supervisor.backups.const import LOCATION_TYPE, BackupType from supervisor.backups.manager import BackupManager from supervisor.const import FOLDER_HOMEASSISTANT, FOLDER_SHARE, AddonState, CoreState @@ -344,13 +344,13 @@ async def test_fail_invalid_full_backup( await manager.do_restore_full(partial_backup_mock.return_value) backup_instance = full_backup_mock.return_value - backup_instance.all_locations[None]["protected"] = True + backup_instance.all_locations[None].protected = True 
backup_instance.validate_backup.side_effect = BackupInvalidError() with pytest.raises(BackupInvalidError): await manager.do_restore_full(backup_instance) - backup_instance.all_locations[None]["protected"] = False + backup_instance.all_locations[None].protected = False backup_instance.supervisor_version = "2022.08.4" with ( patch.object( @@ -373,13 +373,13 @@ async def test_fail_invalid_partial_backup( manager = await BackupManager(coresys).load_config() backup_instance = partial_backup_mock.return_value - backup_instance.all_locations[None]["protected"] = True + backup_instance.all_locations[None].protected = True backup_instance.validate_backup.side_effect = BackupInvalidError() with pytest.raises(BackupInvalidError): await manager.do_restore_partial(backup_instance) - backup_instance.all_locations[None]["protected"] = False + backup_instance.all_locations[None].protected = False backup_instance.homeassistant = None with pytest.raises(BackupInvalidError): @@ -1747,7 +1747,7 @@ async def test_backup_remove_error( assert (backup := coresys.backups.get("7fed74c8")) assert location_name in backup.all_locations - backup.all_locations[location_name]["path"] = (tar_file_mock := MagicMock()) + backup.all_locations[location_name].path = (tar_file_mock := MagicMock()) tar_file_mock.unlink.side_effect = (err := OSError()) err.errno = errno.EBUSY @@ -2001,8 +2001,10 @@ async def test_backup_remove_multiple_locations(coresys: CoreSys): await coresys.backups.reload() assert (backup := coresys.backups.get("7fed74c8")) assert backup.all_locations == { - None: {"path": location_1, "protected": False, "size_bytes": 10240}, - ".cloud_backup": {"path": location_2, "protected": False, "size_bytes": 10240}, + None: BackupLocation(path=location_1, protected=False, size_bytes=10240), + ".cloud_backup": BackupLocation( + path=location_2, protected=False, size_bytes=10240 + ), } await coresys.backups.remove(backup) @@ -2021,8 +2023,10 @@ async def test_backup_remove_one_location_of_multiple(coresys: CoreSys): await coresys.backups.reload() assert (backup := coresys.backups.get("7fed74c8")) assert backup.all_locations == { - None: {"path": location_1, "protected": False, "size_bytes": 10240}, - ".cloud_backup": {"path": location_2, "protected": False, "size_bytes": 10240}, + None: BackupLocation(path=location_1, protected=False, size_bytes=10240), + ".cloud_backup": BackupLocation( + path=location_2, protected=False, size_bytes=10240 + ), } await coresys.backups.remove(backup, locations=[".cloud_backup"]) @@ -2030,7 +2034,7 @@ async def test_backup_remove_one_location_of_multiple(coresys: CoreSys): assert not location_2.exists() assert coresys.backups.get("7fed74c8") assert backup.all_locations == { - None: {"path": location_1, "protected": False, "size_bytes": 10240} + None: BackupLocation(path=location_1, protected=False, size_bytes=10240), } @@ -2074,7 +2078,7 @@ async def test_remove_non_existing_backup_raises( assert (backup := coresys.backups.get("7fed74c8")) assert None in backup.all_locations - backup.all_locations[None]["path"] = (tar_file_mock := MagicMock()) + backup.all_locations[None].path = (tar_file_mock := MagicMock()) tar_file_mock.unlink.side_effect = (err := FileNotFoundError()) err.errno = errno.ENOENT diff --git a/tests/dbus/agent/boards/test_green.py b/tests/dbus/agent/boards/test_green.py index 6875431cb..eca4108d8 100644 --- a/tests/dbus/agent/boards/test_green.py +++ b/tests/dbus/agent/boards/test_green.py @@ -24,7 +24,7 @@ async def test_dbus_green(green_service: GreenService, 
dbus_session_bus: Message
     green = await Green().load_config()
     await green.connect(dbus_session_bus)
 
-    assert green.name == "Green"
+    assert green.board_name == "Green"
     assert green.activity_led is True
     assert green.power_led is True
     assert green.user_led is True
diff --git a/tests/dbus/agent/boards/test_yellow.py b/tests/dbus/agent/boards/test_yellow.py
index 817c9382a..13b6b90be 100644
--- a/tests/dbus/agent/boards/test_yellow.py
+++ b/tests/dbus/agent/boards/test_yellow.py
@@ -24,7 +24,7 @@ async def test_dbus_yellow(yellow_service: YellowService, dbus_session_bus: Mess
     yellow = await Yellow().load_config()
     await yellow.connect(dbus_session_bus)
 
-    assert yellow.name == "Yellow"
+    assert yellow.board_name == "Yellow"
     assert yellow.disk_led is True
     assert yellow.heartbeat_led is True
     assert yellow.power_led is True
diff --git a/tests/dbus/test_interface.py b/tests/dbus/test_interface.py
index 63228da6f..cb1c083a5 100644
--- a/tests/dbus/test_interface.py
+++ b/tests/dbus/test_interface.py
@@ -38,6 +38,14 @@ class TestInterface(DBusServiceMock):
         return 4
 
 
+class ServiceTest(DBusInterfaceProxy):
+    """DBus test class."""
+
+    bus_name = "service.test.TestInterface"
+    object_path = "/service/test/TestInterface"
+    properties_interface = "service.test.TestInterface"
+
+
 @pytest.fixture(name="test_service")
 async def fixture_test_service(dbus_session_bus: MessageBus) -> TestInterface:
     """Export test interface on dbus."""
@@ -54,12 +62,8 @@ async def fixture_proxy(
     dbus_session_bus: MessageBus,
 ) -> DBusInterfaceProxy:
     """Get a proxy."""
-    proxy = DBusInterfaceProxy()
-    proxy.bus_name = "service.test.TestInterface"
-    proxy.object_path = "/service/test/TestInterface"
-    proxy.properties_interface = "service.test.TestInterface"
+    proxy = ServiceTest()
     proxy.sync_properties = getattr(request, "param", True)
-
     await proxy.connect(dbus_session_bus)
 
     yield proxy
@@ -122,10 +126,7 @@ async def test_dbus_connected_no_raise_after_shutdown(
     test_service: TestInterface, dbus_session_bus: MessageBus
 ):
     """Test dbus connected methods do not raise DBusNotConnectedError after shutdown."""
-    proxy = DBusInterfaceProxy()
-    proxy.bus_name = "service.test.TestInterface"
-    proxy.object_path = "/service/test/TestInterface"
-    proxy.properties_interface = "service.test.TestInterface"
+    proxy = ServiceTest()
     proxy.sync_properties = False
 
     with pytest.raises(DBusNotConnectedError):
@@ -141,10 +142,13 @@ async def test_dbus_connected_no_raise_after_shutdown(
 
 async def test_proxy_missing_properties_interface(dbus_session_bus: MessageBus):
     """Test proxy instance disconnects and errors when missing properties interface."""
-    proxy = DBusInterfaceProxy()
-    proxy.bus_name = "test.no.properties.interface"
-    proxy.object_path = DBUS_OBJECT_BASE
-    proxy.properties_interface = "test.no.properties.interface"
+
+    class NoPropertiesService(DBusInterfaceProxy):
+        bus_name = "test.no.properties.interface"
+        object_path = DBUS_OBJECT_BASE
+        properties_interface = "test.no.properties.interface"
+
+    proxy = NoPropertiesService()
 
     def mock_introspect(*args, **kwargs):
         """Return introspection without properties."""
@@ -163,10 +167,12 @@ async def test_proxy_missing_properties_interface(dbus_session_bus: MessageBus):
 
 async def test_initialize(test_service: TestInterface, dbus_session_bus: MessageBus):
     """Test initialize for reusing connected dbus object."""
-    proxy = DBusInterface()
-    proxy.bus_name = "service.test.TestInterface"
-    proxy.object_path = "/service/test/TestInterface"
+    class ServiceTestInterfaceOnly(DBusInterface):
+        bus_name = "service.test.TestInterface"
+        object_path = "/service/test/TestInterface"
+
+    proxy = ServiceTestInterfaceOnly()
 
     assert proxy.is_connected is False
 
     # Not connected