From 6ef4f3cc67aeca46d5d3c547b88b193b3040a4bb Mon Sep 17 00:00:00 2001 From: Mike Degatano Date: Thu, 6 Mar 2025 16:40:13 -0500 Subject: [PATCH] Add blockbuster library and find I/O from unit tests (#5731) * Add blockbuster library and find I/O from unit tests * Fix lint and test issue * Fixes from feedback * Avoid modifying webapp object in executor * Split su options validation and only validate timezone on change --- requirements.txt | 1 + supervisor/addons/addon.py | 57 +++++++++++-------- supervisor/addons/build.py | 34 ++++++----- supervisor/addons/utils.py | 22 +++---- supervisor/api/__init__.py | 29 ++++++++-- supervisor/api/backups.py | 2 +- supervisor/api/supervisor.py | 14 +++-- supervisor/api/utils.py | 4 +- supervisor/backups/backup.py | 6 +- supervisor/backups/manager.py | 18 +++--- supervisor/bootstrap.py | 4 +- supervisor/config.py | 29 ++++++++-- supervisor/core.py | 2 +- supervisor/coresys.py | 22 +++++-- supervisor/dbus/timedate.py | 29 +++++++++- supervisor/docker/addon.py | 14 +++-- supervisor/docker/audio.py | 4 +- supervisor/docker/homeassistant.py | 4 +- supervisor/hardware/manager.py | 15 ++++- supervisor/hardware/monitor.py | 4 +- supervisor/homeassistant/secrets.py | 23 ++++---- supervisor/host/info.py | 7 ++- supervisor/host/logs.py | 13 ++++- supervisor/host/manager.py | 6 ++ supervisor/store/__init__.py | 2 +- supervisor/store/git.py | 2 +- supervisor/utils/dt.py | 5 +- supervisor/utils/validate.py | 5 +- tests/addons/test_build.py | 22 ++++--- tests/addons/test_manager.py | 2 +- tests/api/test_addons.py | 2 +- tests/api/test_host.py | 2 +- tests/api/test_panel.py | 23 ++++++++ tests/api/test_supervisor.py | 14 +++++ tests/common.py | 6 +- tests/conftest.py | 19 +++++++ tests/host/test_logs.py | 12 ++-- tests/resolution/check/test_check.py | 2 +- .../test_evaluate_operating_system.py | 8 ++- .../evaluation/test_evaluate_os_agent.py | 2 +- .../evaluation/test_evaluate_systemd.py | 2 +- .../test_evaluate_systemd_journal.py | 21 +++---- 
tests/resolution/fixup/test_fixup.py | 4 +- tests/store/test_store_manager.py | 1 + tests/test_coresys.py | 6 +- 45 files changed, 374 insertions(+), 151 deletions(-) create mode 100644 tests/api/test_panel.py diff --git a/requirements.txt b/requirements.txt index 0faababe3..df6ca728a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,6 +3,7 @@ aiohttp==3.11.13 atomicwrites-homeassistant==1.4.1 attrs==25.1.0 awesomeversion==24.6.0 +blockbuster==1.5.23 brotli==1.1.0 ciso8601==2.3.2 colorlog==6.9.0 diff --git a/supervisor/addons/addon.py b/supervisor/addons/addon.py index 9894dd2d7..e58dbf9a5 100644 --- a/supervisor/addons/addon.py +++ b/supervisor/addons/addon.py @@ -753,9 +753,12 @@ class Addon(AddonModel): for listener in self._listeners: self.sys_bus.remove_listener(listener) - if self.path_data.is_dir(): - _LOGGER.info("Removing add-on data folder %s", self.path_data) - await remove_data(self.path_data) + def remove_data_dir(): + if self.path_data.is_dir(): + _LOGGER.info("Removing add-on data folder %s", self.path_data) + remove_data(self.path_data) + + await self.sys_run_in_executor(remove_data_dir) async def _check_ingress_port(self): """Assign a ingress port if dynamic port selection is used.""" @@ -777,11 +780,14 @@ class Addon(AddonModel): await self.sys_addons.data.install(self.addon_store) await self.load() - if not self.path_data.is_dir(): - _LOGGER.info( - "Creating Home Assistant add-on data folder %s", self.path_data - ) - self.path_data.mkdir() + def setup_data(): + if not self.path_data.is_dir(): + _LOGGER.info( + "Creating Home Assistant add-on data folder %s", self.path_data + ) + self.path_data.mkdir() + + await self.sys_run_in_executor(setup_data) # Setup/Fix AppArmor profile await self.install_apparmor() @@ -820,14 +826,17 @@ class Addon(AddonModel): await self.unload() - # Remove config if present and requested - if self.addon_config_used and remove_config: - await remove_data(self.path_config) + def cleanup_config_and_audio(): + # 
Remove config if present and requested + if self.addon_config_used and remove_config: + remove_data(self.path_config) - # Cleanup audio settings - if self.path_pulse.exists(): - with suppress(OSError): - self.path_pulse.unlink() + # Cleanup audio settings + if self.path_pulse.exists(): + with suppress(OSError): + self.path_pulse.unlink() + + await self.sys_run_in_executor(cleanup_config_and_audio) # Cleanup AppArmor profile with suppress(HostAppArmorError): @@ -968,7 +977,7 @@ class Addon(AddonModel): async def install_apparmor(self) -> None: """Install or Update AppArmor profile for Add-on.""" exists_local = self.sys_host.apparmor.exists(self.slug) - exists_addon = self.path_apparmor.exists() + exists_addon = await self.sys_run_in_executor(self.path_apparmor.exists) # Nothing to do if not exists_local and not exists_addon: @@ -1444,6 +1453,12 @@ class Addon(AddonModel): # Restore data and config def _restore_data(): """Restore data and config.""" + _LOGGER.info("Restoring data and config for addon %s", self.slug) + if self.path_data.is_dir(): + remove_data(self.path_data) + if self.path_config.is_dir(): + remove_data(self.path_config) + temp_data = Path(tmp.name, "data") if temp_data.is_dir(): shutil.copytree(temp_data, self.path_data, symlinks=True) @@ -1456,12 +1471,6 @@ class Addon(AddonModel): elif self.addon_config_used: self.path_config.mkdir() - _LOGGER.info("Restoring data and config for addon %s", self.slug) - if self.path_data.is_dir(): - await remove_data(self.path_data) - if self.path_config.is_dir(): - await remove_data(self.path_config) - try: await self.sys_run_in_executor(_restore_data) except shutil.Error as err: @@ -1471,7 +1480,7 @@ class Addon(AddonModel): # Restore AppArmor profile_file = Path(tmp.name, "apparmor.txt") - if profile_file.exists(): + if await self.sys_run_in_executor(profile_file.exists): try: await self.sys_host.apparmor.load_profile( self.slug, profile_file @@ -1492,7 +1501,7 @@ class Addon(AddonModel): if data[ATTR_STATE] == 
AddonState.STARTED: wait_for_start = await self.start() finally: - tmp.cleanup() + await self.sys_run_in_executor(tmp.cleanup) _LOGGER.info("Finished restore for add-on %s", self.slug) return wait_for_start diff --git a/supervisor/addons/build.py b/supervisor/addons/build.py index 3760908c1..ce49cc9f5 100644 --- a/supervisor/addons/build.py +++ b/supervisor/addons/build.py @@ -81,13 +81,6 @@ class AddonBuild(FileConfiguration, CoreSysAttributes): ) return self._data[ATTR_BUILD_FROM][self.arch] - @property - def dockerfile(self) -> Path: - """Return Dockerfile path.""" - if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists(): - return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}") - return self.addon.path_location.joinpath("Dockerfile") - @property def squash(self) -> bool: """Return True or False if squash is active.""" @@ -103,25 +96,40 @@ class AddonBuild(FileConfiguration, CoreSysAttributes): """Return additional Docker labels.""" return self._data[ATTR_LABELS] - @property - def is_valid(self) -> bool: + def get_dockerfile(self) -> Path: + """Return Dockerfile path. + + Must be run in executor. + """ + if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists(): + return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}") + return self.addon.path_location.joinpath("Dockerfile") + + async def is_valid(self) -> bool: """Return true if the build env is valid.""" - try: + + def build_is_valid() -> bool: return all( [ self.addon.path_location.is_dir(), - self.dockerfile.is_file(), + self.get_dockerfile().is_file(), ] ) + + try: + return await self.sys_run_in_executor(build_is_valid) except HassioArchNotFound: return False def get_docker_args(self, version: AwesomeVersion, image: str | None = None): - """Create a dict with Docker build arguments.""" + """Create a dict with Docker build arguments. + + Must be run in executor. 
+ """ args = { "path": str(self.addon.path_location), "tag": f"{image or self.addon.image}:{version!s}", - "dockerfile": str(self.dockerfile), + "dockerfile": str(self.get_dockerfile()), "pull": True, "forcerm": not self.sys_dev, "squash": self.squash, diff --git a/supervisor/addons/utils.py b/supervisor/addons/utils.py index a88f903ec..9cf151b30 100644 --- a/supervisor/addons/utils.py +++ b/supervisor/addons/utils.py @@ -2,9 +2,9 @@ from __future__ import annotations -import asyncio import logging from pathlib import Path +import subprocess from typing import TYPE_CHECKING from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE @@ -86,18 +86,20 @@ def rating_security(addon: AddonModel) -> int: return max(min(8, rating), 1) -async def remove_data(folder: Path) -> None: - """Remove folder and reset privileged.""" - try: - proc = await asyncio.create_subprocess_exec( - "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL - ) +def remove_data(folder: Path) -> None: + """Remove folder and reset privileged. - _, error_msg = await proc.communicate() + Must be run in executor. 
+ """ + try: + subprocess.run( + ["rm", "-rf", str(folder)], stdout=subprocess.DEVNULL, text=True, check=True + ) except OSError as err: error_msg = str(err) + except subprocess.CalledProcessError as procerr: + error_msg = procerr.stderr.strip() else: - if proc.returncode == 0: - return + return _LOGGER.error("Can't remove Add-on Data: %s", error_msg) diff --git a/supervisor/api/__init__.py b/supervisor/api/__init__.py index fe3f17ace..7c0c33d93 100644 --- a/supervisor/api/__init__.py +++ b/supervisor/api/__init__.py @@ -1,5 +1,6 @@ """Init file for Supervisor RESTful API.""" +from dataclasses import dataclass from functools import partial import logging from pathlib import Path @@ -47,6 +48,14 @@ MAX_CLIENT_SIZE: int = 1024**2 * 16 MAX_LINE_SIZE: int = 24570 +@dataclass(slots=True, frozen=True) +class StaticResourceConfig: + """Configuration for a static resource.""" + + prefix: str + path: Path + + class RestAPI(CoreSysAttributes): """Handle RESTful API for Supervisor.""" @@ -77,6 +86,8 @@ class RestAPI(CoreSysAttributes): async def load(self) -> None: """Register REST API Calls.""" + static_resource_configs: list[StaticResourceConfig] = [] + self._api_host = APIHost() self._api_host.coresys = self.coresys @@ -98,7 +109,7 @@ class RestAPI(CoreSysAttributes): self._register_network() self._register_observer() self._register_os() - self._register_panel() + static_resource_configs.extend(self._register_panel()) self._register_proxy() self._register_resolution() self._register_root() @@ -107,6 +118,17 @@ class RestAPI(CoreSysAttributes): self._register_store() self._register_supervisor() + if static_resource_configs: + + def process_configs() -> list[web.StaticResource]: + return [ + web.StaticResource(config.prefix, config.path) + for config in static_resource_configs + ] + + for resource in await self.sys_run_in_executor(process_configs): + self.webapp.router.register_resource(resource) + await self.start() def _register_advanced_logs(self, path: str, 
syslog_identifier: str): @@ -750,10 +772,9 @@ class RestAPI(CoreSysAttributes): ] ) - def _register_panel(self) -> None: + def _register_panel(self) -> list[StaticResourceConfig]: """Register panel for Home Assistant.""" - panel_dir = Path(__file__).parent.joinpath("panel") - self.webapp.add_routes([web.static("/app", panel_dir)]) + return [StaticResourceConfig("/app", Path(__file__).parent.joinpath("panel"))] def _register_docker(self) -> None: """Register docker configuration functions.""" diff --git a/supervisor/api/backups.py b/supervisor/api/backups.py index 382a3e12c..d7f067ba7 100644 --- a/supervisor/api/backups.py +++ b/supervisor/api/backups.py @@ -475,7 +475,7 @@ class APIBackups(CoreSysAttributes): _LOGGER.info("Downloading backup %s", backup.slug) filename = backup.all_locations[location][ATTR_PATH] # If the file is missing, return 404 and trigger reload of location - if not filename.is_file(): + if not await self.sys_run_in_executor(filename.is_file): self.sys_create_task(self.sys_backups.reload(location)) return web.Response(status=404) diff --git a/supervisor/api/supervisor.py b/supervisor/api/supervisor.py index 9b275a21f..8b012a0f2 100644 --- a/supervisor/api/supervisor.py +++ b/supervisor/api/supervisor.py @@ -60,7 +60,7 @@ SCHEMA_OPTIONS = vol.Schema( { vol.Optional(ATTR_CHANNEL): vol.Coerce(UpdateChannel), vol.Optional(ATTR_ADDONS_REPOSITORIES): repositories, - vol.Optional(ATTR_TIMEZONE): validate_timezone, + vol.Optional(ATTR_TIMEZONE): str, vol.Optional(ATTR_WAIT_BOOT): wait_boot, vol.Optional(ATTR_LOGGING): vol.Coerce(LogLevel), vol.Optional(ATTR_DEBUG): vol.Boolean(), @@ -127,12 +127,18 @@ class APISupervisor(CoreSysAttributes): """Set Supervisor options.""" body = await api_validate(SCHEMA_OPTIONS, request) + # Timezone must be first as validation is incomplete + # If a timezone is present we do that validation after in the executor + if ( + ATTR_TIMEZONE in body + and (timezone := body[ATTR_TIMEZONE]) != self.sys_config.timezone + ): + 
await self.sys_run_in_executor(validate_timezone, timezone) + await self.sys_config.set_timezone(timezone) + if ATTR_CHANNEL in body: self.sys_updater.channel = body[ATTR_CHANNEL] - if ATTR_TIMEZONE in body: - self.sys_config.timezone = body[ATTR_TIMEZONE] - if ATTR_DEBUG in body: self.sys_config.debug = body[ATTR_DEBUG] diff --git a/supervisor/api/utils.py b/supervisor/api/utils.py index 1dc4b2781..4213632ae 100644 --- a/supervisor/api/utils.py +++ b/supervisor/api/utils.py @@ -174,7 +174,9 @@ def api_return_ok(data: dict[str, Any] | None = None) -> web.Response: async def api_validate( - schema: vol.Schema, request: web.Request, origin: list[str] | None = None + schema: vol.Schema, + request: web.Request, + origin: list[str] | None = None, ) -> dict[str, Any]: """Validate request data with schema.""" data: dict[str, Any] = await request.json(loads=json_loads) diff --git a/supervisor/backups/backup.py b/supervisor/backups/backup.py index 78c73f72c..c5dd67054 100644 --- a/supervisor/backups/backup.py +++ b/supervisor/backups/backup.py @@ -542,7 +542,7 @@ class Backup(JobGroup): raise err finally: if self._tmp: - self._tmp.cleanup() + await self.sys_run_in_executor(self._tmp.cleanup) async def _create_cleanup(self, outer_tarfile: TarFile) -> None: """Cleanup after backup creation. 
@@ -846,7 +846,9 @@ class Backup(JobGroup): await self.sys_homeassistant.backup(homeassistant_file, exclude_database) # Store size - self.homeassistant[ATTR_SIZE] = homeassistant_file.size + self.homeassistant[ATTR_SIZE] = await self.sys_run_in_executor( + getattr, homeassistant_file, "size" + ) @Job(name="backup_restore_homeassistant", cleanup=False) async def restore_homeassistant(self) -> Awaitable[None]: diff --git a/supervisor/backups/manager.py b/supervisor/backups/manager.py index d283342b7..8a2d3d80c 100644 --- a/supervisor/backups/manager.py +++ b/supervisor/backups/manager.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import Awaitable, Iterable +from collections.abc import Awaitable import errno import logging from pathlib import Path @@ -179,12 +179,18 @@ class BackupManager(FileConfiguration, JobGroup): ) self.sys_jobs.current.stage = stage - def _list_backup_files(self, path: Path) -> Iterable[Path]: + async def _list_backup_files(self, path: Path) -> list[Path]: """Return iterable of backup files, suppress and log OSError for network mounts.""" - try: + + def find_backups() -> list[Path]: # is_dir does a stat syscall which raises if the mount is down + # Returning an iterator causes I/O while iterating, coerce into list here if path.is_dir(): - return path.glob("*.tar") + return list(path.glob("*.tar")) + return [] + + try: + return await self.sys_run_in_executor(find_backups) except OSError as err: if err.errno == errno.EBADMSG and path in { self.sys_config.path_backup, @@ -278,9 +284,7 @@ class BackupManager(FileConfiguration, JobGroup): tasks = [ self.sys_create_task(_load_backup(_location, tar_file)) for _location, path in locations.items() - for tar_file in await self.sys_run_in_executor( - self._list_backup_files, path - ) + for tar_file in await self._list_backup_files(path) ] _LOGGER.info("Found %d backup files", len(tasks)) diff --git a/supervisor/bootstrap.py b/supervisor/bootstrap.py index 
991cdbdb2..b2f40a980 100644 --- a/supervisor/bootstrap.py +++ b/supervisor/bootstrap.py @@ -70,8 +70,8 @@ async def initialize_coresys() -> CoreSys: coresys.homeassistant = await HomeAssistant(coresys).load_config() coresys.addons = await AddonManager(coresys).load_config() coresys.backups = await BackupManager(coresys).load_config() - coresys.host = HostManager(coresys) - coresys.hardware = HardwareManager(coresys) + coresys.host = await HostManager(coresys).post_init() + coresys.hardware = await HardwareManager(coresys).post_init() coresys.ingress = await Ingress(coresys).load_config() coresys.tasks = Tasks(coresys) coresys.services = await ServiceManager(coresys).load_config() diff --git a/supervisor/config.py b/supervisor/config.py index 9585106a5..6d6d064cc 100644 --- a/supervisor/config.py +++ b/supervisor/config.py @@ -1,6 +1,7 @@ """Bootstrap Supervisor.""" -from datetime import UTC, datetime +import asyncio +from datetime import UTC, datetime, tzinfo import logging import os from pathlib import Path, PurePath @@ -24,7 +25,7 @@ from .const import ( LogLevel, ) from .utils.common import FileConfiguration -from .utils.dt import parse_datetime +from .utils.dt import get_time_zone, parse_datetime from .validate import SCHEMA_SUPERVISOR_CONFIG _LOGGER: logging.Logger = logging.getLogger(__name__) @@ -66,6 +67,7 @@ class CoreConfig(FileConfiguration): def __init__(self): """Initialize config object.""" super().__init__(FILE_HASSIO_CONFIG, SCHEMA_SUPERVISOR_CONFIG) + self._timezone_tzinfo: tzinfo | None = None @property def timezone(self) -> str | None: @@ -76,12 +78,19 @@ class CoreConfig(FileConfiguration): self._data.pop(ATTR_TIMEZONE, None) return None - @timezone.setter - def timezone(self, value: str) -> None: + @property + def timezone_tzinfo(self) -> tzinfo | None: + """Return system timezone as tzinfo object.""" + return self._timezone_tzinfo + + async def set_timezone(self, value: str) -> None: """Set system timezone.""" if value == _UTC: return 
self._data[ATTR_TIMEZONE] = value + self._timezone_tzinfo = await asyncio.get_running_loop().run_in_executor( + None, get_time_zone, value + ) @property def version(self) -> AwesomeVersion: @@ -390,3 +399,15 @@ class CoreConfig(FileConfiguration): def extern_to_local_path(self, path: PurePath) -> Path: """Translate a path relative to extern supervisor data to its path in the container.""" return self.path_supervisor / path.relative_to(self.path_extern_supervisor) + + async def read_data(self) -> None: + """Read configuration file.""" + timezone = self.timezone + await super().read_data() + + if not self.timezone: + self._timezone_tzinfo = None + elif timezone != self.timezone: + self._timezone_tzinfo = await asyncio.get_running_loop().run_in_executor( + None, get_time_zone, self.timezone + ) diff --git a/supervisor/core.py b/supervisor/core.py index 5a61b36ce..19795365b 100644 --- a/supervisor/core.py +++ b/supervisor/core.py @@ -399,7 +399,7 @@ class Core(CoreSysAttributes): _LOGGER.warning("Can't adjust Time/Date settings: %s", err) return - self.sys_config.timezone = self.sys_config.timezone or data.timezone + await self.sys_config.set_timezone(self.sys_config.timezone or data.timezone) # Calculate if system time is out of sync delta = data.dt_utc - utcnow() diff --git a/supervisor/coresys.py b/supervisor/coresys.py index e525fc4b1..a7d6f0000 100644 --- a/supervisor/coresys.py +++ b/supervisor/coresys.py @@ -5,7 +5,7 @@ from __future__ import annotations import asyncio from collections.abc import Callable, Coroutine from contextvars import Context, copy_context -from datetime import datetime +from datetime import UTC, datetime, tzinfo from functools import partial import logging import os @@ -22,7 +22,6 @@ from .const import ( MACHINE_ID, SERVER_SOFTWARE, ) -from .utils.dt import UTC, get_time_zone if TYPE_CHECKING: from .addons.manager import AddonManager @@ -143,13 +142,19 @@ class CoreSys: """Return system timezone.""" if self.config.timezone: return 
self.config.timezone - # pylint bug with python 3.12.4 (https://github.com/pylint-dev/pylint/issues/9811) - # pylint: disable=no-member if self.host.info.timezone: return self.host.info.timezone - # pylint: enable=no-member return "UTC" + @property + def timezone_tzinfo(self) -> tzinfo: + """Return system timezone as tzinfo object.""" + if self.config.timezone_tzinfo: + return self.config.timezone_tzinfo + if self.host.info.timezone_tzinfo: + return self.host.info.timezone_tzinfo + return UTC + @property def loop(self) -> asyncio.BaseEventLoop: """Return loop object.""" @@ -555,7 +560,7 @@ class CoreSys: def now(self) -> datetime: """Return now in local timezone.""" - return datetime.now(get_time_zone(self.timezone) or UTC) + return datetime.now(self.timezone_tzinfo) def add_set_task_context_callback( self, callback: Callable[[Context], Context] @@ -642,6 +647,11 @@ class CoreSysAttributes: """Return running machine type of the Supervisor system.""" return self.coresys.machine + @property + def sys_machine_id(self) -> str | None: + """Return machine id.""" + return self.coresys.machine_id + @property def sys_dev(self) -> bool: """Return True if we run dev mode.""" diff --git a/supervisor/dbus/timedate.py b/supervisor/dbus/timedate.py index 44c662453..0ebe5a5d5 100644 --- a/supervisor/dbus/timedate.py +++ b/supervisor/dbus/timedate.py @@ -1,12 +1,14 @@ """Interface to systemd-timedate over D-Bus.""" -from datetime import datetime +import asyncio +from datetime import datetime, tzinfo import logging +from typing import Any from dbus_fast.aio.message_bus import MessageBus from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError -from ..utils.dt import utc_from_timestamp +from ..utils.dt import get_time_zone, utc_from_timestamp from .const import ( DBUS_ATTR_NTP, DBUS_ATTR_NTPSYNCHRONIZED, @@ -33,6 +35,11 @@ class TimeDate(DBusInterfaceProxy): object_path: str = DBUS_OBJECT_TIMEDATE properties_interface: str = DBUS_IFACE_TIMEDATE + def 
__init__(self): + """Initialize object.""" + super().__init__() + self._timezone_tzinfo: tzinfo | None = None + @property @dbus_property def timezone(self) -> str: @@ -57,6 +64,11 @@ class TimeDate(DBusInterfaceProxy): """Return the system UTC time.""" return utc_from_timestamp(self.properties[DBUS_ATTR_TIMEUSEC] / 1000000) + @property + def timezone_tzinfo(self) -> tzinfo | None: + """Return timezone as tzinfo object.""" + return self._timezone_tzinfo + async def connect(self, bus: MessageBus): """Connect to D-Bus.""" _LOGGER.info("Load dbus interface %s", self.name) @@ -69,6 +81,19 @@ class TimeDate(DBusInterfaceProxy): "No timedate support on the host. Time/Date functions have been disabled." ) + @dbus_connected + async def update(self, changed: dict[str, Any] | None = None) -> None: + """Update properties via D-Bus.""" + timezone = self.timezone + await super().update(changed) + + if not self.timezone: + self._timezone_tzinfo = None + elif timezone != self.timezone: + self._timezone_tzinfo = await asyncio.get_running_loop().run_in_executor( + None, get_time_zone, self.timezone + ) + @dbus_connected async def set_time(self, utc: datetime) -> None: """Set time & date on host as UTC.""" diff --git a/supervisor/docker/addon.py b/supervisor/docker/addon.py index 6f02e3130..900805ff7 100644 --- a/supervisor/docker/addon.py +++ b/supervisor/docker/addon.py @@ -665,18 +665,20 @@ class DockerAddon(DockerInterface): async def _build(self, version: AwesomeVersion, image: str | None = None) -> None: """Build a Docker container.""" build_env = await AddonBuild(self.coresys, self.addon).load_config() - if not build_env.is_valid: + if not await build_env.is_valid(): _LOGGER.error("Invalid build environment, can't build this add-on!") raise DockerError() _LOGGER.info("Starting build for %s:%s", self.image, version) - try: - image, log = await self.sys_run_in_executor( - self.sys_docker.images.build, - use_config_proxy=False, - **build_env.get_docker_args(version, image), + + 
def build_image(): + return self.sys_docker.images.build( + use_config_proxy=False, **build_env.get_docker_args(version, image) ) + try: + image, log = await self.sys_run_in_executor(build_image) + _LOGGER.debug("Build %s:%s done: %s", self.image, version, log) # Update meta data diff --git a/supervisor/docker/audio.py b/supervisor/docker/audio.py index 20d1cd65a..360621579 100644 --- a/supervisor/docker/audio.py +++ b/supervisor/docker/audio.py @@ -5,7 +5,7 @@ import logging import docker from docker.types import Mount -from ..const import DOCKER_CPU_RUNTIME_ALLOCATION, MACHINE_ID +from ..const import DOCKER_CPU_RUNTIME_ALLOCATION from ..coresys import CoreSysAttributes from ..exceptions import DockerJobError from ..hardware.const import PolicyGroup @@ -57,7 +57,7 @@ class DockerAudio(DockerInterface, CoreSysAttributes): ] # Machine ID - if MACHINE_ID.exists(): + if self.sys_machine_id: mounts.append(MOUNT_MACHINE_ID) return mounts diff --git a/supervisor/docker/homeassistant.py b/supervisor/docker/homeassistant.py index a33579268..3f1e4b99a 100644 --- a/supervisor/docker/homeassistant.py +++ b/supervisor/docker/homeassistant.py @@ -8,7 +8,7 @@ import re from awesomeversion import AwesomeVersion, AwesomeVersionCompareException from docker.types import Mount -from ..const import LABEL_MACHINE, MACHINE_ID +from ..const import LABEL_MACHINE from ..exceptions import DockerJobError from ..hardware.const import PolicyGroup from ..homeassistant.const import LANDINGPAGE @@ -154,7 +154,7 @@ class DockerHomeAssistant(DockerInterface): ) # Machine ID - if MACHINE_ID.exists(): + if self.sys_machine_id: mounts.append(MOUNT_MACHINE_ID) return mounts diff --git a/supervisor/hardware/manager.py b/supervisor/hardware/manager.py index 998344b89..eb20b7acd 100644 --- a/supervisor/hardware/manager.py +++ b/supervisor/hardware/manager.py @@ -2,6 +2,7 @@ import logging from pathlib import Path +from typing import Self import pyudev @@ -51,17 +52,25 @@ class 
HardwareManager(CoreSysAttributes): """Initialize Hardware Monitor object.""" self.coresys: CoreSys = coresys self._devices: dict[str, Device] = {} - self._udev = pyudev.Context() + self._udev: pyudev.Context | None = None - self._montior: HwMonitor = HwMonitor(coresys) + self._monitor: HwMonitor | None = None self._helper: HwHelper = HwHelper(coresys) self._policy: HwPolicy = HwPolicy(coresys) self._disk: HwDisk = HwDisk(coresys) + async def post_init(self) -> Self: + """Complete initialization of obect within event loop.""" + self._udev = await self.sys_run_in_executor(pyudev.Context) + self._monitor: HwMonitor = HwMonitor(self.coresys, self._udev) + return self + @property def monitor(self) -> HwMonitor: """Return Hardware Monitor instance.""" - return self._montior + if not self._monitor: + raise RuntimeError("Hardware monitor not initialized!") + return self._monitor @property def helper(self) -> HwHelper: diff --git a/supervisor/hardware/monitor.py b/supervisor/hardware/monitor.py index 0aa6fdcd6..4d96e0891 100644 --- a/supervisor/hardware/monitor.py +++ b/supervisor/hardware/monitor.py @@ -20,10 +20,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__) class HwMonitor(CoreSysAttributes): """Hardware monitor for supervisor.""" - def __init__(self, coresys: CoreSys): + def __init__(self, coresys: CoreSys, context: pyudev.Context): """Initialize Hardware Monitor object.""" self.coresys: CoreSys = coresys - self.context = pyudev.Context() + self.context = context self.monitor: pyudev.Monitor | None = None self.observer: pyudev.MonitorObserver | None = None diff --git a/supervisor/homeassistant/secrets.py b/supervisor/homeassistant/secrets.py index eda6cb7c7..ab02400a8 100644 --- a/supervisor/homeassistant/secrets.py +++ b/supervisor/homeassistant/secrets.py @@ -49,18 +49,21 @@ class HomeAssistantSecrets(CoreSysAttributes): ) async def _read_secrets(self): """Read secrets.yaml into memory.""" - if not self.path_secrets.exists(): - _LOGGER.debug("Home 
Assistant secrets.yaml does not exist") - return - # Read secrets - try: - secrets = await self.sys_run_in_executor(read_yaml_file, self.path_secrets) - except YamlFileError as err: - _LOGGER.warning("Can't read Home Assistant secrets: %s", err) - return + def read_secrets_yaml() -> dict | None: + if not self.path_secrets.exists(): + _LOGGER.debug("Home Assistant secrets.yaml does not exist") + return None - if not isinstance(secrets, dict): + # Read secrets + try: + return read_yaml_file(self.path_secrets) + except YamlFileError as err: + _LOGGER.warning("Can't read Home Assistant secrets: %s", err) + return None + + secrets = await self.sys_run_in_executor(read_secrets_yaml) + if secrets is None or not isinstance(secrets, dict): return # Process secrets diff --git a/supervisor/host/info.py b/supervisor/host/info.py index 58d6f0656..c2484ba37 100644 --- a/supervisor/host/info.py +++ b/supervisor/host/info.py @@ -1,7 +1,7 @@ """Info control for host.""" import asyncio -from datetime import datetime +from datetime import datetime, tzinfo import logging from ..coresys import CoreSysAttributes @@ -72,6 +72,11 @@ class InfoCenter(CoreSysAttributes): """Return host timezone.""" return self.sys_dbus.timedate.timezone + @property + def timezone_tzinfo(self) -> tzinfo | None: + """Return host timezone as tzinfo object.""" + return self.sys_dbus.timedate.timezone_tzinfo + @property def dt_utc(self) -> datetime | None: """Return host UTC time.""" diff --git a/supervisor/host/logs.py b/supervisor/host/logs.py index e55785f45..978e99fde 100644 --- a/supervisor/host/logs.py +++ b/supervisor/host/logs.py @@ -8,6 +8,7 @@ import json import logging import os from pathlib import Path +from typing import Self from aiohttp import ClientError, ClientSession, ClientTimeout from aiohttp.client_exceptions import UnixClientConnectorError @@ -51,13 +52,19 @@ class LogsControl(CoreSysAttributes): self._profiles: set[str] = set() self._boot_ids: list[str] = [] self._default_identifiers: 
list[str] = [] + self._available: bool = False + + async def post_init(self) -> Self: + """Post init actions that must occur in event loop.""" + self._available = bool( + os.environ.get("SUPERVISOR_SYSTEMD_JOURNAL_GATEWAYD_URL") + ) or await self.sys_run_in_executor(SYSTEMD_JOURNAL_GATEWAYD_SOCKET.is_socket) + return self @property def available(self) -> bool: """Check if systemd-journal-gatwayd is available.""" - if os.environ.get("SUPERVISOR_SYSTEMD_JOURNAL_GATEWAYD_URL"): - return True - return SYSTEMD_JOURNAL_GATEWAYD_SOCKET.is_socket() + return self._available @property def boot_ids(self) -> list[str]: diff --git a/supervisor/host/manager.py b/supervisor/host/manager.py index e926c5ad3..a8a94f5b9 100644 --- a/supervisor/host/manager.py +++ b/supervisor/host/manager.py @@ -3,6 +3,7 @@ from contextlib import suppress from functools import lru_cache import logging +from typing import Self from awesomeversion import AwesomeVersion @@ -38,6 +39,11 @@ class HostManager(CoreSysAttributes): self._sound: SoundControl = SoundControl(coresys) self._logs: LogsControl = LogsControl(coresys) + async def post_init(self) -> Self: + """Post init actions that must occur in event loop.""" + await self._logs.post_init() + return self + @property def apparmor(self) -> AppArmorControl: """Return host AppArmor handler.""" diff --git a/supervisor/store/__init__.py b/supervisor/store/__init__.py index 0fc2ef52b..09aadc737 100644 --- a/supervisor/store/__init__.py +++ b/supervisor/store/__init__.py @@ -183,7 +183,7 @@ class StoreManager(CoreSysAttributes, FileConfiguration): raise err else: - if not repository.validate(): + if not await self.sys_run_in_executor(repository.validate): if add_with_errors: _LOGGER.error("%s is not a valid add-on repository", url) self.sys_resolution.create_issue( diff --git a/supervisor/store/git.py b/supervisor/store/git.py index 6841e6823..835c67c7f 100644 --- a/supervisor/store/git.py +++ b/supervisor/store/git.py @@ -49,7 +49,7 @@ class 
GitRepo(CoreSysAttributes): async def load(self) -> None: """Init Git add-on repository.""" - if not (self.path / ".git").is_dir(): + if not await self.sys_run_in_executor((self.path / ".git").is_dir): await self.clone() return diff --git a/supervisor/utils/dt.py b/supervisor/utils/dt.py index 01895acc0..fe20866be 100644 --- a/supervisor/utils/dt.py +++ b/supervisor/utils/dt.py @@ -69,7 +69,10 @@ def utc_from_timestamp(timestamp: float) -> datetime: def get_time_zone(time_zone_str: str) -> tzinfo | None: - """Get time zone from string. Return None if unable to determine.""" + """Get time zone from string. Return None if unable to determine. + + Must be run in executor. + """ try: return zoneinfo.ZoneInfo(time_zone_str) except zoneinfo.ZoneInfoNotFoundError: diff --git a/supervisor/utils/validate.py b/supervisor/utils/validate.py index 60146bc27..146900c65 100644 --- a/supervisor/utils/validate.py +++ b/supervisor/utils/validate.py @@ -18,7 +18,10 @@ def schema_or(schema): def validate_timezone(timezone): - """Validate voluptuous timezone.""" + """Validate voluptuous timezone. + + Must be run in executor. 
+ """ if get_time_zone(timezone) is not None: return timezone raise vol.Invalid( diff --git a/tests/addons/test_build.py b/tests/addons/test_build.py index fb9cb566b..800f4e21e 100644 --- a/tests/addons/test_build.py +++ b/tests/addons/test_build.py @@ -20,7 +20,9 @@ async def test_platform_set(coresys: CoreSys, install_addon_ssh: Addon): type(coresys.arch), "default", new=PropertyMock(return_value="amd64") ), ): - args = build.get_docker_args(AwesomeVersion("latest")) + args = await coresys.run_in_executor( + build.get_docker_args, AwesomeVersion("latest") + ) assert args["platform"] == "linux/amd64" @@ -36,10 +38,14 @@ async def test_dockerfile_evaluation(coresys: CoreSys, install_addon_ssh: Addon) type(coresys.arch), "default", new=PropertyMock(return_value="amd64") ), ): - args = build.get_docker_args(AwesomeVersion("latest")) + args = await coresys.run_in_executor( + build.get_docker_args, AwesomeVersion("latest") + ) assert args["dockerfile"].endswith("fixtures/addons/local/ssh/Dockerfile") - assert str(build.dockerfile).endswith("fixtures/addons/local/ssh/Dockerfile") + assert str(await coresys.run_in_executor(build.get_dockerfile)).endswith( + "fixtures/addons/local/ssh/Dockerfile" + ) assert build.arch == "amd64" @@ -54,10 +60,12 @@ async def test_dockerfile_evaluation_arch(coresys: CoreSys, install_addon_ssh: A type(coresys.arch), "default", new=PropertyMock(return_value="aarch64") ), ): - args = build.get_docker_args(AwesomeVersion("latest")) + args = await coresys.run_in_executor( + build.get_docker_args, AwesomeVersion("latest") + ) assert args["dockerfile"].endswith("fixtures/addons/local/ssh/Dockerfile.aarch64") - assert str(build.dockerfile).endswith( + assert str(await coresys.run_in_executor(build.get_dockerfile)).endswith( "fixtures/addons/local/ssh/Dockerfile.aarch64" ) assert build.arch == "aarch64" @@ -74,7 +82,7 @@ async def test_build_valid(coresys: CoreSys, install_addon_ssh: Addon): type(coresys.arch), "default", 
new=PropertyMock(return_value="aarch64") ), ): - assert build.is_valid + assert await build.is_valid() async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon): @@ -88,4 +96,4 @@ async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon): type(coresys.arch), "default", new=PropertyMock(return_value="amd64") ), ): - assert not build.is_valid + assert not await build.is_valid() diff --git a/tests/addons/test_manager.py b/tests/addons/test_manager.py index 6dfe350a6..f622aa4ab 100644 --- a/tests/addons/test_manager.py +++ b/tests/addons/test_manager.py @@ -409,7 +409,7 @@ async def test_repository_file_error( in caplog.text ) - write_json_file(repo_file, {"invalid": "bad"}) + await coresys.run_in_executor(write_json_file, repo_file, {"invalid": "bad"}) await coresys.store.data.update() assert f"Repository parse error {repo_dir.as_posix()}" in caplog.text diff --git a/tests/api/test_addons.py b/tests/api/test_addons.py index b27e40dca..c04b0129b 100644 --- a/tests/api/test_addons.py +++ b/tests/api/test_addons.py @@ -234,7 +234,7 @@ async def test_api_addon_rebuild_healthcheck( _container_events_task = asyncio.create_task(container_events()) with ( - patch.object(AddonBuild, "is_valid", new=PropertyMock(return_value=True)), + patch.object(AddonBuild, "is_valid", return_value=True), patch.object(DockerAddon, "is_running", return_value=False), patch.object(Addon, "need_build", new=PropertyMock(return_value=True)), patch.object(CpuArch, "supported", new=PropertyMock(return_value=["amd64"])), diff --git a/tests/api/test_host.py b/tests/api/test_host.py index c9a798544..fee3517af 100644 --- a/tests/api/test_host.py +++ b/tests/api/test_host.py @@ -327,9 +327,9 @@ async def test_advanced_logs_boot_id_offset( async def test_advanced_logs_formatters( + journald_gateway: MagicMock, api_client: TestClient, coresys: CoreSys, - journald_gateway: MagicMock, journal_logs_reader: MagicMock, ): """Test advanced logs formatters varying on Accept header.""" 
diff --git a/tests/api/test_panel.py b/tests/api/test_panel.py new file mode 100644 index 000000000..e3b8c5ffe --- /dev/null +++ b/tests/api/test_panel.py @@ -0,0 +1,23 @@ +"""Test panel API.""" + +from pathlib import Path + +from aiohttp.test_utils import TestClient +import pytest + +from supervisor.coresys import CoreSys + +PANEL_PATH = Path(__file__).parent.parent.parent.joinpath("supervisor/api/panel") + + +@pytest.mark.parametrize( + "filename", ["entrypoint.js", "entrypoint.js.br", "entrypoint.js.gz"] +) +async def test_frontend_files(api_client: TestClient, coresys: CoreSys, filename: str): +    """Test frontend files served up correctly.""" +    resp = await api_client.get(f"/app/{filename}") +    assert resp.status == 200 + +    body = await resp.read() +    file_bytes = await coresys.run_in_executor(PANEL_PATH.joinpath(filename).read_bytes) +    assert body == file_bytes diff --git a/tests/api/test_supervisor.py b/tests/api/test_supervisor.py index 330818aac..e6ebe908a 100644 --- a/tests/api/test_supervisor.py +++ b/tests/api/test_supervisor.py @@ -233,3 +233,17 @@ async def test_api_supervisor_reload(api_client: TestClient): """Test supervisor reload.""" resp = await api_client.post("/supervisor/reload") assert resp.status == 200 + + +async def test_api_supervisor_options_timezone( + api_client: TestClient, coresys: CoreSys ): """Test setting supervisor timezone via API.""" assert coresys.timezone == "Etc/UTC" + + resp = await api_client.post( + "/supervisor/options", json={"timezone": "Europe/Zurich"} + ) + assert resp.status == 200 + + assert coresys.timezone == "Europe/Zurich" diff --git a/tests/common.py b/tests/common.py index d7b507e97..79feb4645 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1,6 +1,8 @@ """Common test functions.""" +import asyncio from datetime import datetime +from functools import partial from importlib import import_module from inspect import getclosurevars import json @@ -68,7 +70,9 @@ async def mock_dbus_services( services: 
dict[str, list[DBusServiceMock] | DBusServiceMock] = {} requested_names: set[str] = set() - for module in get_valid_modules("dbus_service_mocks", base=__file__): + for module in await asyncio.get_running_loop().run_in_executor( + None, partial(get_valid_modules, base=__file__), "dbus_service_mocks" + ): if module in to_mock: service_module = import_module(f"{__package__}.dbus_service_mocks.{module}") diff --git a/tests/conftest.py b/tests/conftest.py index 95d324b08..8eca71c06 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,6 +12,7 @@ from uuid import uuid4 from aiohttp import web from aiohttp.test_utils import TestClient from awesomeversion import AwesomeVersion +from blockbuster import BlockBuster, blockbuster_ctx from dbus_fast import BusType from dbus_fast.aio.message_bus import MessageBus import pytest @@ -63,6 +64,24 @@ from .dbus_service_mocks.network_manager import NetworkManager as NetworkManager # pylint: disable=redefined-outer-name, protected-access +# This commented out code is left in intentionally +# Intent is to enable this for all tests at all times as an autouse fixture +# Findings from PR were growing too big so disabling temporarily to create a checkpoint +# @pytest.fixture(autouse=True) +def blockbuster(request: pytest.FixtureRequest) -> BlockBuster: + """Raise for blocking I/O in event loop.""" + # Excluded modules doesn't seem to stop test code from raising for blocking I/O + # Defaulting to only scanning supervisor core code seems like the best we can do easily + # Added a parameter so we could potentially go module by module in test and eliminate blocking I/O + # Then we could tell it to scan everything by default. 
That will require more follow-up work + + # pylint: disable-next=contextmanager-generator-missing-cleanup + with blockbuster_ctx( + scanned_modules=getattr(request, "param", ["supervisor"]) + ) as bb: + yield bb + + @pytest.fixture async def path_extern() -> None: """Set external path env for tests.""" diff --git a/tests/host/test_logs.py b/tests/host/test_logs.py index 14e788e81..38c8b739d 100644 --- a/tests/host/test_logs.py +++ b/tests/host/test_logs.py @@ -33,7 +33,7 @@ async def test_load(coresys: CoreSys): assert identifier in coresys.host.logs.default_identifiers -async def test_logs(coresys: CoreSys, journald_gateway: MagicMock): +async def test_logs(journald_gateway: MagicMock, coresys: CoreSys): """Test getting logs and errors.""" assert coresys.host.logs.available is True @@ -63,7 +63,7 @@ async def test_logs(coresys: CoreSys, journald_gateway: MagicMock): pass -async def test_logs_coloured(coresys: CoreSys, journald_gateway: MagicMock): +async def test_logs_coloured(journald_gateway: MagicMock, coresys: CoreSys): """Test ANSI control sequences being preserved in binary messages.""" journald_gateway.content.feed_data( load_fixture("logs_export_supervisor.txt").encode("utf-8") @@ -82,7 +82,7 @@ async def test_logs_coloured(coresys: CoreSys, journald_gateway: MagicMock): ) -async def test_boot_ids(coresys: CoreSys, journald_gateway: MagicMock): +async def test_boot_ids(journald_gateway: MagicMock, coresys: CoreSys): """Test getting boot ids.""" journald_gateway.content.feed_data( load_fixture("logs_boot_ids.txt").encode("utf-8") @@ -109,7 +109,7 @@ async def test_boot_ids(coresys: CoreSys, journald_gateway: MagicMock): await coresys.host.logs.get_boot_id(3) -async def test_boot_ids_fallback(coresys: CoreSys, journald_gateway: MagicMock): +async def test_boot_ids_fallback(journald_gateway: MagicMock, coresys: CoreSys): """Test getting boot ids using fallback.""" # Initial response has no log lines journald_gateway.content.feed_data(b"") @@ -134,7 +134,7 @@ 
async def test_boot_ids_fallback(coresys: CoreSys, journald_gateway: MagicMock): ] -async def test_identifiers(coresys: CoreSys, journald_gateway: MagicMock): +async def test_identifiers(journald_gateway: MagicMock, coresys: CoreSys): """Test getting identifiers.""" journald_gateway.content.feed_data( load_fixture("logs_identifiers.txt").encode("utf-8") @@ -156,7 +156,7 @@ async def test_identifiers(coresys: CoreSys, journald_gateway: MagicMock): async def test_connection_refused_handled( - coresys: CoreSys, journald_gateway: MagicMock + journald_gateway: MagicMock, coresys: CoreSys ): """Test connection refused is handled with HostServiceError.""" with patch("supervisor.host.logs.ClientSession.get") as get: diff --git a/tests/resolution/check/test_check.py b/tests/resolution/check/test_check.py index 358578fde..47fb9253d 100644 --- a/tests/resolution/check/test_check.py +++ b/tests/resolution/check/test_check.py @@ -114,5 +114,5 @@ async def test_get_checks(coresys: CoreSys): async def test_dynamic_check_loader(coresys: CoreSys): """Test dynamic check loader, this ensures that all checks have defined a setup function.""" coresys.resolution.check.load_modules() - for check in get_valid_modules("checks"): + for check in await coresys.run_in_executor(get_valid_modules, "checks"): assert check in coresys.resolution.check._checks diff --git a/tests/resolution/evaluation/test_evaluate_operating_system.py b/tests/resolution/evaluation/test_evaluate_operating_system.py index dbbc87c6a..e4e699dcb 100644 --- a/tests/resolution/evaluation/test_evaluate_operating_system.py +++ b/tests/resolution/evaluation/test_evaluate_operating_system.py @@ -18,7 +18,9 @@ async def test_evaluation(coresys: CoreSys): assert operating_system.reason not in coresys.resolution.unsupported - coresys.host._info = MagicMock(operating_system="unsupported", timezone=None) + coresys.host._info = MagicMock( + operating_system="unsupported", timezone=None, timezone_tzinfo=None + ) await 
operating_system() assert operating_system.reason in coresys.resolution.unsupported @@ -27,7 +29,9 @@ async def test_evaluation(coresys: CoreSys): assert operating_system.reason not in coresys.resolution.unsupported coresys.os._available = False - coresys.host._info = MagicMock(operating_system=SUPPORTED_OS[0], timezone=None) + coresys.host._info = MagicMock( + operating_system=SUPPORTED_OS[0], timezone=None, timezone_tzinfo=None + ) await operating_system() assert operating_system.reason not in coresys.resolution.unsupported diff --git a/tests/resolution/evaluation/test_evaluate_os_agent.py b/tests/resolution/evaluation/test_evaluate_os_agent.py index c1b830134..d6365140d 100644 --- a/tests/resolution/evaluation/test_evaluate_os_agent.py +++ b/tests/resolution/evaluation/test_evaluate_os_agent.py @@ -16,7 +16,7 @@ async def test_evaluation(coresys: CoreSys): assert agent.reason not in coresys.resolution.unsupported - coresys._host = MagicMock(info=MagicMock(timezone=None)) + coresys._host = MagicMock(info=MagicMock(timezone=None, timezone_tzinfo=None)) coresys.host.features = [HostFeature.HOSTNAME] await agent() diff --git a/tests/resolution/evaluation/test_evaluate_systemd.py b/tests/resolution/evaluation/test_evaluate_systemd.py index d2c4cbdf3..e068ba253 100644 --- a/tests/resolution/evaluation/test_evaluate_systemd.py +++ b/tests/resolution/evaluation/test_evaluate_systemd.py @@ -16,7 +16,7 @@ async def test_evaluation(coresys: CoreSys): assert systemd.reason not in coresys.resolution.unsupported - coresys._host = MagicMock(info=MagicMock(timezone=None)) + coresys._host = MagicMock(info=MagicMock(timezone=None, timezone_tzinfo=None)) coresys.host.features = [HostFeature.HOSTNAME] await systemd() diff --git a/tests/resolution/evaluation/test_evaluate_systemd_journal.py b/tests/resolution/evaluation/test_evaluate_systemd_journal.py index faec87960..ddf4a7d90 100644 --- a/tests/resolution/evaluation/test_evaluate_systemd_journal.py +++ 
b/tests/resolution/evaluation/test_evaluate_systemd_journal.py @@ -8,23 +8,24 @@ from supervisor.coresys import CoreSys from supervisor.resolution.evaluations.systemd_journal import EvaluateSystemdJournal -async def test_evaluation(coresys: CoreSys, journald_gateway: MagicMock): - """Test evaluation.""" +async def test_evaluation_supported(journald_gateway: MagicMock, coresys: CoreSys): + """Test evaluation for supported system.""" systemd_journal = EvaluateSystemdJournal(coresys) await coresys.core.set_state(CoreState.SETUP) - assert systemd_journal.reason not in coresys.resolution.unsupported - - with patch("supervisor.host.logs.Path.is_socket", return_value=False): - await systemd_journal() - assert systemd_journal.reason in coresys.resolution.unsupported - - coresys.host.supported_features.cache_clear() # pylint: disable=no-member - await systemd_journal() assert systemd_journal.reason not in coresys.resolution.unsupported +async def test_evaluation_unsupported(coresys: CoreSys): + """Test evaluation for unsupported system.""" + systemd_journal = EvaluateSystemdJournal(coresys) + await coresys.core.set_state(CoreState.SETUP) + + await systemd_journal() + assert systemd_journal.reason in coresys.resolution.unsupported + + async def test_did_run(coresys: CoreSys): """Test that the evaluation ran as expected.""" systemd_journal = EvaluateSystemdJournal(coresys) diff --git a/tests/resolution/fixup/test_fixup.py b/tests/resolution/fixup/test_fixup.py index c76b89811..5e0e2285e 100644 --- a/tests/resolution/fixup/test_fixup.py +++ b/tests/resolution/fixup/test_fixup.py @@ -43,7 +43,7 @@ async def test_check_autofix(coresys: CoreSys): assert len(coresys.resolution.suggestions) == 0 -def test_dynamic_fixup_loader(coresys: CoreSys): +async def test_dynamic_fixup_loader(coresys: CoreSys): """Test dynamic fixup loader, this ensures that all fixups have defined a setup function.""" - for fixup in get_valid_modules("fixups"): + for fixup in await 
coresys.run_in_executor(get_valid_modules, "fixups"): assert fixup in coresys.resolution.fixup._fixups diff --git a/tests/store/test_store_manager.py b/tests/store/test_store_manager.py index 7c2edddab..b479b3172 100644 --- a/tests/store/test_store_manager.py +++ b/tests/store/test_store_manager.py @@ -223,6 +223,7 @@ async def test_install_unavailable_addon( assert log in caplog.text +@pytest.mark.usefixtures("tmp_supervisor_data") async def test_reload(coresys: CoreSys): """Test store reload.""" await coresys.store.load() diff --git a/tests/test_coresys.py b/tests/test_coresys.py index cf3d1d5c7..a5f34e2e0 100644 --- a/tests/test_coresys.py +++ b/tests/test_coresys.py @@ -21,13 +21,13 @@ async def test_timezone(coresys: CoreSys): await coresys.dbus.timedate.connect(coresys.dbus.bus) assert coresys.timezone == "Etc/UTC" - coresys.config.timezone = "Europe/Zurich" + await coresys.config.set_timezone("Europe/Zurich") assert coresys.timezone == "Europe/Zurich" -def test_now(coresys: CoreSys): +async def test_now(coresys: CoreSys): """Test datetime now with local time.""" - coresys.config.timezone = "Europe/Zurich" + await coresys.config.set_timezone("Europe/Zurich") zurich = coresys.now() utc = utcnow()