Mirror of https://github.com/home-assistant/supervisor.git
Synced 2025-07-14 12:46:32 +00:00
Add blockbuster library and find I/O from unit tests (#5731)
* Add blockbuster library and find I/O from unit tests
* Fix lint and test issue
* Fixes from feedback
* Avoid modifying webapp object in executor
* Split su options validation and only validate timezone on change
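Blockbuster works by patching common blocking calls (file I/O, sockets, subprocess waits) so that they raise if executed while the asyncio event loop is running. A minimal sketch of how it is wired into a pytest suite, mirroring the blockbuster_ctx usage in the conftest.py hunk further below:

import pytest
from blockbuster import BlockBuster, blockbuster_ctx

@pytest.fixture
def blockbuster() -> BlockBuster:
    # Raise for blocking I/O executed inside the event loop; only code in
    # the "supervisor" package is scanned here.
    with blockbuster_ctx(scanned_modules=["supervisor"]) as bb:
        yield bb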
This commit is contained in:
parent 1fb4d1cc11
commit 6ef4f3cc67
@@ -3,6 +3,7 @@ aiohttp==3.11.13
 atomicwrites-homeassistant==1.4.1
 attrs==25.1.0
 awesomeversion==24.6.0
+blockbuster==1.5.23
 brotli==1.1.0
 ciso8601==2.3.2
 colorlog==6.9.0
@@ -753,9 +753,12 @@ class Addon(AddonModel):
         for listener in self._listeners:
             self.sys_bus.remove_listener(listener)
 
-        if self.path_data.is_dir():
-            _LOGGER.info("Removing add-on data folder %s", self.path_data)
-            await remove_data(self.path_data)
+        def remove_data_dir():
+            if self.path_data.is_dir():
+                _LOGGER.info("Removing add-on data folder %s", self.path_data)
+                remove_data(self.path_data)
+
+        await self.sys_run_in_executor(remove_data_dir)
 
     async def _check_ingress_port(self):
         """Assign a ingress port if dynamic port selection is used."""
@@ -777,12 +780,15 @@ class Addon(AddonModel):
         await self.sys_addons.data.install(self.addon_store)
         await self.load()
 
-        if not self.path_data.is_dir():
-            _LOGGER.info(
-                "Creating Home Assistant add-on data folder %s", self.path_data
-            )
-            self.path_data.mkdir()
+        def setup_data():
+            if not self.path_data.is_dir():
+                _LOGGER.info(
+                    "Creating Home Assistant add-on data folder %s", self.path_data
+                )
+                self.path_data.mkdir()
+
+        await self.sys_run_in_executor(setup_data)
 
         # Setup/Fix AppArmor profile
         await self.install_apparmor()
 
@@ -820,15 +826,18 @@ class Addon(AddonModel):
 
         await self.unload()
 
-        # Remove config if present and requested
-        if self.addon_config_used and remove_config:
-            await remove_data(self.path_config)
-
-        # Cleanup audio settings
-        if self.path_pulse.exists():
-            with suppress(OSError):
-                self.path_pulse.unlink()
+        def cleanup_config_and_audio():
+            # Remove config if present and requested
+            if self.addon_config_used and remove_config:
+                remove_data(self.path_config)
+
+            # Cleanup audio settings
+            if self.path_pulse.exists():
+                with suppress(OSError):
+                    self.path_pulse.unlink()
+
+        await self.sys_run_in_executor(cleanup_config_and_audio)
 
         # Cleanup AppArmor profile
         with suppress(HostAppArmorError):
             await self.uninstall_apparmor()
@@ -968,7 +977,7 @@ class Addon(AddonModel):
     async def install_apparmor(self) -> None:
         """Install or Update AppArmor profile for Add-on."""
         exists_local = self.sys_host.apparmor.exists(self.slug)
-        exists_addon = self.path_apparmor.exists()
+        exists_addon = await self.sys_run_in_executor(self.path_apparmor.exists)
 
         # Nothing to do
         if not exists_local and not exists_addon:
@@ -1444,6 +1453,12 @@ class Addon(AddonModel):
             # Restore data and config
             def _restore_data():
                 """Restore data and config."""
+                _LOGGER.info("Restoring data and config for addon %s", self.slug)
+                if self.path_data.is_dir():
+                    remove_data(self.path_data)
+                if self.path_config.is_dir():
+                    remove_data(self.path_config)
+
                 temp_data = Path(tmp.name, "data")
                 if temp_data.is_dir():
                     shutil.copytree(temp_data, self.path_data, symlinks=True)
@@ -1456,12 +1471,6 @@ class Addon(AddonModel):
                 elif self.addon_config_used:
                     self.path_config.mkdir()
 
-            _LOGGER.info("Restoring data and config for addon %s", self.slug)
-            if self.path_data.is_dir():
-                await remove_data(self.path_data)
-            if self.path_config.is_dir():
-                await remove_data(self.path_config)
-
             try:
                 await self.sys_run_in_executor(_restore_data)
             except shutil.Error as err:
@@ -1471,7 +1480,7 @@ class Addon(AddonModel):
 
             # Restore AppArmor
             profile_file = Path(tmp.name, "apparmor.txt")
-            if profile_file.exists():
+            if await self.sys_run_in_executor(profile_file.exists):
                 try:
                     await self.sys_host.apparmor.load_profile(
                         self.slug, profile_file
@@ -1492,7 +1501,7 @@ class Addon(AddonModel):
                 if data[ATTR_STATE] == AddonState.STARTED:
                     wait_for_start = await self.start()
         finally:
-            tmp.cleanup()
+            await self.sys_run_in_executor(tmp.cleanup)
         _LOGGER.info("Finished restore for add-on %s", self.slug)
         return wait_for_start
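The pattern repeated through these hunks — bundle the blocking filesystem calls into a nested function and await it via sys_run_in_executor — is plain asyncio. A standalone sketch of the same idea (remove_dir is an illustrative name, not part of this codebase):

import asyncio
import shutil
from pathlib import Path

async def remove_dir(path: Path) -> None:
    """Delete a directory tree without blocking the event loop."""

    def _remove() -> None:
        if path.is_dir():        # stat() syscall — blocking
            shutil.rmtree(path)  # recursive delete — blocking

    await asyncio.get_running_loop().run_in_executor(None, _remove)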
@@ -81,13 +81,6 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
             )
         return self._data[ATTR_BUILD_FROM][self.arch]
 
-    @property
-    def dockerfile(self) -> Path:
-        """Return Dockerfile path."""
-        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
-            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
-        return self.addon.path_location.joinpath("Dockerfile")
-
     @property
     def squash(self) -> bool:
         """Return True or False if squash is active."""
@@ -103,25 +96,40 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         """Return additional Docker labels."""
         return self._data[ATTR_LABELS]
 
-    @property
-    def is_valid(self) -> bool:
+    def get_dockerfile(self) -> Path:
+        """Return Dockerfile path.
+
+        Must be run in executor.
+        """
+        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
+            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
+        return self.addon.path_location.joinpath("Dockerfile")
+
+    async def is_valid(self) -> bool:
         """Return true if the build env is valid."""
-        try:
+
+        def build_is_valid() -> bool:
             return all(
                 [
                     self.addon.path_location.is_dir(),
-                    self.dockerfile.is_file(),
+                    self.get_dockerfile().is_file(),
                 ]
             )
+
+        try:
+            return await self.sys_run_in_executor(build_is_valid)
         except HassioArchNotFound:
             return False
 
     def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
-        """Create a dict with Docker build arguments."""
+        """Create a dict with Docker build arguments.
+
+        Must be run in executor.
+        """
         args = {
             "path": str(self.addon.path_location),
             "tag": f"{image or self.addon.image}:{version!s}",
-            "dockerfile": str(self.dockerfile),
+            "dockerfile": str(self.get_dockerfile()),
             "pull": True,
             "forcerm": not self.sys_dev,
             "squash": self.squash,
@@ -2,9 +2,9 @@
 
 from __future__ import annotations
 
-import asyncio
 import logging
 from pathlib import Path
+import subprocess
 from typing import TYPE_CHECKING
 
 from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE
@@ -86,18 +86,20 @@ def rating_security(addon: AddonModel) -> int:
     return max(min(8, rating), 1)
 
 
-async def remove_data(folder: Path) -> None:
-    """Remove folder and reset privileged."""
+def remove_data(folder: Path) -> None:
+    """Remove folder and reset privileged.
+
+    Must be run in executor.
+    """
     try:
-        proc = await asyncio.create_subprocess_exec(
-            "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
+        subprocess.run(
+            ["rm", "-rf", str(folder)], stdout=subprocess.DEVNULL, text=True, check=True
         )
-
-        _, error_msg = await proc.communicate()
     except OSError as err:
         error_msg = str(err)
+    except subprocess.CalledProcessError as procerr:
+        error_msg = procerr.stderr.strip()
     else:
-        if proc.returncode == 0:
-            return
+        return
 
     _LOGGER.error("Can't remove Add-on Data: %s", error_msg)
@@ -1,5 +1,6 @@
 """Init file for Supervisor RESTful API."""
 
+from dataclasses import dataclass
 from functools import partial
 import logging
 from pathlib import Path
@@ -47,6 +48,14 @@ MAX_CLIENT_SIZE: int = 1024**2 * 16
 MAX_LINE_SIZE: int = 24570
 
 
+@dataclass(slots=True, frozen=True)
+class StaticResourceConfig:
+    """Configuration for a static resource."""
+
+    prefix: str
+    path: Path
+
+
 class RestAPI(CoreSysAttributes):
     """Handle RESTful API for Supervisor."""
 
@@ -77,6 +86,8 @@ class RestAPI(CoreSysAttributes):
 
     async def load(self) -> None:
         """Register REST API Calls."""
+        static_resource_configs: list[StaticResourceConfig] = []
+
         self._api_host = APIHost()
         self._api_host.coresys = self.coresys
 
@@ -98,7 +109,7 @@ class RestAPI(CoreSysAttributes):
         self._register_network()
         self._register_observer()
         self._register_os()
-        self._register_panel()
+        static_resource_configs.extend(self._register_panel())
         self._register_proxy()
         self._register_resolution()
         self._register_root()
@@ -107,6 +118,17 @@ class RestAPI(CoreSysAttributes):
         self._register_store()
         self._register_supervisor()
 
+        if static_resource_configs:
+
+            def process_configs() -> list[web.StaticResource]:
+                return [
+                    web.StaticResource(config.prefix, config.path)
+                    for config in static_resource_configs
+                ]
+
+            for resource in await self.sys_run_in_executor(process_configs):
+                self.webapp.router.register_resource(resource)
+
         await self.start()
 
     def _register_advanced_logs(self, path: str, syslog_identifier: str):
@@ -750,10 +772,9 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
-    def _register_panel(self) -> None:
+    def _register_panel(self) -> list[StaticResourceConfig]:
         """Register panel for Home Assistant."""
-        panel_dir = Path(__file__).parent.joinpath("panel")
-        self.webapp.add_routes([web.static("/app", panel_dir)])
+        return [StaticResourceConfig("/app", Path(__file__).parent.joinpath("panel"))]
 
     def _register_docker(self) -> None:
         """Register docker configuration functions."""
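The executor detour for static resources exists because aiohttp's web.StaticResource resolves and checks its directory when instantiated — filesystem work. A standalone sketch of the same idea, assuming that constructor behavior (register_static is an illustrative helper, not part of this codebase):

import asyncio
from pathlib import Path
from aiohttp import web

async def register_static(app: web.Application, prefix: str, path: Path) -> None:
    loop = asyncio.get_running_loop()
    # Construction touches the filesystem, so do it in the executor...
    resource = await loop.run_in_executor(None, web.StaticResource, prefix, path)
    # ...registration is pure data-structure work and can stay on the loop.
    app.router.register_resource(resource)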
@@ -475,7 +475,7 @@ class APIBackups(CoreSysAttributes):
         _LOGGER.info("Downloading backup %s", backup.slug)
         filename = backup.all_locations[location][ATTR_PATH]
         # If the file is missing, return 404 and trigger reload of location
-        if not filename.is_file():
+        if not await self.sys_run_in_executor(filename.is_file):
             self.sys_create_task(self.sys_backups.reload(location))
             return web.Response(status=404)
 
@@ -60,7 +60,7 @@ SCHEMA_OPTIONS = vol.Schema(
     {
         vol.Optional(ATTR_CHANNEL): vol.Coerce(UpdateChannel),
         vol.Optional(ATTR_ADDONS_REPOSITORIES): repositories,
-        vol.Optional(ATTR_TIMEZONE): validate_timezone,
+        vol.Optional(ATTR_TIMEZONE): str,
         vol.Optional(ATTR_WAIT_BOOT): wait_boot,
         vol.Optional(ATTR_LOGGING): vol.Coerce(LogLevel),
         vol.Optional(ATTR_DEBUG): vol.Boolean(),
@@ -127,12 +127,18 @@ class APISupervisor(CoreSysAttributes):
         """Set Supervisor options."""
         body = await api_validate(SCHEMA_OPTIONS, request)
 
+        # Timezone must be first as validation is incomplete
+        # If a timezone is present we do that validation after in the executor
+        if (
+            ATTR_TIMEZONE in body
+            and (timezone := body[ATTR_TIMEZONE]) != self.sys_config.timezone
+        ):
+            await self.sys_run_in_executor(validate_timezone, timezone)
+            await self.sys_config.set_timezone(timezone)
+
         if ATTR_CHANNEL in body:
             self.sys_updater.channel = body[ATTR_CHANNEL]
 
-        if ATTR_TIMEZONE in body:
-            self.sys_config.timezone = body[ATTR_TIMEZONE]
-
         if ATTR_DEBUG in body:
             self.sys_config.debug = body[ATTR_DEBUG]
 
@@ -174,7 +174,9 @@ def api_return_ok(data: dict[str, Any] | None = None) -> web.Response:
 
 
 async def api_validate(
-    schema: vol.Schema, request: web.Request, origin: list[str] | None = None
+    schema: vol.Schema,
+    request: web.Request,
+    origin: list[str] | None = None,
 ) -> dict[str, Any]:
     """Validate request data with schema."""
     data: dict[str, Any] = await request.json(loads=json_loads)
@@ -542,7 +542,7 @@ class Backup(JobGroup):
             raise err
         finally:
             if self._tmp:
-                self._tmp.cleanup()
+                await self.sys_run_in_executor(self._tmp.cleanup)
 
     async def _create_cleanup(self, outer_tarfile: TarFile) -> None:
         """Cleanup after backup creation.
@@ -846,7 +846,9 @@ class Backup(JobGroup):
             await self.sys_homeassistant.backup(homeassistant_file, exclude_database)
 
             # Store size
-            self.homeassistant[ATTR_SIZE] = homeassistant_file.size
+            self.homeassistant[ATTR_SIZE] = await self.sys_run_in_executor(
+                getattr, homeassistant_file, "size"
+            )
 
     @Job(name="backup_restore_homeassistant", cleanup=False)
     async def restore_homeassistant(self) -> Awaitable[None]:
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 import asyncio
-from collections.abc import Awaitable, Iterable
+from collections.abc import Awaitable
 import errno
 import logging
 from pathlib import Path
@@ -179,12 +179,18 @@ class BackupManager(FileConfiguration, JobGroup):
             )
         self.sys_jobs.current.stage = stage
 
-    def _list_backup_files(self, path: Path) -> Iterable[Path]:
+    async def _list_backup_files(self, path: Path) -> list[Path]:
         """Return iterable of backup files, suppress and log OSError for network mounts."""
-        try:
+
+        def find_backups() -> list[Path]:
             # is_dir does a stat syscall which raises if the mount is down
+            # Returning an iterator causes I/O while iterating, coerce into list here
             if path.is_dir():
-                return path.glob("*.tar")
+                return list(path.glob("*.tar"))
+            return []
+
+        try:
+            return await self.sys_run_in_executor(find_backups)
         except OSError as err:
             if err.errno == errno.EBADMSG and path in {
                 self.sys_config.path_backup,
@@ -278,9 +284,7 @@ class BackupManager(FileConfiguration, JobGroup):
         tasks = [
             self.sys_create_task(_load_backup(_location, tar_file))
             for _location, path in locations.items()
-            for tar_file in await self.sys_run_in_executor(
-                self._list_backup_files, path
-            )
+            for tar_file in await self._list_backup_files(path)
         ]
 
         _LOGGER.info("Found %d backup files", len(tasks))
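The list() coercion in find_backups matters because Path.glob returns a lazy generator: each step performs directory reads, so an un-coerced iterator would do its I/O later, back on the event loop, as the task-list comprehension consumes it. Illustration:

from pathlib import Path

def find_tars(path: Path) -> list[Path]:
    lazy = path.glob("*.tar")  # no I/O yet — a generator
    return list(lazy)          # all directory reads happen here, in the executor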
@@ -70,8 +70,8 @@ async def initialize_coresys() -> CoreSys:
     coresys.homeassistant = await HomeAssistant(coresys).load_config()
     coresys.addons = await AddonManager(coresys).load_config()
     coresys.backups = await BackupManager(coresys).load_config()
-    coresys.host = HostManager(coresys)
-    coresys.hardware = HardwareManager(coresys)
+    coresys.host = await HostManager(coresys).post_init()
+    coresys.hardware = await HardwareManager(coresys).post_init()
     coresys.ingress = await Ingress(coresys).load_config()
     coresys.tasks = Tasks(coresys)
     coresys.services = await ServiceManager(coresys).load_config()
@@ -1,6 +1,7 @@
 """Bootstrap Supervisor."""
 
-from datetime import UTC, datetime
+import asyncio
+from datetime import UTC, datetime, tzinfo
 import logging
 import os
 from pathlib import Path, PurePath
@@ -24,7 +25,7 @@ from .const import (
     LogLevel,
 )
 from .utils.common import FileConfiguration
-from .utils.dt import parse_datetime
+from .utils.dt import get_time_zone, parse_datetime
 from .validate import SCHEMA_SUPERVISOR_CONFIG
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -66,6 +67,7 @@ class CoreConfig(FileConfiguration):
     def __init__(self):
         """Initialize config object."""
         super().__init__(FILE_HASSIO_CONFIG, SCHEMA_SUPERVISOR_CONFIG)
+        self._timezone_tzinfo: tzinfo | None = None
 
     @property
     def timezone(self) -> str | None:
@@ -76,12 +78,19 @@ class CoreConfig(FileConfiguration):
             self._data.pop(ATTR_TIMEZONE, None)
             return None
 
-    @timezone.setter
-    def timezone(self, value: str) -> None:
+    @property
+    def timezone_tzinfo(self) -> tzinfo | None:
+        """Return system timezone as tzinfo object."""
+        return self._timezone_tzinfo
+
+    async def set_timezone(self, value: str) -> None:
         """Set system timezone."""
         if value == _UTC:
             return
         self._data[ATTR_TIMEZONE] = value
+        self._timezone_tzinfo = await asyncio.get_running_loop().run_in_executor(
+            None, get_time_zone, value
+        )
 
     @property
     def version(self) -> AwesomeVersion:
@@ -390,3 +399,15 @@ class CoreConfig(FileConfiguration):
     def extern_to_local_path(self, path: PurePath) -> Path:
         """Translate a path relative to extern supervisor data to its path in the container."""
         return self.path_supervisor / path.relative_to(self.path_extern_supervisor)
+
+    async def read_data(self) -> None:
+        """Read configuration file."""
+        timezone = self.timezone
+        await super().read_data()
+
+        if not self.timezone:
+            self._timezone_tzinfo = None
+        elif timezone != self.timezone:
+            self._timezone_tzinfo = await asyncio.get_running_loop().run_in_executor(
+                None, get_time_zone, self.timezone
+            )
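The cached tzinfo exists because zoneinfo.ZoneInfo reads tz database files from disk on first construction. A standalone sketch of the caching idea (the module-level names here are illustrative, not the Supervisor API):

import asyncio
import zoneinfo
from datetime import UTC, datetime, tzinfo

_tz_cache: tzinfo | None = None

async def set_timezone(name: str) -> None:
    global _tz_cache
    loop = asyncio.get_running_loop()
    # First construction hits the filesystem, so run it in the executor.
    _tz_cache = await loop.run_in_executor(None, zoneinfo.ZoneInfo, name)

def now() -> datetime:
    # Every later call reuses the cached object — no I/O on the loop.
    return datetime.now(_tz_cache or UTC)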
@@ -399,7 +399,7 @@ class Core(CoreSysAttributes):
             _LOGGER.warning("Can't adjust Time/Date settings: %s", err)
             return
 
-        self.sys_config.timezone = self.sys_config.timezone or data.timezone
+        await self.sys_config.set_timezone(self.sys_config.timezone or data.timezone)
 
         # Calculate if system time is out of sync
         delta = data.dt_utc - utcnow()
@@ -5,7 +5,7 @@ from __future__ import annotations
 import asyncio
 from collections.abc import Callable, Coroutine
 from contextvars import Context, copy_context
-from datetime import datetime
+from datetime import UTC, datetime, tzinfo
 from functools import partial
 import logging
 import os
@@ -22,7 +22,6 @@ from .const import (
     MACHINE_ID,
     SERVER_SOFTWARE,
 )
-from .utils.dt import UTC, get_time_zone
 
 if TYPE_CHECKING:
     from .addons.manager import AddonManager
@@ -143,13 +142,19 @@ class CoreSys:
         """Return system timezone."""
         if self.config.timezone:
             return self.config.timezone
-        # pylint bug with python 3.12.4 (https://github.com/pylint-dev/pylint/issues/9811)
-        # pylint: disable=no-member
         if self.host.info.timezone:
             return self.host.info.timezone
-        # pylint: enable=no-member
         return "UTC"
 
+    @property
+    def timezone_tzinfo(self) -> tzinfo:
+        """Return system timezone as tzinfo object."""
+        if self.config.timezone_tzinfo:
+            return self.config.timezone_tzinfo
+        if self.host.info.timezone_tzinfo:
+            return self.host.info.timezone_tzinfo
+        return UTC
+
     @property
     def loop(self) -> asyncio.BaseEventLoop:
         """Return loop object."""
@@ -555,7 +560,7 @@ class CoreSys:
 
     def now(self) -> datetime:
         """Return now in local timezone."""
-        return datetime.now(get_time_zone(self.timezone) or UTC)
+        return datetime.now(self.timezone_tzinfo)
 
     def add_set_task_context_callback(
         self, callback: Callable[[Context], Context]
@@ -642,6 +647,11 @@ class CoreSysAttributes:
         """Return running machine type of the Supervisor system."""
         return self.coresys.machine
 
+    @property
+    def sys_machine_id(self) -> str | None:
+        """Return machine id."""
+        return self.coresys.machine_id
+
     @property
     def sys_dev(self) -> bool:
         """Return True if we run dev mode."""
@@ -1,12 +1,14 @@
 """Interface to systemd-timedate over D-Bus."""
 
-from datetime import datetime
+import asyncio
+from datetime import datetime, tzinfo
 import logging
+from typing import Any
 
 from dbus_fast.aio.message_bus import MessageBus
 
 from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
-from ..utils.dt import utc_from_timestamp
+from ..utils.dt import get_time_zone, utc_from_timestamp
 from .const import (
     DBUS_ATTR_NTP,
     DBUS_ATTR_NTPSYNCHRONIZED,
@@ -33,6 +35,11 @@ class TimeDate(DBusInterfaceProxy):
     object_path: str = DBUS_OBJECT_TIMEDATE
     properties_interface: str = DBUS_IFACE_TIMEDATE
 
+    def __init__(self):
+        """Initialize object."""
+        super().__init__()
+        self._timezone_tzinfo: tzinfo | None = None
+
     @property
     @dbus_property
     def timezone(self) -> str:
@@ -57,6 +64,11 @@ class TimeDate(DBusInterfaceProxy):
         """Return the system UTC time."""
         return utc_from_timestamp(self.properties[DBUS_ATTR_TIMEUSEC] / 1000000)
 
+    @property
+    def timezone_tzinfo(self) -> tzinfo | None:
+        """Return timezone as tzinfo object."""
+        return self._timezone_tzinfo
+
     async def connect(self, bus: MessageBus):
         """Connect to D-Bus."""
         _LOGGER.info("Load dbus interface %s", self.name)
@@ -69,6 +81,19 @@ class TimeDate(DBusInterfaceProxy):
                 "No timedate support on the host. Time/Date functions have been disabled."
             )
 
+    @dbus_connected
+    async def update(self, changed: dict[str, Any] | None = None) -> None:
+        """Update properties via D-Bus."""
+        timezone = self.timezone
+        await super().update(changed)
+
+        if not self.timezone:
+            self._timezone_tzinfo = None
+        elif timezone != self.timezone:
+            self._timezone_tzinfo = await asyncio.get_running_loop().run_in_executor(
+                None, get_time_zone, self.timezone
+            )
+
     @dbus_connected
     async def set_time(self, utc: datetime) -> None:
         """Set time & date on host as UTC."""
@@ -665,18 +665,20 @@ class DockerAddon(DockerInterface):
     async def _build(self, version: AwesomeVersion, image: str | None = None) -> None:
         """Build a Docker container."""
         build_env = await AddonBuild(self.coresys, self.addon).load_config()
-        if not build_env.is_valid:
+        if not await build_env.is_valid():
             _LOGGER.error("Invalid build environment, can't build this add-on!")
             raise DockerError()
 
         _LOGGER.info("Starting build for %s:%s", self.image, version)
-        try:
-            image, log = await self.sys_run_in_executor(
-                self.sys_docker.images.build,
-                use_config_proxy=False,
-                **build_env.get_docker_args(version, image),
+
+        def build_image():
+            return self.sys_docker.images.build(
+                use_config_proxy=False, **build_env.get_docker_args(version, image)
             )
 
+        try:
+            image, log = await self.sys_run_in_executor(build_image)
+
             _LOGGER.debug("Build %s:%s done: %s", self.image, version, log)
 
             # Update meta data
@@ -5,7 +5,7 @@ import logging
 import docker
 from docker.types import Mount
 
-from ..const import DOCKER_CPU_RUNTIME_ALLOCATION, MACHINE_ID
+from ..const import DOCKER_CPU_RUNTIME_ALLOCATION
 from ..coresys import CoreSysAttributes
 from ..exceptions import DockerJobError
 from ..hardware.const import PolicyGroup
@@ -57,7 +57,7 @@ class DockerAudio(DockerInterface, CoreSysAttributes):
         ]
 
         # Machine ID
-        if MACHINE_ID.exists():
+        if self.sys_machine_id:
             mounts.append(MOUNT_MACHINE_ID)
 
         return mounts
@@ -8,7 +8,7 @@ import re
 from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
 from docker.types import Mount
 
-from ..const import LABEL_MACHINE, MACHINE_ID
+from ..const import LABEL_MACHINE
 from ..exceptions import DockerJobError
 from ..hardware.const import PolicyGroup
 from ..homeassistant.const import LANDINGPAGE
@@ -154,7 +154,7 @@ class DockerHomeAssistant(DockerInterface):
         )
 
         # Machine ID
-        if MACHINE_ID.exists():
+        if self.sys_machine_id:
             mounts.append(MOUNT_MACHINE_ID)
 
         return mounts
@@ -2,6 +2,7 @@
 
 import logging
 from pathlib import Path
+from typing import Self
 
 import pyudev
 
@@ -51,17 +52,25 @@ class HardwareManager(CoreSysAttributes):
         """Initialize Hardware Monitor object."""
         self.coresys: CoreSys = coresys
         self._devices: dict[str, Device] = {}
-        self._udev = pyudev.Context()
+        self._udev: pyudev.Context | None = None
 
-        self._montior: HwMonitor = HwMonitor(coresys)
+        self._monitor: HwMonitor | None = None
         self._helper: HwHelper = HwHelper(coresys)
         self._policy: HwPolicy = HwPolicy(coresys)
         self._disk: HwDisk = HwDisk(coresys)
 
+    async def post_init(self) -> Self:
+        """Complete initialization of object within event loop."""
+        self._udev = await self.sys_run_in_executor(pyudev.Context)
+        self._monitor: HwMonitor = HwMonitor(self.coresys, self._udev)
+        return self
+
     @property
     def monitor(self) -> HwMonitor:
         """Return Hardware Monitor instance."""
-        return self._montior
+        if not self._monitor:
+            raise RuntimeError("Hardware monitor not initialized!")
+        return self._monitor
 
     @property
     def helper(self) -> HwHelper:
@@ -20,10 +20,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 class HwMonitor(CoreSysAttributes):
     """Hardware monitor for supervisor."""
 
-    def __init__(self, coresys: CoreSys):
+    def __init__(self, coresys: CoreSys, context: pyudev.Context):
         """Initialize Hardware Monitor object."""
         self.coresys: CoreSys = coresys
-        self.context = pyudev.Context()
+        self.context = context
         self.monitor: pyudev.Monitor | None = None
         self.observer: pyudev.MonitorObserver | None = None
 
@@ -49,18 +49,21 @@ class HomeAssistantSecrets(CoreSysAttributes):
     )
     async def _read_secrets(self):
         """Read secrets.yaml into memory."""
-        if not self.path_secrets.exists():
-            _LOGGER.debug("Home Assistant secrets.yaml does not exist")
-            return
 
-        # Read secrets
-        try:
-            secrets = await self.sys_run_in_executor(read_yaml_file, self.path_secrets)
-        except YamlFileError as err:
-            _LOGGER.warning("Can't read Home Assistant secrets: %s", err)
-            return
+        def read_secrets_yaml() -> dict | None:
+            if not self.path_secrets.exists():
+                _LOGGER.debug("Home Assistant secrets.yaml does not exist")
+                return None
 
-        if not isinstance(secrets, dict):
+            # Read secrets
+            try:
+                return read_yaml_file(self.path_secrets)
+            except YamlFileError as err:
+                _LOGGER.warning("Can't read Home Assistant secrets: %s", err)
+                return None
+
+        secrets = await self.sys_run_in_executor(read_secrets_yaml)
+        if secrets is None or not isinstance(secrets, dict):
             return
 
         # Process secrets
@@ -1,7 +1,7 @@
 """Info control for host."""
 
 import asyncio
-from datetime import datetime
+from datetime import datetime, tzinfo
 import logging
 
 from ..coresys import CoreSysAttributes
@@ -72,6 +72,11 @@ class InfoCenter(CoreSysAttributes):
         """Return host timezone."""
         return self.sys_dbus.timedate.timezone
 
+    @property
+    def timezone_tzinfo(self) -> tzinfo | None:
+        """Return host timezone as tzinfo object."""
+        return self.sys_dbus.timedate.timezone_tzinfo
+
     @property
     def dt_utc(self) -> datetime | None:
         """Return host UTC time."""
@@ -8,6 +8,7 @@ import json
 import logging
 import os
 from pathlib import Path
+from typing import Self
 
 from aiohttp import ClientError, ClientSession, ClientTimeout
 from aiohttp.client_exceptions import UnixClientConnectorError
@@ -51,13 +52,19 @@ class LogsControl(CoreSysAttributes):
         self._profiles: set[str] = set()
         self._boot_ids: list[str] = []
         self._default_identifiers: list[str] = []
+        self._available: bool = False
+
+    async def post_init(self) -> Self:
+        """Post init actions that must occur in event loop."""
+        self._available = bool(
+            os.environ.get("SUPERVISOR_SYSTEMD_JOURNAL_GATEWAYD_URL")
+        ) or await self.sys_run_in_executor(SYSTEMD_JOURNAL_GATEWAYD_SOCKET.is_socket)
+        return self
 
     @property
     def available(self) -> bool:
         """Check if systemd-journal-gatwayd is available."""
-        if os.environ.get("SUPERVISOR_SYSTEMD_JOURNAL_GATEWAYD_URL"):
-            return True
-        return SYSTEMD_JOURNAL_GATEWAYD_SOCKET.is_socket()
+        return self._available
 
     @property
     def boot_ids(self) -> list[str]:
@@ -3,6 +3,7 @@
 from contextlib import suppress
 from functools import lru_cache
 import logging
+from typing import Self
 
 from awesomeversion import AwesomeVersion
 
@@ -38,6 +39,11 @@ class HostManager(CoreSysAttributes):
         self._sound: SoundControl = SoundControl(coresys)
         self._logs: LogsControl = LogsControl(coresys)
 
+    async def post_init(self) -> Self:
+        """Post init actions that must occur in event loop."""
+        await self._logs.post_init()
+        return self
+
     @property
     def apparmor(self) -> AppArmorControl:
         """Return host AppArmor handler."""
@@ -183,7 +183,7 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
             raise err
 
         else:
-            if not repository.validate():
+            if not await self.sys_run_in_executor(repository.validate):
                 if add_with_errors:
                     _LOGGER.error("%s is not a valid add-on repository", url)
                     self.sys_resolution.create_issue(
@@ -49,7 +49,7 @@ class GitRepo(CoreSysAttributes):
 
     async def load(self) -> None:
         """Init Git add-on repository."""
-        if not (self.path / ".git").is_dir():
+        if not await self.sys_run_in_executor((self.path / ".git").is_dir):
             await self.clone()
             return
 
@@ -69,7 +69,10 @@ def utc_from_timestamp(timestamp: float) -> datetime:
 
 
 def get_time_zone(time_zone_str: str) -> tzinfo | None:
-    """Get time zone from string. Return None if unable to determine."""
+    """Get time zone from string. Return None if unable to determine.
+
+    Must be run in executor.
+    """
     try:
         return zoneinfo.ZoneInfo(time_zone_str)
     except zoneinfo.ZoneInfoNotFoundError:
@@ -18,7 +18,10 @@ def schema_or(schema):
 
 
 def validate_timezone(timezone):
-    """Validate voluptuous timezone."""
+    """Validate voluptuous timezone.
+
+    Must be run in executor.
+    """
    if get_time_zone(timezone) is not None:
        return timezone
    raise vol.Invalid(
@@ -20,7 +20,9 @@ async def test_platform_set(coresys: CoreSys, install_addon_ssh: Addon):
             type(coresys.arch), "default", new=PropertyMock(return_value="amd64")
         ),
     ):
-        args = build.get_docker_args(AwesomeVersion("latest"))
+        args = await coresys.run_in_executor(
+            build.get_docker_args, AwesomeVersion("latest")
+        )
 
     assert args["platform"] == "linux/amd64"
 
@@ -36,10 +38,14 @@ async def test_dockerfile_evaluation(coresys: CoreSys, install_addon_ssh: Addon)
             type(coresys.arch), "default", new=PropertyMock(return_value="amd64")
         ),
     ):
-        args = build.get_docker_args(AwesomeVersion("latest"))
+        args = await coresys.run_in_executor(
+            build.get_docker_args, AwesomeVersion("latest")
+        )
 
     assert args["dockerfile"].endswith("fixtures/addons/local/ssh/Dockerfile")
-    assert str(build.dockerfile).endswith("fixtures/addons/local/ssh/Dockerfile")
+    assert str(await coresys.run_in_executor(build.get_dockerfile)).endswith(
+        "fixtures/addons/local/ssh/Dockerfile"
+    )
     assert build.arch == "amd64"
 
 
@@ -54,10 +60,12 @@ async def test_dockerfile_evaluation_arch(coresys: CoreSys, install_addon_ssh: A
             type(coresys.arch), "default", new=PropertyMock(return_value="aarch64")
         ),
    ):
-        args = build.get_docker_args(AwesomeVersion("latest"))
+        args = await coresys.run_in_executor(
+            build.get_docker_args, AwesomeVersion("latest")
+        )
 
     assert args["dockerfile"].endswith("fixtures/addons/local/ssh/Dockerfile.aarch64")
-    assert str(build.dockerfile).endswith(
+    assert str(await coresys.run_in_executor(build.get_dockerfile)).endswith(
         "fixtures/addons/local/ssh/Dockerfile.aarch64"
     )
     assert build.arch == "aarch64"
@@ -74,7 +82,7 @@ async def test_build_valid(coresys: CoreSys, install_addon_ssh: Addon):
             type(coresys.arch), "default", new=PropertyMock(return_value="aarch64")
         ),
     ):
-        assert build.is_valid
+        assert await build.is_valid()
 
 
 async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon):
@@ -88,4 +96,4 @@ async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon):
             type(coresys.arch), "default", new=PropertyMock(return_value="amd64")
         ),
     ):
-        assert not build.is_valid
+        assert not await build.is_valid()
@@ -409,7 +409,7 @@ async def test_repository_file_error(
         in caplog.text
     )
 
-    write_json_file(repo_file, {"invalid": "bad"})
+    await coresys.run_in_executor(write_json_file, repo_file, {"invalid": "bad"})
    await coresys.store.data.update()
     assert f"Repository parse error {repo_dir.as_posix()}" in caplog.text
 
@@ -234,7 +234,7 @@ async def test_api_addon_rebuild_healthcheck(
     _container_events_task = asyncio.create_task(container_events())
 
     with (
-        patch.object(AddonBuild, "is_valid", new=PropertyMock(return_value=True)),
+        patch.object(AddonBuild, "is_valid", return_value=True),
         patch.object(DockerAddon, "is_running", return_value=False),
         patch.object(Addon, "need_build", new=PropertyMock(return_value=True)),
         patch.object(CpuArch, "supported", new=PropertyMock(return_value=["amd64"])),
@@ -327,9 +327,9 @@ async def test_advanced_logs_boot_id_offset(
 
 
 async def test_advanced_logs_formatters(
+    journald_gateway: MagicMock,
     api_client: TestClient,
     coresys: CoreSys,
-    journald_gateway: MagicMock,
     journal_logs_reader: MagicMock,
 ):
     """Test advanced logs formatters varying on Accept header."""
tests/api/test_panel.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+"""Test panel API."""
+
+from pathlib import Path
+
+from aiohttp.test_utils import TestClient
+import pytest
+
+from supervisor.coresys import CoreSys
+
+PANEL_PATH = Path(__file__).parent.parent.parent.joinpath("supervisor/api/panel")
+
+
+@pytest.mark.parametrize(
+    "filename", ["entrypoint.js", "entrypoint.js.br", "entrypoint.js.gz"]
+)
+async def test_frontend_files(api_client: TestClient, coresys: CoreSys, filename: str):
+    """Test frontend files served up correctly."""
+    resp = await api_client.get(f"/app/{filename}")
+    assert resp.status == 200
+
+    body = await resp.read()
+    file_bytes = await coresys.run_in_executor(PANEL_PATH.joinpath(filename).read_bytes)
+    assert body == file_bytes
@@ -233,3 +233,17 @@ async def test_api_supervisor_reload(api_client: TestClient):
     """Test supervisor reload."""
     resp = await api_client.post("/supervisor/reload")
     assert resp.status == 200
+
+
+async def test_api_supervisor_options_timezone(
+    api_client: TestClient, coresys: CoreSys
+):
+    """Test setting supervisor timezone via API."""
+    assert coresys.timezone == "Etc/UTC"
+
+    resp = await api_client.post(
+        "/supervisor/options", json={"timezone": "Europe/Zurich"}
+    )
+    assert resp.status == 200
+
+    assert coresys.timezone == "Europe/Zurich"
@@ -1,6 +1,8 @@
 """Common test functions."""
 
+import asyncio
 from datetime import datetime
+from functools import partial
 from importlib import import_module
 from inspect import getclosurevars
 import json
@@ -68,7 +70,9 @@ async def mock_dbus_services(
     services: dict[str, list[DBusServiceMock] | DBusServiceMock] = {}
     requested_names: set[str] = set()
 
-    for module in get_valid_modules("dbus_service_mocks", base=__file__):
+    for module in await asyncio.get_running_loop().run_in_executor(
+        None, partial(get_valid_modules, base=__file__), "dbus_service_mocks"
+    ):
         if module in to_mock:
             service_module = import_module(f"{__package__}.dbus_service_mocks.{module}")
 
@@ -12,6 +12,7 @@ from uuid import uuid4
 from aiohttp import web
 from aiohttp.test_utils import TestClient
 from awesomeversion import AwesomeVersion
+from blockbuster import BlockBuster, blockbuster_ctx
 from dbus_fast import BusType
 from dbus_fast.aio.message_bus import MessageBus
 import pytest
@@ -63,6 +64,24 @@ from .dbus_service_mocks.network_manager import NetworkManager as NetworkManager
 # pylint: disable=redefined-outer-name, protected-access
 
 
+# This commented out code is left in intentionally
+# Intent is to enable this for all tests at all times as an autouse fixture
+# Findings from PR were growing too big so disabling temporarily to create a checkpoint
+# @pytest.fixture(autouse=True)
+def blockbuster(request: pytest.FixtureRequest) -> BlockBuster:
+    """Raise for blocking I/O in event loop."""
+    # Excluded modules doesn't seem to stop test code from raising for blocking I/O
+    # Defaulting to only scanning supervisor core code seems like the best we can do easily
+    # Added a parameter so we could potentially go module by module in test and eliminate blocking I/O
+    # Then we could tell it to scan everything by default. That will require more follow-up work
+
+    # pylint: disable-next=contextmanager-generator-missing-cleanup
+    with blockbuster_ctx(
+        scanned_modules=getattr(request, "param", ["supervisor"])
+    ) as bb:
+        yield bb
+
+
 @pytest.fixture
 async def path_extern() -> None:
     """Set external path env for tests."""
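Because the fixture reads request.param with a default, individual tests can widen the scanned modules through pytest's indirect parametrization; a hypothetical usage example:

import pytest

@pytest.mark.parametrize("blockbuster", [["supervisor", "tests"]], indirect=True)
async def test_no_blocking_io(blockbuster):
    ...  # blocking calls from either package now raise inside the event loop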
@ -33,7 +33,7 @@ async def test_load(coresys: CoreSys):
|
|||||||
assert identifier in coresys.host.logs.default_identifiers
|
assert identifier in coresys.host.logs.default_identifiers
|
||||||
|
|
||||||
|
|
||||||
async def test_logs(coresys: CoreSys, journald_gateway: MagicMock):
|
async def test_logs(journald_gateway: MagicMock, coresys: CoreSys):
|
||||||
"""Test getting logs and errors."""
|
"""Test getting logs and errors."""
|
||||||
assert coresys.host.logs.available is True
|
assert coresys.host.logs.available is True
|
||||||
|
|
||||||
@ -63,7 +63,7 @@ async def test_logs(coresys: CoreSys, journald_gateway: MagicMock):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
async def test_logs_coloured(coresys: CoreSys, journald_gateway: MagicMock):
|
async def test_logs_coloured(journald_gateway: MagicMock, coresys: CoreSys):
|
||||||
"""Test ANSI control sequences being preserved in binary messages."""
|
"""Test ANSI control sequences being preserved in binary messages."""
|
||||||
journald_gateway.content.feed_data(
|
journald_gateway.content.feed_data(
|
||||||
load_fixture("logs_export_supervisor.txt").encode("utf-8")
|
load_fixture("logs_export_supervisor.txt").encode("utf-8")
|
||||||
@ -82,7 +82,7 @@ async def test_logs_coloured(coresys: CoreSys, journald_gateway: MagicMock):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
async def test_boot_ids(coresys: CoreSys, journald_gateway: MagicMock):
|
async def test_boot_ids(journald_gateway: MagicMock, coresys: CoreSys):
|
||||||
"""Test getting boot ids."""
|
"""Test getting boot ids."""
|
||||||
journald_gateway.content.feed_data(
|
journald_gateway.content.feed_data(
|
||||||
load_fixture("logs_boot_ids.txt").encode("utf-8")
|
load_fixture("logs_boot_ids.txt").encode("utf-8")
|
||||||
@ -109,7 +109,7 @@ async def test_boot_ids(coresys: CoreSys, journald_gateway: MagicMock):
|
|||||||
await coresys.host.logs.get_boot_id(3)
|
await coresys.host.logs.get_boot_id(3)
|
||||||
|
|
||||||
|
|
||||||
async def test_boot_ids_fallback(coresys: CoreSys, journald_gateway: MagicMock):
|
async def test_boot_ids_fallback(journald_gateway: MagicMock, coresys: CoreSys):
|
||||||
"""Test getting boot ids using fallback."""
|
"""Test getting boot ids using fallback."""
|
||||||
# Initial response has no log lines
|
# Initial response has no log lines
|
||||||
journald_gateway.content.feed_data(b"")
|
journald_gateway.content.feed_data(b"")
|
||||||
@ -134,7 +134,7 @@ async def test_boot_ids_fallback(coresys: CoreSys, journald_gateway: MagicMock):
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
async def test_identifiers(coresys: CoreSys, journald_gateway: MagicMock):
|
async def test_identifiers(journald_gateway: MagicMock, coresys: CoreSys):
|
||||||
"""Test getting identifiers."""
|
"""Test getting identifiers."""
|
||||||
journald_gateway.content.feed_data(
|
journald_gateway.content.feed_data(
|
||||||
load_fixture("logs_identifiers.txt").encode("utf-8")
|
load_fixture("logs_identifiers.txt").encode("utf-8")
|
||||||
@ -156,7 +156,7 @@ async def test_identifiers(coresys: CoreSys, journald_gateway: MagicMock):
|
|||||||
|
|
||||||
|
|
||||||
async def test_connection_refused_handled(
|
async def test_connection_refused_handled(
|
||||||
coresys: CoreSys, journald_gateway: MagicMock
|
journald_gateway: MagicMock, coresys: CoreSys
|
||||||
):
|
):
|
||||||
"""Test connection refused is handled with HostServiceError."""
|
"""Test connection refused is handled with HostServiceError."""
|
||||||
with patch("supervisor.host.logs.ClientSession.get") as get:
|
with patch("supervisor.host.logs.ClientSession.get") as get:
|
||||||
@@ -114,5 +114,5 @@ async def test_get_checks(coresys: CoreSys):
 async def test_dynamic_check_loader(coresys: CoreSys):
     """Test dynamic check loader, this ensures that all checks have defined a setup function."""
     coresys.resolution.check.load_modules()
-    for check in get_valid_modules("checks"):
+    for check in await coresys.run_in_executor(get_valid_modules, "checks"):
        assert check in coresys.resolution.check._checks
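This hunk and the fixup-loader hunk further down route the module scan through coresys.run_in_executor instead of fetching the loop at every call site. The helper itself lives in the Supervisor codebase; as a rough, hypothetical sketch of what such a wrapper usually reduces to:

import asyncio
from collections.abc import Callable
from typing import Any, TypeVar

T = TypeVar("T")

class CoreSysLike:
    """Hypothetical shape of a run_in_executor convenience method."""

    async def run_in_executor(self, funct: Callable[..., T], *args: Any) -> T:
        # Offload the blocking call to the loop's default thread pool.
        return await asyncio.get_running_loop().run_in_executor(None, funct, *args)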
@@ -18,7 +18,9 @@ async def test_evaluation(coresys: CoreSys):
 
     assert operating_system.reason not in coresys.resolution.unsupported
 
-    coresys.host._info = MagicMock(operating_system="unsupported", timezone=None)
+    coresys.host._info = MagicMock(
+        operating_system="unsupported", timezone=None, timezone_tzinfo=None
+    )
     await operating_system()
     assert operating_system.reason in coresys.resolution.unsupported
 
@@ -27,7 +29,9 @@ async def test_evaluation(coresys: CoreSys):
     assert operating_system.reason not in coresys.resolution.unsupported
     coresys.os._available = False
 
-    coresys.host._info = MagicMock(operating_system=SUPPORTED_OS[0], timezone=None)
+    coresys.host._info = MagicMock(
+        operating_system=SUPPORTED_OS[0], timezone=None, timezone_tzinfo=None
+    )
     await operating_system()
     assert operating_system.reason not in coresys.resolution.unsupported
 
@@ -16,7 +16,7 @@ async def test_evaluation(coresys: CoreSys):
 
     assert agent.reason not in coresys.resolution.unsupported
 
-    coresys._host = MagicMock(info=MagicMock(timezone=None))
+    coresys._host = MagicMock(info=MagicMock(timezone=None, timezone_tzinfo=None))
 
     coresys.host.features = [HostFeature.HOSTNAME]
     await agent()
@@ -16,7 +16,7 @@ async def test_evaluation(coresys: CoreSys):
 
     assert systemd.reason not in coresys.resolution.unsupported
 
-    coresys._host = MagicMock(info=MagicMock(timezone=None))
+    coresys._host = MagicMock(info=MagicMock(timezone=None, timezone_tzinfo=None))
 
     coresys.host.features = [HostFeature.HOSTNAME]
     await systemd()
@@ -8,22 +8,23 @@ from supervisor.coresys import CoreSys
 from supervisor.resolution.evaluations.systemd_journal import EvaluateSystemdJournal
 
 
-async def test_evaluation(coresys: CoreSys, journald_gateway: MagicMock):
-    """Test evaluation."""
+async def test_evaluation_supported(journald_gateway: MagicMock, coresys: CoreSys):
+    """Test evaluation for supported system."""
     systemd_journal = EvaluateSystemdJournal(coresys)
     await coresys.core.set_state(CoreState.SETUP)
 
+    await systemd_journal()
     assert systemd_journal.reason not in coresys.resolution.unsupported
 
-    with patch("supervisor.host.logs.Path.is_socket", return_value=False):
+
+async def test_evaluation_unsupported(coresys: CoreSys):
+    """Test evaluation for unsupported system."""
+    systemd_journal = EvaluateSystemdJournal(coresys)
+    await coresys.core.set_state(CoreState.SETUP)
+
     await systemd_journal()
     assert systemd_journal.reason in coresys.resolution.unsupported
-
-    coresys.host.supported_features.cache_clear()  # pylint: disable=no-member
-
-    await systemd_journal()
-    assert systemd_journal.reason not in coresys.resolution.unsupported
 
 
 async def test_did_run(coresys: CoreSys):
     """Test that the evaluation ran as expected."""
@@ -43,7 +43,7 @@ async def test_check_autofix(coresys: CoreSys):
     assert len(coresys.resolution.suggestions) == 0
 
 
-def test_dynamic_fixup_loader(coresys: CoreSys):
+async def test_dynamic_fixup_loader(coresys: CoreSys):
     """Test dynamic fixup loader, this ensures that all fixups have defined a setup function."""
-    for fixup in get_valid_modules("fixups"):
+    for fixup in await coresys.run_in_executor(get_valid_modules, "fixups"):
         assert fixup in coresys.resolution.fixup._fixups
@@ -223,6 +223,7 @@ async def test_install_unavailable_addon(
     assert log in caplog.text
 
 
+@pytest.mark.usefixtures("tmp_supervisor_data")
 async def test_reload(coresys: CoreSys):
     """Test store reload."""
     await coresys.store.load()
@@ -21,13 +21,13 @@ async def test_timezone(coresys: CoreSys):
     await coresys.dbus.timedate.connect(coresys.dbus.bus)
     assert coresys.timezone == "Etc/UTC"
 
-    coresys.config.timezone = "Europe/Zurich"
+    await coresys.config.set_timezone("Europe/Zurich")
     assert coresys.timezone == "Europe/Zurich"
 
 
-def test_now(coresys: CoreSys):
+async def test_now(coresys: CoreSys):
     """Test datetime now with local time."""
-    coresys.config.timezone = "Europe/Zurich"
+    await coresys.config.set_timezone("Europe/Zurich")
 
     zurich = coresys.now()
     utc = utcnow()
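The timezone write becomes awaitable because resolving a tzinfo object reads tzdata from disk, which blockbuster flags when it happens on the event loop; per the commit notes, validation also only runs when the value actually changes. A hedged sketch of that pattern (illustrative names, not the Supervisor implementation):

import asyncio
from zoneinfo import ZoneInfo

class Config:
    """Illustrative config object with an async timezone setter."""

    def __init__(self) -> None:
        self._timezone = "Etc/UTC"
        self._tzinfo: ZoneInfo | None = None

    async def set_timezone(self, value: str) -> None:
        if value == self._timezone:
            return  # only validate on change
        # ZoneInfo() reads tzdata from disk, so keep it off the event loop.
        self._tzinfo = await asyncio.get_running_loop().run_in_executor(
            None, ZoneInfo, value
        )
        self._timezone = value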