Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-07-13 12:16:29 +00:00)
FileConfiguration uses executor for I/O (#5652)
* FileConfiguration uses executor for I/O
* Fix credentials tests
* Remove migrate_system_env as it's very deprecated
This commit is contained in: parent ae266e1692, commit 31193abb7b
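The diff below converts FileConfiguration.read_data() and save_data() into coroutines that run the blocking file I/O in the default executor, and adds an awaitable load_config() hook so consumers can be built as, for example, await Auth(coresys).load_config(). A minimal sketch of that pattern, assuming a toy Config class with plain JSON storage (illustrative only, not the Supervisor API):

import asyncio
import json
from pathlib import Path
from typing import Any, Self


class Config:
    """Toy stand-in for FileConfiguration: blocking file I/O moved to the executor."""

    def __init__(self, file_path: Path | None) -> None:
        self._file = file_path
        self._data: dict[str, Any] = {}

    async def load_config(self) -> Self:
        """Awaitable constructor hook: cfg = await Config(path).load_config()."""
        await self.read_data()
        return self

    async def read_data(self) -> None:
        """Read the file in the default executor so the event loop is not blocked."""

        def _read() -> dict[str, Any]:
            if self._file and self._file.is_file():
                return json.loads(self._file.read_text())
            return {}

        self._data = await asyncio.get_running_loop().run_in_executor(None, _read)

    async def save_data(self) -> None:
        """Write the file in the default executor."""
        if not self._file:
            raise RuntimeError("Path to config file must be set!")
        payload = json.dumps(self._data)
        await asyncio.get_running_loop().run_in_executor(None, self._file.write_text, payload)


async def main() -> None:
    config = await Config(Path("example.json")).load_config()
    config._data["answer"] = 42  # toy demo: poke the private dict directly
    await config.save_data()


if __name__ == "__main__":
    asyncio.run(main())

Because save_data() and friends are now awaited at every call site, the change fans out across nearly every consumer module, and the test fixtures below replace MagicMock with AsyncMock when stubbing them.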
@@ -55,7 +55,6 @@ if __name__ == "__main__":
loop.run_until_complete(coresys.core.connect())

bootstrap.supervisor_debugger(coresys)
- bootstrap.migrate_system_env(coresys)

# Signal health startup for container
run_os_startup_check_cleanup()
@@ -243,7 +243,7 @@ class Addon(AddonModel):
await self.instance.install(self.version, default_image, arch=self.arch)

self.persist[ATTR_IMAGE] = default_image
- self.save_persist()
+ await self.save_persist()

@property
def ip_address(self) -> IPv4Address:

@@ -667,9 +667,9 @@ class Addon(AddonModel):
"""Is add-on loaded."""
return bool(self._listeners)

- def save_persist(self) -> None:
+ async def save_persist(self) -> None:
"""Save data of add-on."""
- self.sys_addons.data.save_data()
+ await self.sys_addons.data.save_data()

async def watchdog_application(self) -> bool:
"""Return True if application is running."""

@@ -772,7 +772,7 @@ class Addon(AddonModel):
)
async def install(self) -> None:
"""Install and setup this addon."""
- self.sys_addons.data.install(self.addon_store)
+ await self.sys_addons.data.install(self.addon_store)
await self.load()

if not self.path_data.is_dir():

@@ -790,7 +790,7 @@ class Addon(AddonModel):
self.latest_version, self.addon_store.image, arch=self.arch
)
except DockerError as err:
- self.sys_addons.data.uninstall(self)
+ await self.sys_addons.data.uninstall(self)
raise AddonsError() from err

# Add to addon manager

@@ -839,14 +839,14 @@ class Addon(AddonModel):

# Cleanup Ingress dynamic port assignment
if self.with_ingress:
+ await self.sys_ingress.del_dynamic_port(self.slug)
self.sys_create_task(self.sys_ingress.reload())
- self.sys_ingress.del_dynamic_port(self.slug)

# Cleanup discovery data
for message in self.sys_discovery.list_messages:
if message.addon != self.slug:
continue
- self.sys_discovery.remove(message)
+ await self.sys_discovery.remove(message)

# Cleanup services data
for service in self.sys_services.list_services:

@@ -855,7 +855,7 @@ class Addon(AddonModel):
service.del_service_data(self)

# Remove from addon manager
- self.sys_addons.data.uninstall(self)
+ await self.sys_addons.data.uninstall(self)
self.sys_addons.local.pop(self.slug)

@Job(

@@ -884,7 +884,7 @@ class Addon(AddonModel):

try:
_LOGGER.info("Add-on '%s' successfully updated", self.slug)
- self.sys_addons.data.update(store)
+ await self.sys_addons.data.update(store)
await self._check_ingress_port()

# Cleanup

@@ -925,7 +925,7 @@ class Addon(AddonModel):
except DockerError as err:
raise AddonsError() from err

- self.sys_addons.data.update(self.addon_store)
+ await self.sys_addons.data.update(self.addon_store)
await self._check_ingress_port()
_LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)

@@ -1053,7 +1053,7 @@ class Addon(AddonModel):

# Access Token
self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
- self.save_persist()
+ await self.save_persist()

# Options
await self.write_options()

@@ -1398,7 +1398,7 @@ class Addon(AddonModel):
# Restore local add-on information
_LOGGER.info("Restore config for addon %s", self.slug)
restore_image = self._image(data[ATTR_SYSTEM])
- self.sys_addons.data.restore(
+ await self.sys_addons.data.restore(
self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
)
@@ -34,16 +34,29 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
self.coresys: CoreSys = coresys
self.addon = addon

+ # Search for build file later in executor
+ super().__init__(None, SCHEMA_BUILD_CONFIG)
+
+ def _get_build_file(self) -> Path:
+ """Get build file.
+
+ Must be run in executor.
+ """
try:
- build_file = find_one_filetype(
+ return find_one_filetype(
self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
)
except ConfigurationFileError:
- build_file = self.addon.path_location / "build.json"
+ return self.addon.path_location / "build.json"

- super().__init__(build_file, SCHEMA_BUILD_CONFIG)
+ async def read_data(self) -> None:
+ """Load data from file."""
+ if not self._file:
+ self._file = await self.sys_run_in_executor(self._get_build_file)
+
+ await super().read_data()

- def save_data(self):
+ async def save_data(self):
"""Ignore save function."""
raise RuntimeError()
@@ -38,7 +38,7 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
"""Return local add-on data."""
return self._data[ATTR_SYSTEM]

- def install(self, addon: AddonStore) -> None:
+ async def install(self, addon: AddonStore) -> None:
"""Set addon as installed."""
self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug] = {

@@ -46,26 +46,28 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
ATTR_VERSION: addon.version,
ATTR_IMAGE: addon.image,
}
- self.save_data()
+ await self.save_data()

- def uninstall(self, addon: Addon) -> None:
+ async def uninstall(self, addon: Addon) -> None:
"""Set add-on as uninstalled."""
self.system.pop(addon.slug, None)
self.user.pop(addon.slug, None)
- self.save_data()
+ await self.save_data()

- def update(self, addon: AddonStore) -> None:
+ async def update(self, addon: AddonStore) -> None:
"""Update version of add-on."""
self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug].update(
{ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
)
- self.save_data()
+ await self.save_data()

- def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
+ async def restore(
+ self, slug: str, user: Config, system: Config, image: str
+ ) -> None:
"""Restore data to add-on."""
self.user[slug] = deepcopy(user)
self.system[slug] = deepcopy(system)

self.user[slug][ATTR_IMAGE] = image
- self.save_data()
+ await self.save_data()
@@ -5,7 +5,7 @@ from collections.abc import Awaitable
from contextlib import suppress
import logging
import tarfile
- from typing import Union
+ from typing import Self, Union

from attr import evolve

@@ -74,6 +74,11 @@ class AddonManager(CoreSysAttributes):
return addon
return None

+ async def load_config(self) -> Self:
+ """Load config in executor."""
+ await self.data.read_data()
+ return self
+
async def load(self) -> None:
"""Start up add-on management."""
# Refresh cache for all store addons
@@ -322,7 +322,7 @@ class APIAddons(CoreSysAttributes):
if ATTR_WATCHDOG in body:
addon.watchdog = body[ATTR_WATCHDOG]

- addon.save_persist()
+ await addon.save_persist()

@api_process
async def sys_options(self, request: web.Request) -> None:

@@ -336,7 +336,7 @@ class APIAddons(CoreSysAttributes):
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]

- addon.save_persist()
+ await addon.save_persist()

@api_process
async def options_validate(self, request: web.Request) -> None:

@@ -402,7 +402,7 @@ class APIAddons(CoreSysAttributes):
_LOGGER.warning("Changing protected flag for %s!", addon.slug)
addon.protected = body[ATTR_PROTECTED]

- addon.save_persist()
+ await addon.save_persist()

@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
@@ -99,7 +99,7 @@ class APIAuth(CoreSysAttributes):
@api_process
async def cache(self, request: web.Request) -> None:
"""Process cache reset request."""
- self.sys_auth.reset_data()
+ await self.sys_auth.reset_data()

@api_process
async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
@@ -83,7 +83,7 @@ class APIDiscovery(CoreSysAttributes):
)

# Process discovery message
- message = self.sys_discovery.send(addon, **body)
+ message = await self.sys_discovery.send(addon, **body)

return {ATTR_UUID: message.uuid}

@@ -110,5 +110,5 @@
if message.addon != addon.slug:
raise APIForbidden("Can't remove discovery message")

- self.sys_discovery.remove(message)
+ await self.sys_discovery.remove(message)
return True
@@ -99,7 +99,7 @@ class APIJobs(CoreSysAttributes):
@api_process
async def reset(self, request: web.Request) -> None:
"""Reset options for JobManager."""
- self.sys_jobs.reset_data()
+ await self.sys_jobs.reset_data()

@api_process
async def job_info(self, request: web.Request) -> dict[str, Any]:
@@ -46,7 +46,7 @@ class Auth(FileConfiguration, CoreSysAttributes):
return True
return False

- def _update_cache(self, username: str, password: str) -> None:
+ async def _update_cache(self, username: str, password: str) -> None:
"""Cache a username, password."""
username_h = self._rehash(username)
password_h = self._rehash(password, username)

@@ -55,9 +55,9 @@ class Auth(FileConfiguration, CoreSysAttributes):
return

self._data[username_h] = password_h
- self.save_data()
+ await self.save_data()

- def _dismatch_cache(self, username: str, password: str) -> None:
+ async def _dismatch_cache(self, username: str, password: str) -> None:
"""Remove user from cache."""
username_h = self._rehash(username)
password_h = self._rehash(password, username)

@@ -66,7 +66,7 @@ class Auth(FileConfiguration, CoreSysAttributes):
return

self._data.pop(username_h, None)
- self.save_data()
+ await self.save_data()

async def check_login(self, addon: Addon, username: str, password: str) -> bool:
"""Check username login."""

@@ -109,11 +109,11 @@ class Auth(FileConfiguration, CoreSysAttributes):
) as req:
if req.status == 200:
_LOGGER.info("Successful login for '%s'", username)
- self._update_cache(username, password)
+ await self._update_cache(username, password)
return True

_LOGGER.warning("Unauthorized login for '%s'", username)
- self._dismatch_cache(username, password)
+ await self._dismatch_cache(username, password)
return False
except HomeAssistantAPIError:
_LOGGER.error("Can't request auth on Home Assistant!")
@@ -3,7 +3,6 @@
# ruff: noqa: T100
import logging
import os
- from pathlib import Path
import signal

from colorlog import ColoredFormatter

@@ -15,8 +14,6 @@ from .auth import Auth
from .backups.manager import BackupManager
from .bus import Bus
from .const import (
- ATTR_ADDONS_CUSTOM_LIST,
- ATTR_REPOSITORIES,
ENV_HOMEASSISTANT_REPOSITORY,
ENV_SUPERVISOR_MACHINE,
ENV_SUPERVISOR_NAME,

@@ -45,7 +42,6 @@ from .resolution.module import ResolutionManager
from .security.module import Security
from .services import ServiceManager
from .store import StoreManager
- from .store.validate import ensure_builtin_repositories
from .supervisor import Supervisor
from .updater import Updater
from .utils.sentry import init_sentry

@@ -55,35 +51,35 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)

async def initialize_coresys() -> CoreSys:
"""Initialize supervisor coresys/objects."""
- coresys = CoreSys()
+ coresys = await CoreSys().load_config()

# Initialize core objects
- coresys.docker = DockerAPI(coresys)
- coresys.resolution = ResolutionManager(coresys)
- coresys.jobs = JobManager(coresys)
+ coresys.docker = await DockerAPI(coresys).load_config()
+ coresys.resolution = await ResolutionManager(coresys).load_config()
+ coresys.jobs = await JobManager(coresys).load_config()
coresys.core = Core(coresys)
- coresys.plugins = PluginManager(coresys)
+ coresys.plugins = await PluginManager(coresys).load_config()
coresys.arch = CpuArch(coresys)
- coresys.auth = Auth(coresys)
- coresys.updater = Updater(coresys)
+ coresys.auth = await Auth(coresys).load_config()
+ coresys.updater = await Updater(coresys).load_config()
coresys.api = RestAPI(coresys)
coresys.supervisor = Supervisor(coresys)
- coresys.homeassistant = HomeAssistant(coresys)
- coresys.addons = AddonManager(coresys)
- coresys.backups = BackupManager(coresys)
+ coresys.homeassistant = await HomeAssistant(coresys).load_config()
+ coresys.addons = await AddonManager(coresys).load_config()
+ coresys.backups = await BackupManager(coresys).load_config()
coresys.host = HostManager(coresys)
coresys.hardware = HardwareManager(coresys)
- coresys.ingress = Ingress(coresys)
+ coresys.ingress = await Ingress(coresys).load_config()
coresys.tasks = Tasks(coresys)
- coresys.services = ServiceManager(coresys)
- coresys.store = StoreManager(coresys)
- coresys.discovery = Discovery(coresys)
+ coresys.services = await ServiceManager(coresys).load_config()
+ coresys.store = await StoreManager(coresys).load_config()
+ coresys.discovery = await Discovery(coresys).load_config()
coresys.dbus = DBusManager(coresys)
coresys.os = OSManager(coresys)
coresys.scheduler = Scheduler(coresys)
- coresys.security = Security(coresys)
+ coresys.security = await Security(coresys).load_config()
coresys.bus = Bus(coresys)
- coresys.mounts = MountManager(coresys)
+ coresys.mounts = await MountManager(coresys).load_config()

# diagnostics
if coresys.config.diagnostics:

@@ -237,29 +233,6 @@ def initialize_system(coresys: CoreSys) -> None:
config.path_addon_configs.mkdir()


- def migrate_system_env(coresys: CoreSys) -> None:
- """Cleanup some stuff after update."""
- config = coresys.config
-
- # hass.io 0.37 -> 0.38
- old_build = Path(config.path_supervisor, "addons/build")
- if old_build.is_dir():
- try:
- old_build.rmdir()
- except OSError:
- _LOGGER.error("Can't cleanup old Add-on build directory at '%s'", old_build)
-
- # Supervisor 2022.5 -> 2022.6. Can be removed after 2022.9
- # pylint: disable=protected-access
- if len(coresys.config.addons_repositories) > 0:
- coresys.store._data[ATTR_REPOSITORIES] = ensure_builtin_repositories(
- coresys.config.addons_repositories
- )
- coresys.config._data[ATTR_ADDONS_CUSTOM_LIST] = []
- coresys.store.save_data()
- coresys.config.save_data()
-
-
def initialize_logging() -> None:
"""Initialize the logging."""
logging.basicConfig(level=logging.INFO)
@@ -109,7 +109,7 @@ class Core(CoreSysAttributes):

# Fix wrong version in config / avoid boot loop on OS
self.sys_config.version = self.sys_supervisor.version
- self.sys_config.save_data()
+ await self.sys_config.save_data()

async def setup(self):
"""Start setting up supervisor orchestration."""

@@ -225,7 +225,7 @@ class Core(CoreSysAttributes):
return

# reset register services / discovery
- self.sys_services.reset()
+ await self.sys_services.reset()

# start addon mark as system
await self.sys_addons.boot(AddonStartup.SYSTEM)

@@ -264,7 +264,7 @@ class Core(CoreSysAttributes):
await self.sys_addons.boot(AddonStartup.APPLICATION)

# store new last boot
- self._update_last_boot()
+ await self._update_last_boot()

finally:
# Add core tasks into scheduler

@@ -289,7 +289,7 @@ class Core(CoreSysAttributes):
"""Stop a running orchestration."""
# store new last boot / prevent time adjustments
if self.state in (CoreState.RUNNING, CoreState.SHUTDOWN):
- self._update_last_boot()
+ await self._update_last_boot()
if self.state in (CoreState.STOPPING, CoreState.CLOSE):
return

@@ -357,10 +357,10 @@ class Core(CoreSysAttributes):
if self.state in (CoreState.STOPPING, CoreState.SHUTDOWN):
await self.sys_plugins.shutdown()

- def _update_last_boot(self):
+ async def _update_last_boot(self):
"""Update last boot time."""
self.sys_config.last_boot = self.sys_hardware.helper.last_boot
- self.sys_config.save_data()
+ await self.sys_config.save_data()

async def _retrieve_whoami(self, with_ssl: bool) -> WhoamiData | None:
try:
@@ -10,7 +10,7 @@ from functools import partial
import logging
import os
from types import MappingProxyType
- from typing import TYPE_CHECKING, Any, TypeVar
+ from typing import TYPE_CHECKING, Any, Self, TypeVar

import aiohttp

@@ -102,6 +102,11 @@ class CoreSys:
# Task factory attributes
self._set_task_context: list[Callable[[Context], Context]] = []

+ async def load_config(self) -> Self:
+ """Load config in executor."""
+ await self.config.read_data()
+ return self
+
@property
def dev(self) -> bool:
"""Return True if we run dev mode."""
@@ -70,11 +70,11 @@ class BoardManager(DBusInterfaceProxy):
await super().connect(bus)

if self.board == BOARD_NAME_YELLOW:
- self._board_proxy = Yellow()
+ self._board_proxy = await Yellow().load_config()
elif self.board == BOARD_NAME_GREEN:
- self._board_proxy = Green()
+ self._board_proxy = await Green().load_config()
elif self.board == BOARD_NAME_SUPERVISED:
- self._board_proxy = Supervised()
+ self._board_proxy = await Supervised().load_config()
else:
return
@@ -53,7 +53,7 @@ class Discovery(CoreSysAttributes, FileConfiguration):
_LOGGER.info("Loaded %d messages", len(messages))
self.message_obj = messages

- def save(self) -> None:
+ async def save(self) -> None:
"""Write discovery message into data file."""
messages: list[dict[str, Any]] = []
for message in self.list_messages:

@@ -61,7 +61,7 @@ class Discovery(CoreSysAttributes, FileConfiguration):

self._data[ATTR_DISCOVERY].clear()
self._data[ATTR_DISCOVERY].extend(messages)
- self.save_data()
+ await self.save_data()

def get(self, uuid: str) -> Message | None:
"""Return discovery message."""

@@ -72,7 +72,7 @@ class Discovery(CoreSysAttributes, FileConfiguration):
"""Return list of available discovery messages."""
return list(self.message_obj.values())

- def send(self, addon: Addon, service: str, config: dict[str, Any]) -> Message:
+ async def send(self, addon: Addon, service: str, config: dict[str, Any]) -> Message:
"""Send a discovery message to Home Assistant."""
# Create message
message = Message(addon.slug, service, config)

@@ -93,15 +93,15 @@ class Discovery(CoreSysAttributes, FileConfiguration):
"Sending discovery to Home Assistant %s from %s", service, addon.slug
)
self.message_obj[message.uuid] = message
- self.save()
+ await self.save()

self.sys_create_task(self._push_discovery(message, CMD_NEW))
return message

- def remove(self, message: Message) -> None:
+ async def remove(self, message: Message) -> None:
"""Remove a discovery message from Home Assistant."""
self.message_obj.pop(message.uuid, None)
- self.save()
+ await self.save()

_LOGGER.info(
"Delete discovery to Home Assistant %s from %s",
@@ -664,7 +664,7 @@ class DockerAddon(DockerInterface):

async def _build(self, version: AwesomeVersion, image: str | None = None) -> None:
"""Build a Docker container."""
- build_env = AddonBuild(self.coresys, self.addon)
+ build_env = await AddonBuild(self.coresys, self.addon).load_config()
if not build_env.is_valid:
_LOGGER.error("Invalid build environment, can't build this add-on!")
raise DockerError()
@@ -5,7 +5,7 @@ from ipaddress import IPv4Address
import logging
import os
from pathlib import Path
- from typing import Any, Final
+ from typing import Any, Final, Self

import attr
from awesomeversion import AwesomeVersion, AwesomeVersionCompareException

@@ -113,6 +113,11 @@ class DockerAPI:
self.config: DockerConfig = DockerConfig()
self._monitor: DockerMonitor = DockerMonitor(coresys)

+ async def load_config(self) -> Self:
+ """Load config in executor."""
+ await self.config.read_data()
+ return self
+
@property
def images(self) -> ImageCollection:
"""Return API images."""
@@ -110,7 +110,7 @@ class HomeAssistantCore(JobGroup):
else:
self.sys_homeassistant.version = self.instance.version
self.sys_homeassistant.image = self.instance.image
- self.sys_homeassistant.save_data()
+ await self.sys_homeassistant.save_data()

# Start landingpage
if self.instance.version != LANDINGPAGE:

@@ -139,7 +139,7 @@ class HomeAssistantCore(JobGroup):
_LOGGER.info("Using preinstalled landingpage")
self.sys_homeassistant.version = LANDINGPAGE
self.sys_homeassistant.image = self.instance.image
- self.sys_homeassistant.save_data()
+ await self.sys_homeassistant.save_data()
return

_LOGGER.info("Setting up Home Assistant landingpage")

@@ -167,7 +167,7 @@ class HomeAssistantCore(JobGroup):

self.sys_homeassistant.version = LANDINGPAGE
self.sys_homeassistant.image = self.sys_updater.image_homeassistant
- self.sys_homeassistant.save_data()
+ await self.sys_homeassistant.save_data()

@Job(
name="home_assistant_core_install",

@@ -200,7 +200,7 @@ class HomeAssistantCore(JobGroup):
_LOGGER.info("Home Assistant docker now installed")
self.sys_homeassistant.version = self.instance.version
self.sys_homeassistant.image = self.sys_updater.image_homeassistant
- self.sys_homeassistant.save_data()
+ await self.sys_homeassistant.save_data()

# finishing
try:

@@ -270,7 +270,7 @@ class HomeAssistantCore(JobGroup):
_LOGGER.info("Successfully started Home Assistant %s", to_version)

# Successfull - last step
- self.sys_homeassistant.save_data()
+ await self.sys_homeassistant.save_data()
with suppress(DockerError):
await self.instance.cleanup(old_image=old_image)

@@ -339,7 +339,7 @@ class HomeAssistantCore(JobGroup):
else:
# Create new API token
self.sys_homeassistant.supervisor_token = secrets.token_hex(56)
- self.sys_homeassistant.save_data()
+ await self.sys_homeassistant.save_data()

# Write audio settings
self.sys_homeassistant.write_pulse()
@@ -82,7 +82,7 @@ class Ingress(FileConfiguration, CoreSysAttributes):

async def unload(self) -> None:
"""Shutdown sessions."""
- self.save_data()
+ await self.save_data()

def _cleanup_sessions(self) -> None:
"""Remove not used sessions."""

@@ -170,16 +170,16 @@ class Ingress(FileConfiguration, CoreSysAttributes):

# Save port for next time
self.ports[addon_slug] = port
- self.save_data()
+ await self.save_data()
return port

- def del_dynamic_port(self, addon_slug: str) -> None:
+ async def del_dynamic_port(self, addon_slug: str) -> None:
"""Remove a previously assigned dynamic port."""
if addon_slug not in self.ports:
return

del self.ports[addon_slug]
- self.save_data()
+ await self.save_data()

async def update_hass_panel(self, addon: Addon):
"""Return True if Home Assistant up and running."""
@@ -5,6 +5,7 @@ from collections.abc import Awaitable
from dataclasses import dataclass
import logging
from pathlib import PurePath
+ from typing import Self

from attr import evolve

@@ -49,11 +50,17 @@ class MountManager(FileConfiguration, CoreSysAttributes):
)

self.coresys: CoreSys = coresys
+ self._mounts: dict[str, Mount] = {}
+ self._bound_mounts: dict[str, BoundMount] = {}
+
+ async def load_config(self) -> Self:
+ """Load config in executor."""
+ await super().load_config()
self._mounts: dict[str, Mount] = {
- mount[ATTR_NAME]: Mount.from_dict(coresys, mount)
+ mount[ATTR_NAME]: Mount.from_dict(self.coresys, mount)
for mount in self._data[ATTR_MOUNTS]
}
self._bound_mounts: dict[str, BoundMount] = {}
+ return self

@property
def mounts(self) -> list[Mount]:

@@ -303,9 +310,9 @@ class MountManager(FileConfiguration, CoreSysAttributes):
)
await bound_mount.bind_mount.load()

- def save_data(self) -> None:
+ async def save_data(self) -> None:
"""Store data to configuration file."""
self._data[ATTR_MOUNTS] = [
mount.to_dict(skip_secrets=False) for mount in self.mounts
]
- super().save_data()
+ await super().save_data()
@@ -179,7 +179,7 @@ class PluginBase(ABC, FileConfiguration, CoreSysAttributes):
else:
self.version = self.instance.version
self.image = self.default_image
- self.save_data()
+ await self.save_data()

# Run plugin
with suppress(PluginError):

@@ -208,7 +208,7 @@ class PluginBase(ABC, FileConfiguration, CoreSysAttributes):
_LOGGER.info("%s plugin now installed", self.slug)
self.version = self.instance.version
self.image = self.default_image
- self.save_data()
+ await self.save_data()

async def update(self, version: str | None = None) -> None:
"""Update system plugin."""

@@ -224,7 +224,7 @@ class PluginBase(ABC, FileConfiguration, CoreSysAttributes):
await self.instance.update(version, image=self.default_image)
self.version = self.instance.version
self.image = self.default_image
- self.save_data()
+ await self.save_data()

# Cleanup
with suppress(DockerError):
@@ -73,7 +73,7 @@ class PluginCli(PluginBase):
"""Run cli."""
# Create new API token
self._data[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
- self.save_data()
+ await self.save_data()

# Start Instance
_LOGGER.info("Starting CLI plugin")
@@ -226,7 +226,7 @@ class PluginDns(PluginBase):
# Reset manually defined DNS
self.servers.clear()
self.fallback = True
- self.save_data()
+ await self.save_data()

# Resets hosts
with suppress(OSError):
@@ -2,6 +2,7 @@

import asyncio
import logging
+ from typing import Self

from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import HassioError

@@ -30,6 +31,11 @@ class PluginManager(CoreSysAttributes):
self._observer: PluginObserver = PluginObserver(coresys)
self._multicast: PluginMulticast = PluginMulticast(coresys)

+ async def load_config(self) -> Self:
+ """Load config in executor."""
+ await asyncio.gather(*[plugin.read_data() for plugin in self.all_plugins])
+ return self
+
@property
def all_plugins(self) -> list[PluginBase]:
"""Return cli handler."""
@@ -80,7 +80,7 @@ class PluginObserver(PluginBase):
"""Run observer."""
# Create new API token
self._data[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
- self.save_data()
+ await self.save_data()

# Start Instance
_LOGGER.info("Starting observer plugin")
@@ -22,8 +22,6 @@ class ResolutionCheck(CoreSysAttributes):
self.coresys = coresys
self._checks: dict[str, CheckBase] = {}

- self._load()
-
@property
def data(self) -> dict[str, Any]:
"""Return data."""

@@ -34,13 +32,20 @@ class ResolutionCheck(CoreSysAttributes):
"""Return all list of all checks."""
return list(self._checks.values())

- def _load(self):
+ async def load(self) -> None:
"""Load all checks."""
+
+ def _load() -> dict[str, CheckBase]:
+ """Load and setup checks in executor."""
package = f"{__package__}.checks"
+ checks: dict[str, CheckBase] = {}
for module in get_valid_modules("checks"):
check_module = import_module(f"{package}.{module}")
check = check_module.setup(self.coresys)
- self._checks[check.slug] = check
+ checks[check.slug] = check
+ return checks
+
+ self._checks = await self.sys_run_in_executor(_load)

def get(self, slug: str) -> CheckBase:
"""Return check based on slug."""
@@ -28,20 +28,25 @@ class ResolutionEvaluation(CoreSysAttributes):
self.cached_images: set[str] = set()
self._evalutions: dict[str, EvaluateBase] = {}

- self._load()
-
@property
def all_evaluations(self) -> list[EvaluateBase]:
"""Return all list of all checks."""
return list(self._evalutions.values())

- def _load(self):
- """Load all checks."""
+ async def load(self) -> None:
+ """Load all evaluations."""
+
+ def _load() -> dict[str, EvaluateBase]:
+ """Load and setup evaluations in executor."""
package = f"{__package__}.evaluations"
+ evaluations: dict[str, EvaluateBase] = {}
for module in get_valid_modules("evaluations"):
- check_module = import_module(f"{package}.{module}")
- check = check_module.setup(self.coresys)
- self._evalutions[check.slug] = check
+ evaluate_module = import_module(f"{package}.{module}")
+ evaluation = evaluate_module.setup(self.coresys)
+ evaluations[evaluation.slug] = evaluation
+ return evaluations
+
+ self._evalutions = await self.sys_run_in_executor(_load)

def get(self, slug: str) -> EvaluateBase:
"""Return check based on slug."""
@@ -22,15 +22,20 @@ class ResolutionFixup(CoreSysAttributes):
self.coresys = coresys
self._fixups: dict[str, FixupBase] = {}

- self._load()
+ async def load(self) -> None:
+ """Load all fixups."""

- def _load(self):
- """Load all checks."""
+ def _load() -> dict[str, FixupBase]:
+ """Load and setup fixups in executor."""
package = f"{__package__}.fixups"
+ fixups: dict[str, FixupBase] = {}
for module in get_valid_modules("fixups"):
fixup_module = import_module(f"{package}.{module}")
fixup = fixup_module.setup(self.coresys)
- self._fixups[fixup.slug] = fixup
+ fixups[fixup.slug] = fixup
+ return fixups
+
+ self._fixups = await self.sys_run_in_executor(_load)

@property
def all_fixes(self) -> list[FixupBase]:
@@ -195,6 +195,10 @@ class ResolutionManager(FileConfiguration, CoreSysAttributes):

async def load(self):
"""Load the resoulution manager."""
+ await self.check.load()
+ await self.fixup.load()
+ await self.evaluate.load()
+
# Initial healthcheck when the manager is loaded
await self.healthcheck()
@@ -1,5 +1,7 @@
"""Handle internal services discovery."""

+ from typing import Self
+
from ..coresys import CoreSys, CoreSysAttributes
from .const import SERVICE_MQTT, SERVICE_MYSQL
from .data import ServicesData

@@ -19,6 +21,11 @@ class ServiceManager(CoreSysAttributes):
self.data: ServicesData = ServicesData()
self.services_obj: dict[str, ServiceInterface] = {}

+ async def load_config(self) -> Self:
+ """Load config in executor."""
+ await self.data.read_data()
+ return self
+
@property
def list_services(self) -> list[ServiceInterface]:
"""Return a list of services."""

@@ -33,6 +40,6 @@ class ServiceManager(CoreSysAttributes):
for slug, service in AVAILABLE_SERVICES.items():
self.services_obj[slug] = service(self.coresys)

- def reset(self) -> None:
+ async def reset(self) -> None:
"""Reset available data."""
- self.data.reset_data()
+ await self.data.reset_data()
@@ -216,7 +216,7 @@ class Supervisor(CoreSysAttributes):

self.sys_config.version = version
self.sys_config.image = self.sys_updater.image_supervisor
- self.sys_config.save_data()
+ await self.sys_config.save_data()

self.sys_create_task(self.sys_core.stop())
@@ -310,7 +310,7 @@ class Updater(FileConfiguration, CoreSysAttributes):
f"Can't process version data: {err}", _LOGGER.warning
) from err

- self.save_data()
+ await self.save_data()

# Send status update to core
for event in events:
@@ -1,9 +1,10 @@
"""Common utils."""

+ import asyncio
from contextlib import suppress
import logging
from pathlib import Path
- from typing import Any
+ from typing import Any, Self

import voluptuous as vol
from voluptuous.humanize import humanize_error

@@ -50,15 +51,18 @@ def write_json_or_yaml_file(path: Path, data: dict) -> None:
class FileConfiguration:
"""Baseclass for classes that uses configuration files, the files can be JSON/YAML."""

- def __init__(self, file_path: Path, schema: vol.Schema):
+ def __init__(self, file_path: Path | None, schema: vol.Schema):
"""Initialize hass object."""
- self._file: Path = file_path
+ self._file: Path | None = file_path
self._schema: vol.Schema = schema
self._data: dict[str, Any] = _DEFAULT

- self.read_data()
+ async def load_config(self) -> Self:
+ """Read in config in executor."""
+ await self.read_data()
+ return self

- def reset_data(self) -> None:
+ async def reset_data(self) -> None:
"""Reset configuration to default."""
try:
self._data = self._schema(_DEFAULT)

@@ -67,15 +71,20 @@ class FileConfiguration:
"Can't reset %s: %s", self._file, humanize_error(self._data, ex)
)
else:
- self.save_data()
+ await self.save_data()

- def read_data(self) -> None:
+ async def read_data(self) -> None:
"""Read configuration file."""
+ if not self._file:
+ raise RuntimeError("Path to config file must be set!")
+
+ def _read_data() -> dict[str, Any]:
if self._file.is_file():
- try:
- self._data = read_json_or_yaml_file(self._file)
- except ConfigurationFileError:
- self._data = _DEFAULT
+ with suppress(ConfigurationFileError):
+ return read_json_or_yaml_file(self._file)
+ return _DEFAULT
+
+ self._data = await asyncio.get_running_loop().run_in_executor(None, _read_data)

# Validate
try:

@@ -89,8 +98,11 @@ class FileConfiguration:
_LOGGER.warning("Resetting %s to default", self._file)
self._data = self._schema(_DEFAULT)

- def save_data(self) -> None:
+ async def save_data(self) -> None:
"""Store data to configuration file."""
+ if not self._file:
+ raise RuntimeError("Path to config file must be set!")
+
# Validate
try:
self._data = self._schema(self._data)

@@ -100,8 +112,10 @@ class FileConfiguration:
# Load last valid data
_LOGGER.warning("Resetting %s to last version", self._file)
self._data = _DEFAULT
- self.read_data()
+ await self.read_data()
else:
# write
with suppress(ConfigurationFileError):
- write_json_or_yaml_file(self._file, self._data)
+ await asyncio.get_running_loop().run_in_executor(
+ None, write_json_or_yaml_file, self._file, self._data
+ )
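With save_data() and read_data() now coroutines, synchronous callers can no longer invoke them directly, and any test that stubs them needs an async-aware mock. A hypothetical illustration of the difference (not taken from the repository's test suite):

import asyncio
from unittest.mock import AsyncMock, MagicMock


async def demo() -> None:
    config = MagicMock()
    # A plain MagicMock return value cannot be awaited; AsyncMock can.
    config.save_data = AsyncMock()
    await config.save_data()
    config.save_data.assert_awaited_once()


asyncio.run(demo())

This is why the conftest fixtures further down swap their MagicMock save_data stubs for AsyncMock.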
@@ -246,7 +246,7 @@ async def test_watchdog_during_attach(
):
"""Test host reboot treated as manual stop but not supervisor restart."""
store = coresys.addons.store[TEST_ADDON_SLUG]
- coresys.addons.data.install(store)
+ await coresys.addons.data.install(store)

with (
patch.object(Addon, "restart") as restart,
@@ -11,7 +11,7 @@ from supervisor.coresys import CoreSys

async def test_platform_set(coresys: CoreSys, install_addon_ssh: Addon):
"""Test platform set in docker args."""
- build = AddonBuild(coresys, install_addon_ssh)
+ build = await AddonBuild(coresys, install_addon_ssh).load_config()
with (
patch.object(
type(coresys.arch), "supported", new=PropertyMock(return_value=["amd64"])

@@ -27,7 +27,7 @@ async def test_platform_set(coresys: CoreSys, install_addon_ssh: Addon):

async def test_dockerfile_evaluation(coresys: CoreSys, install_addon_ssh: Addon):
"""Test platform set in docker args."""
- build = AddonBuild(coresys, install_addon_ssh)
+ build = await AddonBuild(coresys, install_addon_ssh).load_config()
with (
patch.object(
type(coresys.arch), "supported", new=PropertyMock(return_value=["amd64"])

@@ -45,7 +45,7 @@ async def test_dockerfile_evaluation(coresys: CoreSys, install_addon_ssh: Addon)

async def test_dockerfile_evaluation_arch(coresys: CoreSys, install_addon_ssh: Addon):
"""Test platform set in docker args."""
- build = AddonBuild(coresys, install_addon_ssh)
+ build = await AddonBuild(coresys, install_addon_ssh).load_config()
with (
patch.object(
type(coresys.arch), "supported", new=PropertyMock(return_value=["aarch64"])

@@ -65,7 +65,7 @@ async def test_dockerfile_evaluation_arch(coresys: CoreSys, install_addon_ssh: A

async def test_build_valid(coresys: CoreSys, install_addon_ssh: Addon):
"""Test platform set in docker args."""
- build = AddonBuild(coresys, install_addon_ssh)
+ build = await AddonBuild(coresys, install_addon_ssh).load_config()
with (
patch.object(
type(coresys.arch), "supported", new=PropertyMock(return_value=["aarch64"])

@@ -79,7 +79,7 @@ async def test_build_valid(coresys: CoreSys, install_addon_ssh: Addon):

async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon):
"""Test platform set in docker args."""
- build = AddonBuild(coresys, install_addon_ssh)
+ build = await AddonBuild(coresys, install_addon_ssh).load_config()
with (
patch.object(
type(coresys.arch), "supported", new=PropertyMock(return_value=["amd64"])
@@ -66,12 +66,12 @@ async def fixture_remove_wait_boot(coresys: CoreSys) -> AsyncGenerator[None]:


@pytest.fixture(name="install_addon_example_image")
- def fixture_install_addon_example_image(
+ async def fixture_install_addon_example_image(
coresys: CoreSys, repository
) -> Generator[Addon]:
"""Install local_example add-on with image."""
store = coresys.addons.store["local_example_image"]
- coresys.addons.data.install(store)
+ await coresys.addons.data.install(store)
# pylint: disable-next=protected-access
coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data)

@@ -195,7 +195,7 @@ async def test_addon_uninstall_removes_discovery(
"""Test discovery messages removed when addon uninstalled."""
assert coresys.discovery.list_messages == []

- message = coresys.discovery.send(
+ message = await coresys.discovery.send(
install_addon_ssh, "mqtt", {"host": "localhost", "port": 1883}
)
assert message.addon == TEST_ADDON_SLUG

@@ -504,7 +504,7 @@ async def test_shared_image_kept_on_uninstall(
store_data = deepcopy(coresys.addons.store["local_example"].data)
store = AddonStore(coresys, "local_example2", store_data)
coresys.addons.store["local_example2"] = store
- coresys.addons.data.install(store)
+ await coresys.addons.data.install(store)
# pylint: disable-next=protected-access
coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data)

@@ -545,7 +545,7 @@ async def test_shared_image_kept_on_update(

coresys.store.data.addons["local_example2"] = new_store_data
coresys.addons.store["local_example2"] = new_store
- coresys.addons.data.install(curr_store)
+ await coresys.addons.data.install(curr_store)
# pylint: disable-next=protected-access
coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data)
@@ -54,7 +54,7 @@ async def test_api_list_discovery(
),
patch("supervisor.utils.common.Path.is_file", return_value=True),
):
- coresys.discovery.read_data()
+ await coresys.discovery.read_data()

await coresys.discovery.load()
assert coresys.discovery.list_messages == [
@@ -51,7 +51,7 @@ async def test_do_backup_full(coresys: CoreSys, backup_mock, install_addon_ssh):
coresys.core.state = CoreState.RUNNING
coresys.hardware.disk.get_disk_free_space = lambda x: 5000

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

# backup_mock fixture causes Backup() to be a MagicMock
backup_instance: MagicMock = await manager.do_backup_full()

@@ -84,7 +84,7 @@ async def test_do_backup_full_with_filename(
coresys.core.state = CoreState.RUNNING
coresys.hardware.disk.get_disk_free_space = lambda x: 5000

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

# backup_mock fixture causes Backup() to be a MagicMock
await manager.do_backup_full(filename=filename)

@@ -102,7 +102,7 @@ async def test_do_backup_full_uncompressed(
coresys.core.state = CoreState.RUNNING
coresys.hardware.disk.get_disk_free_space = lambda x: 5000

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

# backup_mock fixture causes Backup() to be a MagicMock
backup_instance: MagicMock = await manager.do_backup_full(compressed=False)

@@ -132,7 +132,7 @@ async def test_do_backup_partial_minimal(
coresys.core.state = CoreState.RUNNING
coresys.hardware.disk.get_disk_free_space = lambda x: 5000

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

# backup_mock fixture causes Backup() to be a MagicMock
backup_instance: MagicMock = await manager.do_backup_partial(homeassistant=False)

@@ -159,7 +159,7 @@ async def test_do_backup_partial_minimal_uncompressed(
coresys.core.state = CoreState.RUNNING
coresys.hardware.disk.get_disk_free_space = lambda x: 5000

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

# backup_mock fixture causes Backup() to be a MagicMock
backup_instance: MagicMock = await manager.do_backup_partial(

@@ -188,7 +188,7 @@ async def test_do_backup_partial_maximal(
coresys.core.state = CoreState.RUNNING
coresys.hardware.disk.get_disk_free_space = lambda x: 5000

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

# backup_mock fixture causes Backup() to be a MagicMock
backup_instance: MagicMock = await manager.do_backup_partial(

@@ -224,7 +224,7 @@ async def test_do_restore_full(coresys: CoreSys, full_backup_mock, install_addon
coresys.homeassistant.core.update = AsyncMock(return_value=None)
install_addon_ssh.uninstall = AsyncMock(return_value=None)

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

backup_instance = full_backup_mock.return_value
backup_instance.sys_addons = coresys.addons

@@ -255,7 +255,7 @@ async def test_do_restore_full_different_addon(
coresys.homeassistant.core.update = AsyncMock(return_value=None)
install_addon_ssh.uninstall = AsyncMock(return_value=None)

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

backup_instance = full_backup_mock.return_value
backup_instance.addon_list = ["differentslug"]

@@ -286,7 +286,7 @@ async def test_do_restore_partial_minimal(
coresys.homeassistant.core.stop = AsyncMock(return_value=None)
coresys.homeassistant.core.update = AsyncMock(return_value=None)

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

backup_instance = partial_backup_mock.return_value
assert await manager.do_restore_partial(backup_instance, homeassistant=False)

@@ -309,7 +309,7 @@ async def test_do_restore_partial_maximal(coresys: CoreSys, partial_backup_mock)
coresys.homeassistant.core.stop = AsyncMock(return_value=None)
coresys.homeassistant.core.update = AsyncMock(return_value=None)

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

backup_instance = partial_backup_mock.return_value
assert await manager.do_restore_partial(

@@ -337,7 +337,7 @@ async def test_fail_invalid_full_backup(
coresys.core.state = CoreState.RUNNING
coresys.hardware.disk.get_disk_free_space = lambda x: 5000

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

with pytest.raises(BackupInvalidError):
await manager.do_restore_full(partial_backup_mock.return_value)

@@ -369,7 +369,7 @@ async def test_fail_invalid_partial_backup(
coresys.core.state = CoreState.RUNNING
coresys.hardware.disk.get_disk_free_space = lambda x: 5000

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

backup_instance = partial_backup_mock.return_value
backup_instance.all_locations[None]["protected"] = True

@@ -1182,7 +1182,6 @@ async def test_backup_progress(


async def test_restore_progress(
- request: pytest.FixtureRequest,
coresys: CoreSys,
install_addon_ssh: Addon,
container: MagicMock,

@@ -1202,7 +1201,14 @@ async def test_restore_progress(
ha_ws_client.async_send_command.reset_mock()

# Install another addon to be uninstalled
- request.getfixturevalue("install_addon_example")
+ # Duplicate code from install_addon_example fixture
+ # Apparently request.getfixturevalue does not work with async fixtures: https://github.com/pytest-dev/pytest-asyncio/issues/112
+ store = coresys.addons.store["local_example"]
+ await coresys.addons.data.install(store)
+ # pylint: disable-next=protected-access
+ coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data)
+ coresys.addons.local[store.slug] = Addon(coresys, store.slug)

with (
patch("supervisor.addons.addon.asyncio.Event.wait"),
patch.object(HomeAssistant, "restore"),

@@ -1489,7 +1495,7 @@ async def test_restore_only_reloads_ingress_on_change(
)

install_addon_ssh.ingress_panel = True
- install_addon_ssh.save_persist()
+ await install_addon_ssh.save_persist()
backup_with_ingress: Backup = await coresys.backups.do_backup_partial(
addons=["local_ssh"]
)

@@ -1812,7 +1818,7 @@ async def test_monitoring_after_full_restore(
coresys.homeassistant.core.stop = AsyncMock(return_value=None)
coresys.homeassistant.core.update = AsyncMock(return_value=None)

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

backup_instance = full_backup_mock.return_value
backup_instance.protected = False

@@ -1830,7 +1836,7 @@ async def test_monitoring_after_partial_restore(
coresys.core.state = CoreState.RUNNING
coresys.hardware.disk.get_disk_free_space = lambda x: 5000

- manager = BackupManager(coresys)
+ manager = await BackupManager(coresys).load_config()

backup_instance = partial_backup_mock.return_value
backup_instance.protected = False
@@ -96,13 +96,10 @@ async def docker() -> DockerAPI:
"supervisor.docker.manager.DockerAPI.info",
return_value=MagicMock(),
),
- patch(
- "supervisor.docker.manager.DockerConfig",
- return_value=MagicMock(),
- ),
patch("supervisor.docker.manager.DockerAPI.unload"),
):
docker_obj = DockerAPI(MagicMock())
+ docker_obj.config._data = {"registries": {}}
with patch("supervisor.docker.monitor.DockerMonitor.load"):
await docker_obj.load()

@@ -110,8 +107,6 @@ async def docker() -> DockerAPI:
docker_obj.info.storage = "overlay2"
docker_obj.info.version = "1.0.0"

- docker_obj.config.registries = {}
-
yield docker_obj


@@ -323,15 +318,18 @@ async def coresys(
coresys_obj = await initialize_coresys()

# Mock save json
- coresys_obj._ingress.save_data = MagicMock()
- coresys_obj._auth.save_data = MagicMock()
- coresys_obj._updater.save_data = MagicMock()
- coresys_obj._config.save_data = MagicMock()
- coresys_obj._jobs.save_data = MagicMock()
- coresys_obj._resolution.save_data = MagicMock()
- coresys_obj._addons.data.save_data = MagicMock()
- coresys_obj._store.save_data = MagicMock()
- coresys_obj._mounts.save_data = MagicMock()
+ coresys_obj._ingress.save_data = AsyncMock()
+ coresys_obj._auth.save_data = AsyncMock()
+ coresys_obj._updater.save_data = AsyncMock()
+ coresys_obj._config.save_data = AsyncMock()
+ coresys_obj._jobs.save_data = AsyncMock()
+ coresys_obj._resolution.save_data = AsyncMock()
+ coresys_obj._addons.data.save_data = AsyncMock()
+ coresys_obj._store.save_data = AsyncMock()
+ coresys_obj._mounts.save_data = AsyncMock()
+
+ # Load resolution center
+ await coresys_obj.resolution.load()

# Mock test client
coresys_obj._supervisor.instance._meta = {

@@ -549,10 +547,10 @@ async def repository(coresys: CoreSys):


@pytest.fixture
- def install_addon_ssh(coresys: CoreSys, repository):
+ async def install_addon_ssh(coresys: CoreSys, repository):
"""Install local_ssh add-on."""
store = coresys.addons.store[TEST_ADDON_SLUG]
- coresys.addons.data.install(store)
+ await coresys.addons.data.install(store)
coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data)

addon = Addon(coresys, store.slug)

@@ -561,10 +559,10 @@ def install_addon_ssh(coresys: CoreSys, repository):


@pytest.fixture
- def install_addon_example(coresys: CoreSys, repository):
+ async def install_addon_example(coresys: CoreSys, repository):
"""Install local_example add-on."""
store = coresys.addons.store["local_example"]
- coresys.addons.data.install(store)
+ await coresys.addons.data.install(store)
coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data)

addon = Addon(coresys, store.slug)
@@ -21,7 +21,7 @@ async def fixture_green_service(dbus_session_bus: MessageBus) -> GreenService:

async def test_dbus_green(green_service: GreenService, dbus_session_bus: MessageBus):
"""Test Green board load."""
- green = Green()
+ green = await Green().load_config()
await green.connect(dbus_session_bus)

assert green.name == "Green"

@@ -36,7 +36,7 @@ async def test_dbus_green(green_service: GreenService, dbus_session_bus: Message
return_value={"activity_led": False, "user_led": False},
),
):
- green = Green()
+ green = await Green().load_config()
await green.connect(dbus_session_bus)

assert green.activity_led is False

@@ -47,7 +47,7 @@ async def test_dbus_green_set_activity_led(
green_service: GreenService, dbus_session_bus: MessageBus
):
"""Test setting activity led for Green board."""
- green = Green()
+ green = await Green().load_config()
await green.connect(dbus_session_bus)

await green.set_activity_led(False)

@@ -59,7 +59,7 @@ async def test_dbus_green_set_power_led(
green_service: GreenService, dbus_session_bus: MessageBus
):
"""Test setting power led for Green board."""
- green = Green()
+ green = await Green().load_config()
await green.connect(dbus_session_bus)

await green.set_power_led(False)

@@ -71,7 +71,7 @@ async def test_dbus_green_set_user_led(
green_service: GreenService, dbus_session_bus: MessageBus
):
"""Test setting user led for Green board."""
- green = Green()
+ green = await Green().load_config()
await green.connect(dbus_session_bus)

await green.set_user_led(False)
@@ -21,7 +21,7 @@ async def fixture_yellow_service(dbus_session_bus: MessageBus) -> YellowService:

async def test_dbus_yellow(yellow_service: YellowService, dbus_session_bus: MessageBus):
"""Test Yellow board load."""
- yellow = Yellow()
+ yellow = await Yellow().load_config()
await yellow.connect(dbus_session_bus)

assert yellow.name == "Yellow"

@@ -36,7 +36,7 @@ async def test_dbus_yellow(yellow_service: YellowService, dbus_session_bus: Mess
return_value={"disk_led": False, "heartbeat_led": False},
),
):
- yellow = Yellow()
+ yellow = await Yellow().load_config()
await yellow.connect(dbus_session_bus)

assert yellow.disk_led is False

@@ -47,7 +47,7 @@ async def test_dbus_yellow_set_disk_led(
yellow_service: YellowService, dbus_session_bus: MessageBus
):
"""Test setting disk led for Yellow board."""
- yellow = Yellow()
+ yellow = await Yellow().load_config()
await yellow.connect(dbus_session_bus)

await yellow.set_disk_led(False)

@@ -59,7 +59,7 @@ async def test_dbus_yellow_set_heartbeat_led(
yellow_service: YellowService, dbus_session_bus: MessageBus
):
"""Test setting heartbeat led for Yellow board."""
- yellow = Yellow()
+ yellow = await Yellow().load_config()
await yellow.connect(dbus_session_bus)

await yellow.set_heartbeat_led(False)

@@ -71,7 +71,7 @@ async def test_dbus_yellow_set_power_led(
yellow_service: YellowService, dbus_session_bus: MessageBus
):
"""Test setting power led for Yellow board."""
- yellow = Yellow()
+ yellow = await Yellow().load_config()
await yellow.connect(dbus_session_bus)

await yellow.set_power_led(False)
@ -8,7 +8,7 @@ from supervisor.docker.interface import DOCKER_HUB, DockerInterface
def test_no_credentials(coresys: CoreSys):
"""Test no credentials."""
docker = DockerInterface(coresys)
coresys.docker.config.registries = {
coresys.docker.config._data["registries"] = {
DOCKER_HUB: {"username": "Spongebob Squarepants", "password": "Password1!"}
}
assert not docker._get_credentials("ghcr.io/homeassistant")

@ -18,7 +18,7 @@ def test_no_credentials(coresys: CoreSys):
def test_no_matching_credentials(coresys: CoreSys):
"""Test no matching credentials."""
docker = DockerInterface(coresys)
coresys.docker.config.registries = {
coresys.docker.config._data["registries"] = {
DOCKER_HUB: {"username": "Spongebob Squarepants", "password": "Password1!"}
}
assert not docker._get_credentials("ghcr.io/homeassistant")

@ -28,7 +28,7 @@ def test_no_matching_credentials(coresys: CoreSys):
def test_matching_credentials(coresys: CoreSys):
"""Test no matching credentials."""
docker = DockerInterface(coresys)
coresys.docker.config.registries = {
coresys.docker.config._data["registries"] = {
"ghcr.io": {"username": "Octocat", "password": "Password1!"},
DOCKER_HUB: {"username": "Spongebob Squarepants", "password": "Password1!"},
}

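These credential tests now seed _data["registries"] directly, presumably because registries is a read-only view over that dict on the FileConfiguration-backed Docker config. A usage sketch of the lookup they exercise (the image names here are made up for illustration):

    docker = DockerInterface(coresys)
    coresys.docker.config._data["registries"] = {
        "ghcr.io": {"username": "Octocat", "password": "Password1!"},
    }
    # A configured registry prefix resolves to its credentials ...
    assert docker._get_credentials("ghcr.io/home-assistant/example-image")
    # ... while an unconfigured registry yields nothing.
    assert not docker._get_credentials("quay.io/example/image")
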
@ -468,7 +468,7 @@ async def test_save_data(
):
"""Test saving mount config data."""
# Replace mount manager with one that doesn't have save_data mocked
coresys._mounts = MountManager(coresys) # pylint: disable=protected-access
coresys._mounts = await MountManager(coresys).load_config() # pylint: disable=protected-access

path = tmp_supervisor_data / "mounts.json"
assert not path.exists()

@ -488,7 +488,7 @@ async def test_save_data(
},
)
)
coresys.mounts.save_data()
await coresys.mounts.save_data()

assert path.exists()
with path.open() as file:

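Worth noting for this hunk: if the await is dropped, save_data() merely creates a coroutine object, nothing is written, and the path.exists() assertion below the hunk fails. A small sketch of the difference, using the same objects as the test:

    coresys.mounts.save_data()        # un-awaited coroutine: no write happens, and Python
                                      # warns "coroutine ... was never awaited"
    await coresys.mounts.save_data()  # runs the executor-backed write; mounts.json appears
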
@ -111,8 +111,8 @@ async def test_get_checks(coresys: CoreSys):
assert coresys.resolution.check.get("free_space")


def test_dynamic_check_loader(coresys: CoreSys):
async def test_dynamic_check_loader(coresys: CoreSys):
"""Test dynamic check loader, this ensures that all checks have defined a setup function."""
coresys.resolution.check._load()
await coresys.resolution.check.load()
for check in get_valid_modules("checks"):
assert check in coresys.resolution.check._checks

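The loader test now awaits a public load() instead of calling _load(). Per the docstring, the contract it enforces is that every module under checks/ exposes a setup() factory; a hedged sketch of that convention (all names below are examples, not copied from the repository):

    # Illustrative module shape the dynamic loader discovers:
    class CheckExample:
        """Toy check used to illustrate the setup() convention."""

        def __init__(self, coresys) -> None:
            self.coresys = coresys

    def setup(coresys) -> CheckExample:
        """Factory the dynamic loader calls for each module under checks/."""
        return CheckExample(coresys)
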
@ -212,7 +212,7 @@ async def test_remove_used_repository(
use_update: bool,
):
"""Test removing used custom repository."""
coresys.addons.data.install(store_addon)
await coresys.addons.data.install(store_addon)
addon = Addon(coresys, store_addon.slug)
coresys.addons.local[addon.slug] = addon

@ -9,8 +9,6 @@ import pytest
from supervisor.addons.addon import Addon
from supervisor.arch import CpuArch
from supervisor.backups.manager import BackupManager
from supervisor.bootstrap import migrate_system_env
from supervisor.const import ATTR_ADDONS_CUSTOM_LIST
from supervisor.coresys import CoreSys
from supervisor.exceptions import AddonsNotSupportedError, StoreJobError
from supervisor.homeassistant.module import HomeAssistant

@ -24,7 +22,7 @@ from tests.common import load_yaml_fixture

async def test_default_load(coresys: CoreSys):
"""Test default load from config."""
store_manager = StoreManager(coresys)
store_manager = await StoreManager(coresys).load_config()
refresh_cache_calls: set[str] = set()

async def mock_refresh_cache(obj: AddonStore):

@ -77,7 +75,7 @@ async def test_load_with_custom_repository(coresys: CoreSys):
),
patch("pathlib.Path.is_file", return_value=True),
):
store_manager = StoreManager(coresys)
store_manager = await StoreManager(coresys).load_config()

with (
patch("supervisor.store.repository.Repository.load", return_value=None),

@ -107,45 +105,6 @@ async def test_load_with_custom_repository(coresys: CoreSys):
assert "http://example.com" in store_manager.repository_urls


async def test_load_from_core_config(coresys: CoreSys):
"""Test custom repositories loaded from core config when present."""
# pylint: disable=protected-access
coresys.config._data[ATTR_ADDONS_CUSTOM_LIST] = ["http://example.com"]
assert coresys.config.addons_repositories == ["http://example.com"]

migrate_system_env(coresys)

with (
patch("supervisor.store.repository.Repository.load", return_value=None),
patch("supervisor.store.repository.Repository.validate", return_value=True),
patch("pathlib.Path.exists", return_value=True),
):
await coresys.store.load()

assert len(coresys.store.all) == 6
assert isinstance(coresys.store.get("core"), Repository)
assert isinstance(coresys.store.get("local"), Repository)

assert len(coresys.store.repository_urls) == 4
assert (
"https://github.com/hassio-addons/repository" in coresys.store.repository_urls
)
assert (
"https://github.com/esphome/home-assistant-addon"
in coresys.store.repository_urls
)
assert (
"https://github.com/music-assistant/home-assistant-addon"
in coresys.store.repository_urls
)
assert "http://example.com" in coresys.store.repository_urls

assert coresys.config.addons_repositories == []

coresys.config.save_data.assert_called_once()
coresys.store.save_data.assert_called_once()


async def test_reload_fails_if_out_of_date(coresys: CoreSys):
"""Test reload fails when supervisor not updated."""
with (

@ -60,7 +60,7 @@ async def test_auth_request_without_backend_cache(
mock_auth_backend.return_value = True
mock_api_state.return_value = False

coresys.auth._update_cache("username", "password")
await coresys.auth._update_cache("username", "password")

assert await coresys.auth.check_login(addon, "username", "password")
assert not mock_auth_backend.called

@ -76,12 +76,12 @@ async def test_auth_request_with_backend_cache_update(
mock_auth_backend.return_value = False
mock_api_state.return_value = True

coresys.auth._update_cache("username", "password")
await coresys.auth._update_cache("username", "password")

assert await coresys.auth.check_login(addon, "username", "password")

await asyncio.sleep(0)

assert mock_auth_backend.called
coresys.auth._dismatch_cache("username", "password")
await coresys.auth._dismatch_cache("username", "password")
assert not await coresys.auth.check_login(addon, "username", "password")

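The auth cache helpers become coroutines for the same underlying reason, assuming that updating or invalidating a cached credential ends in a save_data() call, which is now executor-backed. A plausible, self-contained shape (a toy class, not the Supervisor implementation; the hashing and attribute names are illustrative):

    import hashlib

    class AuthCacheSketch:
        """Toy illustration of why the cache helpers are now coroutines."""

        def __init__(self, backing_config) -> None:
            # backing_config is any object with an executor-backed save_data(),
            # e.g. the FileConfigSketch shown earlier in these notes.
            self._config = backing_config
            self._data = backing_config._data

        @staticmethod
        def _rehash(value: str) -> str:
            return hashlib.sha256(value.encode()).hexdigest()

        async def _update_cache(self, username: str, password: str) -> None:
            # Store the hashed pair, then persist through the async writer.
            self._data[self._rehash(username)] = self._rehash(password)
            await self._config.save_data()
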
@ -74,11 +74,11 @@ async def test_ingress_save_data(coresys: CoreSys, tmp_supervisor_data: Path):
"""Test saving ingress data to file."""
config_file = tmp_supervisor_data / "ingress.json"
with patch("supervisor.ingress.FILE_HASSIO_INGRESS", new=config_file):
ingress = Ingress(coresys)
ingress = await Ingress(coresys).load_config()
session = ingress.create_session(
IngressSessionData(IngressSessionDataUser("123", "Test", "test"))
)
ingress.save_data()
await ingress.save_data()

assert config_file.exists()
data = read_json_file(config_file)