Merge pull request #1758 from home-assistant/dev

Release 225
Pascal Vizeli 2020-05-28 14:37:30 +02:00 committed by GitHub
commit 3889504292
29 changed files with 169 additions and 116 deletions

View File

@ -10,7 +10,7 @@ gitpython==3.1.2
jinja2==2.11.2
packaging==20.4
ptvsd==4.3.2
pulsectl==20.5.0
pulsectl==20.5.1
pytz==2020.1
pyudev==0.22.0
ruamel.yaml==0.15.100

View File

@ -1,4 +1,4 @@
flake8==3.8.1
flake8==3.8.2
pylint==2.5.2
pytest==5.4.2
pytest-timeout==1.3.4

View File

@ -37,7 +37,7 @@ class AddonManager(CoreSysAttributes):
@property
def all(self) -> List[AnyAddon]:
"""Return a list of all add-ons."""
addons = {**self.store, **self.local}
addons: Dict[str, AnyAddon] = {**self.store, **self.local}
return list(addons.values())
@property
@ -142,7 +142,7 @@ class AddonManager(CoreSysAttributes):
if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug)
return
addon = self.local.get(slug)
addon = self.local[slug]
try:
await addon.instance.remove()
@ -191,12 +191,12 @@ class AddonManager(CoreSysAttributes):
if slug not in self.local:
_LOGGER.error("Add-on %s is not installed", slug)
raise AddonsError()
addon = self.local.get(slug)
addon = self.local[slug]
if addon.is_detached:
_LOGGER.error("Add-on %s is not available inside store", slug)
raise AddonsError()
store = self.store.get(slug)
store = self.store[slug]
if addon.version == store.version:
_LOGGER.warning("No update available for add-on %s", slug)
@ -233,12 +233,12 @@ class AddonManager(CoreSysAttributes):
if slug not in self.local:
_LOGGER.error("Add-on %s is not installed", slug)
raise AddonsError()
addon = self.local.get(slug)
addon = self.local[slug]
if addon.is_detached:
_LOGGER.error("Add-on %s is not available inside store", slug)
raise AddonsError()
store = self.store.get(slug)
store = self.store[slug]
# Check if a rebuild is possible now
if addon.version != store.version:
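
Editor's note: after the preceding `if slug not in self.local` / `if slug not in self.store` checks, the lookups switch from `.get(slug)` to direct indexing, so type checkers see `Addon` instead of `Optional[Addon]` and an impossible `None` branch disappears. A minimal standalone sketch of the difference, with hypothetical names:

```python
from typing import Dict, Optional


class Addon:
    """Stand-in for a real add-on object (hypothetical)."""

    version: str = "1.0.0"


local: Dict[str, Addon] = {"core_ssh": Addon()}


def installed_version(slug: str) -> Optional[str]:
    """Return the installed version, or None if the add-on is missing."""
    if slug not in local:
        return None
    # Membership was just checked, so indexing cannot raise KeyError and the
    # value is typed as Addon rather than Optional[Addon] (unlike .get()).
    return local[slug].version


print(installed_version("core_ssh"))  # 1.0.0
print(installed_version("unknown"))   # None
```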

View File

@ -215,7 +215,8 @@ class AddonModel(CoreSysAttributes):
services = {}
for data in services_list:
service = RE_SERVICE.match(data)
services[service.group("service")] = service.group("rights")
if service:
services[service.group("service")] = service.group("rights")
return services
@ -464,6 +465,8 @@ class AddonModel(CoreSysAttributes):
volumes = {}
for volume in self.data[ATTR_MAP]:
result = RE_VOLUME.match(volume)
if not result:
continue
volumes[result.group(1)] = result.group(2) or "ro"
return volumes
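
Editor's note: both parsers now skip entries whose regex match fails instead of calling `.group()` on `None`. A minimal sketch of the pattern, assuming a hypothetical volume-map format:

```python
import re
from typing import Dict, List

# Hypothetical volume-map format: "<name>" or "<name>:<rw|ro>"
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")


def parse_volumes(raw: List[str]) -> Dict[str, str]:
    """Parse volume entries, skipping malformed ones instead of crashing."""
    volumes: Dict[str, str] = {}
    for entry in raw:
        result = RE_VOLUME.match(entry)
        if not result:
            # A non-matching entry would previously blow up on result.group()
            continue
        volumes[result.group(1)] = result.group(2) or "ro"
    return volumes


print(parse_volumes(["config:rw", "share", "not-a-volume"]))
# {'config': 'rw', 'share': 'ro'}
```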

View File

@ -2,7 +2,7 @@
import logging
import re
import secrets
from typing import Any, Dict, List
from typing import Any, Dict, List, Union
import uuid
import voluptuous as vol
@ -385,6 +385,9 @@ def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
# parse extend data from type
match = RE_SCHEMA_ELEMENT.match(typ)
if not match:
raise vol.Invalid(f"Unknown type {typ}")
# prepare range
range_args = {}
for group_name in _SCHEMA_LENGTH_PARTS:
@ -462,7 +465,7 @@ def _check_missing_options(origin, exists, root):
def schema_ui_options(raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
"""Generate UI schema."""
ui_schema = []
ui_schema: List[Dict[str, Any]] = []
# read options
for key, value in raw_schema.items():
@ -483,7 +486,7 @@ def _single_ui_option(
ui_schema: List[Dict[str, Any]], value: str, key: str, multiple: bool = False
) -> None:
"""Validate a single element."""
ui_node = {"name": key}
ui_node: Dict[str, Union[str, bool, float, List[str]]] = {"name": key}
# If multiple
if multiple:
@ -491,6 +494,8 @@ def _single_ui_option(
# Parse extend data from type
match = RE_SCHEMA_ELEMENT.match(value)
if not match:
return
# Prepare range
for group_name in _SCHEMA_LENGTH_PARTS:

View File

@ -33,7 +33,7 @@ from ..const import (
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import docker_image, network_port, complex_version
from ..validate import complex_version, docker_image, network_port
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)

View File

@ -43,7 +43,7 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..utils.validate import validate_timezone
from ..validate import repositories, wait_boot, simple_version
from ..validate import repositories, simple_version, wait_boot
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)

View File

@ -27,15 +27,16 @@ from .discovery import Discovery
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .hwmon import HwMonitor
from .ingress import Ingress
from .misc.hwmon import HwMonitor
from .misc.scheduler import Scheduler
from .misc.secrets import SecretsManager
from .misc.tasks import Tasks
from .plugins import PluginManager
from .secrets import SecretsManager
from .services import ServiceManager
from .snapshots import SnapshotManager
from .store import StoreManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater
from .utils.dt import fetch_timezone
@ -45,7 +46,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
MACHINE_ID = Path("/etc/machine-id")
async def initialize_coresys():
async def initialize_coresys() -> None:
"""Initialize supervisor coresys/objects."""
coresys = CoreSys()
@ -70,6 +71,7 @@ async def initialize_coresys():
coresys.dbus = DBusManager(coresys)
coresys.hassos = HassOS(coresys)
coresys.secrets = SecretsManager(coresys)
coresys.scheduler = Scheduler(coresys)
# bootstrap config
initialize_system_data(coresys)
@ -92,7 +94,7 @@ async def initialize_coresys():
return coresys
def initialize_system_data(coresys: CoreSys):
def initialize_system_data(coresys: CoreSys) -> None:
"""Set up the default configuration and create folders."""
config = coresys.config
@ -168,7 +170,7 @@ def initialize_system_data(coresys: CoreSys):
coresys.config.debug = True
def migrate_system_env(coresys: CoreSys):
def migrate_system_env(coresys: CoreSys) -> None:
"""Cleanup some stuff after update."""
config = coresys.config
@ -181,7 +183,7 @@ def migrate_system_env(coresys: CoreSys):
_LOGGER.warning("Can't cleanup old Add-on build directory")
def initialize_logging():
def initialize_logging() -> None:
"""Initialize the logging."""
logging.basicConfig(level=logging.INFO)
fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
@ -237,7 +239,7 @@ def check_environment() -> None:
_LOGGER.critical("Can't find gdbus!")
def reg_signal(loop):
def reg_signal(loop) -> None:
"""Register SIGTERM and SIGKILL to stop system."""
try:
loop.add_signal_handler(signal.SIGTERM, lambda: loop.call_soon(loop.stop))

View File

@ -3,7 +3,7 @@ from datetime import datetime
import logging
import os
from pathlib import Path, PurePath
from typing import Optional
from typing import List, Optional
from .const import (
ATTR_ADDONS_CUSTOM_LIST,
@ -52,12 +52,12 @@ class CoreConfig(JsonConfig):
super().__init__(FILE_HASSIO_CONFIG, SCHEMA_SUPERVISOR_CONFIG)
@property
def timezone(self):
def timezone(self) -> str:
"""Return system timezone."""
return self._data[ATTR_TIMEZONE]
@timezone.setter
def timezone(self, value):
def timezone(self, value: str) -> None:
"""Set system timezone."""
self._data[ATTR_TIMEZONE] = value
@ -67,7 +67,7 @@ class CoreConfig(JsonConfig):
return self._data.get(ATTR_VERSION)
@version.setter
def version(self, value: str):
def version(self, value: str) -> None:
"""Set config version."""
self._data[ATTR_VERSION] = value
@ -77,7 +77,7 @@ class CoreConfig(JsonConfig):
return self._data[ATTR_WAIT_BOOT]
@wait_boot.setter
def wait_boot(self, value: int):
def wait_boot(self, value: int) -> None:
"""Set wait boot time."""
self._data[ATTR_WAIT_BOOT] = value
@ -87,7 +87,7 @@ class CoreConfig(JsonConfig):
return self._data[ATTR_DEBUG]
@debug.setter
def debug(self, value: bool):
def debug(self, value: bool) -> None:
"""Set debug mode."""
self._data[ATTR_DEBUG] = value
@ -97,7 +97,7 @@ class CoreConfig(JsonConfig):
return self._data[ATTR_DEBUG_BLOCK]
@debug_block.setter
def debug_block(self, value: bool):
def debug_block(self, value: bool) -> None:
"""Set debug wait mode."""
self._data[ATTR_DEBUG_BLOCK] = value
@ -107,7 +107,7 @@ class CoreConfig(JsonConfig):
return self._data[ATTR_LOGGING]
@logging.setter
def logging(self, value: LogLevel):
def logging(self, value: LogLevel) -> None:
"""Set system log level."""
self._data[ATTR_LOGGING] = value
self.modify_log_level()
@ -118,7 +118,7 @@ class CoreConfig(JsonConfig):
logging.getLogger("supervisor").setLevel(lvl)
@property
def last_boot(self):
def last_boot(self) -> datetime:
"""Return last boot datetime."""
boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)
@ -128,138 +128,138 @@ class CoreConfig(JsonConfig):
return boot_time
@last_boot.setter
def last_boot(self, value):
def last_boot(self, value: datetime) -> None:
"""Set last boot datetime."""
self._data[ATTR_LAST_BOOT] = value.isoformat()
@property
def path_supervisor(self):
def path_supervisor(self) -> Path:
"""Return Supervisor data path."""
return SUPERVISOR_DATA
@property
def path_extern_supervisor(self):
def path_extern_supervisor(self) -> PurePath:
"""Return Supervisor data path external for Docker."""
return PurePath(os.environ[ENV_SUPERVISOR_SHARE])
@property
def path_extern_homeassistant(self):
def path_extern_homeassistant(self) -> str:
"""Return config path external for Docker."""
return str(PurePath(self.path_extern_supervisor, HOMEASSISTANT_CONFIG))
@property
def path_homeassistant(self):
def path_homeassistant(self) -> Path:
"""Return config path inside supervisor."""
return Path(SUPERVISOR_DATA, HOMEASSISTANT_CONFIG)
@property
def path_extern_ssl(self):
def path_extern_ssl(self) -> str:
"""Return SSL path external for Docker."""
return str(PurePath(self.path_extern_supervisor, HASSIO_SSL))
@property
def path_ssl(self):
def path_ssl(self) -> Path:
"""Return SSL path inside supervisor."""
return Path(SUPERVISOR_DATA, HASSIO_SSL)
@property
def path_addons_core(self):
def path_addons_core(self) -> Path:
"""Return git path for core Add-ons."""
return Path(SUPERVISOR_DATA, ADDONS_CORE)
@property
def path_addons_git(self):
def path_addons_git(self) -> Path:
"""Return path for Git Add-on."""
return Path(SUPERVISOR_DATA, ADDONS_GIT)
@property
def path_addons_local(self):
def path_addons_local(self) -> Path:
"""Return path for custom Add-ons."""
return Path(SUPERVISOR_DATA, ADDONS_LOCAL)
@property
def path_extern_addons_local(self):
def path_extern_addons_local(self) -> PurePath:
"""Return path for custom Add-ons."""
return PurePath(self.path_extern_supervisor, ADDONS_LOCAL)
@property
def path_addons_data(self):
def path_addons_data(self) -> Path:
"""Return root Add-on data folder."""
return Path(SUPERVISOR_DATA, ADDONS_DATA)
@property
def path_extern_addons_data(self):
def path_extern_addons_data(self) -> PurePath:
"""Return root add-on data folder external for Docker."""
return PurePath(self.path_extern_supervisor, ADDONS_DATA)
@property
def path_audio(self):
def path_audio(self) -> Path:
"""Return root audio data folder."""
return Path(SUPERVISOR_DATA, AUDIO_DATA)
@property
def path_extern_audio(self):
def path_extern_audio(self) -> PurePath:
"""Return root audio data folder external for Docker."""
return PurePath(self.path_extern_supervisor, AUDIO_DATA)
@property
def path_tmp(self):
def path_tmp(self) -> Path:
"""Return Supervisor temp folder."""
return Path(SUPERVISOR_DATA, TMP_DATA)
@property
def path_extern_tmp(self):
def path_extern_tmp(self) -> PurePath:
"""Return Supervisor temp folder for Docker."""
return PurePath(self.path_extern_supervisor, TMP_DATA)
@property
def path_backup(self):
def path_backup(self) -> Path:
"""Return root backup data folder."""
return Path(SUPERVISOR_DATA, BACKUP_DATA)
@property
def path_extern_backup(self):
def path_extern_backup(self) -> PurePath:
"""Return root backup data folder external for Docker."""
return PurePath(self.path_extern_supervisor, BACKUP_DATA)
@property
def path_share(self):
def path_share(self) -> Path:
"""Return root share data folder."""
return Path(SUPERVISOR_DATA, SHARE_DATA)
@property
def path_apparmor(self):
def path_apparmor(self) -> Path:
"""Return root Apparmor profile folder."""
return Path(SUPERVISOR_DATA, APPARMOR_DATA)
@property
def path_extern_share(self):
def path_extern_share(self) -> PurePath:
"""Return root share data folder external for Docker."""
return PurePath(self.path_extern_supervisor, SHARE_DATA)
@property
def path_extern_dns(self):
def path_extern_dns(self) -> str:
"""Return dns path external for Docker."""
return str(PurePath(self.path_extern_supervisor, DNS_DATA))
@property
def path_dns(self):
def path_dns(self) -> Path:
"""Return dns path inside supervisor."""
return Path(SUPERVISOR_DATA, DNS_DATA)
@property
def addons_repositories(self):
def addons_repositories(self) -> List[str]:
"""Return list of custom Add-on repositories."""
return self._data[ATTR_ADDONS_CUSTOM_LIST]
def add_addon_repository(self, repo):
def add_addon_repository(self, repo: str) -> None:
"""Add a custom repository to list."""
if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
return
self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
def drop_addon_repository(self, repo):
def drop_addon_repository(self, repo: str) -> None:
"""Remove a custom repository from list."""
if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
return
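
Editor's note: the new annotations distinguish paths that exist inside the Supervisor container (`Path`) from host-side paths that are only joined and handed to Docker as mount sources (`PurePath` or `str`), presumably because the latter are never opened from this process. A small illustrative sketch, with hypothetical constants:

```python
from pathlib import Path, PurePath

SUPERVISOR_DATA = Path("/data")                       # path inside this container
EXTERN_SUPERVISOR = PurePath("/mnt/data/supervisor")  # hypothetical host-side root


def path_ssl() -> Path:
    """Concrete Path: exists in this container and can be read and written."""
    return Path(SUPERVISOR_DATA, "ssl")


def path_extern_ssl() -> str:
    """External path: only joined and passed to Docker as a bind-mount source,
    never opened from this process, so PurePath/str is enough."""
    return str(PurePath(EXTERN_SUPERVISOR, "ssl"))


print(path_ssl(), path_extern_ssl())
```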

View File

@ -3,7 +3,7 @@ from enum import Enum
from ipaddress import ip_network
from pathlib import Path
SUPERVISOR_VERSION = "224"
SUPERVISOR_VERSION = "225"
URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
@ -362,6 +362,7 @@ class CoreStates(str, Enum):
STARTUP = "startup"
RUNNING = "running"
FREEZE = "freeze"
STOPPING = "stopping"
class LogLevel(str, Enum):

View File

@ -48,6 +48,12 @@ class Core(CoreSysAttributes):
"Docker version %s is not supported by Supervisor!",
self.sys_docker.info.version,
)
elif self.sys_docker.info.inside_lxc:
self.healthy = False
_LOGGER.critical(
"Detected Docker running inside LXC. Running Home Assistant with the Supervisor on LXC is not supported!"
)
self.sys_docker.info.check_requirements()
# Check if system is healthy
@ -181,7 +187,7 @@ class Core(CoreSysAttributes):
async def stop(self):
"""Stop a running orchestration."""
# don't process scheduler anymore
self.sys_scheduler.suspend = True
self.state = CoreStates.STOPPING
# store new last boot / prevent time adjustments
if self.state == CoreStates.RUNNING:
@ -207,12 +213,17 @@ class Core(CoreSysAttributes):
async def shutdown(self):
"""Shutdown all running containers in correct order."""
# don't process scheduler anymore
self.state = CoreStates.STOPPING
# Shutdown Application Add-ons, using Home Assistant API
await self.sys_addons.shutdown(STARTUP_APPLICATION)
# Close Home Assistant
with suppress(HassioError):
await self.sys_homeassistant.stop()
# Shutdown System Add-ons
await self.sys_addons.shutdown(STARTUP_SERVICES)
await self.sys_addons.shutdown(STARTUP_SYSTEM)
await self.sys_addons.shutdown(STARTUP_INITIALIZE)

View File

@ -10,7 +10,6 @@ from .config import CoreConfig
from .const import UpdateChannels
from .docker import DockerAPI
from .misc.hardware import Hardware
from .misc.scheduler import Scheduler
if TYPE_CHECKING:
from .addons import AddonManager
@ -21,16 +20,17 @@ if TYPE_CHECKING:
from .dbus import DBusManager
from .discovery import Discovery
from .hassos import HassOS
from .hwmon import HwMonitor
from .misc.scheduler import Scheduler
from .misc.hwmon import HwMonitor
from .misc.secrets import SecretsManager
from .misc.tasks import Tasks
from .homeassistant import HomeAssistant
from .host import HostManager
from .ingress import Ingress
from .secrets import SecretsManager
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
from .store import StoreManager
from .tasks import Tasks
from .updater import Updater
from .plugins import PluginManager
@ -58,7 +58,6 @@ class CoreSys:
self._config: CoreConfig = CoreConfig()
self._hardware: Hardware = Hardware()
self._docker: DockerAPI = DockerAPI()
self._scheduler: Scheduler = Scheduler()
# Internal objects pointers
self._core: Optional[Core] = None
@ -77,6 +76,7 @@ class CoreSys:
self._hassos: Optional[HassOS] = None
self._services: Optional[ServiceManager] = None
self._secrets: Optional[SecretsManager] = None
self._scheduler: Optional[Scheduler] = None
self._store: Optional[StoreManager] = None
self._discovery: Optional[Discovery] = None
self._hwmonitor: Optional[HwMonitor] = None
@ -127,8 +127,17 @@ class CoreSys:
@property
def scheduler(self) -> Scheduler:
"""Return Scheduler object."""
if self._scheduler is None:
raise RuntimeError("Scheduler not set!")
return self._scheduler
@scheduler.setter
def scheduler(self, value: Scheduler) -> None:
"""Set a Scheduler object."""
if self._scheduler:
raise RuntimeError("Scheduler already set!")
self._scheduler = value
@property
def core(self) -> Core:
"""Return core object."""

View File

@ -2,6 +2,7 @@
from contextlib import suppress
from ipaddress import IPv4Address
import logging
from pathlib import Path
from typing import Any, Dict, Optional
import attr
@ -46,6 +47,11 @@ class DockerInfo:
return version_local >= version_min
@property
def inside_lxc(self) -> bool:
"""Return True if the docker run inside lxc."""
return Path("/dev/lxd/sock").exists()
def check_requirements(self) -> None:
"""Show wrong configurations."""
if self.storage != "overlay2":
@ -95,7 +101,7 @@ class DockerAPI:
version: str = "latest",
dns: bool = True,
ipv4: Optional[IPv4Address] = None,
**kwargs: Dict[str, Any],
**kwargs: Any,
) -> docker.models.containers.Container:
"""Create a Docker container and run it.
@ -153,7 +159,7 @@ class DockerAPI:
image: str,
version: str = "latest",
command: Optional[str] = None,
**kwargs: Dict[str, Any],
**kwargs: Any,
) -> CommandReturn:
"""Create a temporary container and run command.

View File

@ -26,7 +26,7 @@ class DockerInterface(CoreSysAttributes):
self.lock: asyncio.Lock = asyncio.Lock()
@property
def timeout(self) -> str:
def timeout(self) -> int:
"""Return timeout for Docker actions."""
return 30

View File

@ -91,10 +91,10 @@ class DockerNetwork:
Need run inside executor.
"""
ipv4 = str(ipv4) if ipv4 else None
ipv4_address = str(ipv4) if ipv4 else None
try:
self.network.connect(container, aliases=alias, ipv4_address=ipv4)
self.network.connect(container, aliases=alias, ipv4_address=ipv4_address)
except docker.errors.APIError as err:
_LOGGER.error("Can't link container to hassio-net: %s", err)
raise DockerAPIError() from None

View File

@ -56,7 +56,7 @@ class HassOS(CoreSysAttributes):
_LOGGER.error("No HassOS available")
raise HassOSNotSupportedError()
async def _download_raucb(self, version: str) -> None:
async def _download_raucb(self, version: str) -> Path:
"""Download rauc bundle (OTA) from github."""
url = URL_HASSOS_OTA.format(version=version, board=self.board)
raucb = Path(self.sys_config.path_tmp, f"hassos-{version}.raucb")
@ -158,7 +158,7 @@ class HassOS(CoreSysAttributes):
_LOGGER.error("HassOS update fails with: %s", self.sys_dbus.rauc.last_error)
raise HassOSUpdateError()
async def mark_healthy(self):
async def mark_healthy(self) -> None:
"""Set booted partition as good for rauc."""
try:
response = await self.sys_dbus.rauc.mark(RaucState.GOOD, "booted")

View File

@ -253,9 +253,9 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
while True:
if not self.sys_updater.image_homeassistant:
_LOGGER.warning(
"Found no information about Home Assistant. Retry in 15sec"
"Found no information about Home Assistant. Retry in 30sec"
)
await asyncio.sleep(15)
await asyncio.sleep(30)
await self.sys_updater.reload()
continue

View File

@ -72,7 +72,13 @@ class Ingress(JsonConfig, CoreSysAttributes):
sessions = {}
for session, valid in self.sessions.items():
valid_dt = utc_from_timestamp(valid)
# check if timestamp valid, to avoid crash on malformed timestamp
try:
valid_dt = utc_from_timestamp(valid)
except OverflowError:
_LOGGER.warning("Session timestamp %f is invalid!", valid_dt)
continue
if valid_dt < now:
continue
@ -103,7 +109,13 @@ class Ingress(JsonConfig, CoreSysAttributes):
"""Return True if session valid and make it longer valid."""
if session not in self.sessions:
return False
valid_until = utc_from_timestamp(self.sessions[session])
# check if timestamp valid, to avoid crash on malformed timestamp
try:
valid_until = utc_from_timestamp(self.sessions[session])
except OverflowError:
_LOGGER.warning("Session timestamp %f is invalid!", valid_until)
return False
# Is still valid?
if valid_until < utcnow():
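
Editor's note: converting an out-of-range float to a datetime raises `OverflowError` (and related errors), so a corrupted session store could previously crash ingress. A minimal sketch of the guard, assuming a helper comparable to `utc_from_timestamp`:

```python
from datetime import datetime, timezone
from typing import Dict, Optional


def utc_from_timestamp(ts: float) -> datetime:
    """Hypothetical helper mirroring the one used above."""
    return datetime.fromtimestamp(ts, timezone.utc)


def session_expiry(sessions: Dict[str, float], session: str) -> Optional[datetime]:
    """Return the expiry time, or None if the stored value is unusable."""
    try:
        return utc_from_timestamp(sessions[session])
    except (OverflowError, OSError, ValueError):
        # Malformed timestamp in the store: treat the session as invalid
        return None


print(session_expiry({"abc": 1e40}, "abc"))  # None instead of a crash
```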

View File

@ -6,8 +6,8 @@ from typing import Optional
import pyudev
from .coresys import CoreSys, CoreSysAttributes
from .utils import AsyncCallFilter
from ..coresys import CoreSys, CoreSysAttributes
from ..utils import AsyncCallFilter
_LOGGER: logging.Logger = logging.getLogger(__name__)

View File

@ -1,8 +1,10 @@
"""Schedule for Supervisor."""
import asyncio
from datetime import date, datetime, time, timedelta
import logging
from ..const import CoreStates
from ..coresys import CoreSys, CoreSysAttributes
_LOGGER: logging.Logger = logging.getLogger(__name__)
INTERVAL = "interval"
@ -11,14 +13,13 @@ CALL = "callback"
TASK = "task"
class Scheduler:
class Scheduler(CoreSysAttributes):
"""Schedule task inside Supervisor."""
def __init__(self):
def __init__(self, coresys: CoreSys):
"""Initialize task schedule."""
self.loop = asyncio.get_running_loop()
self.coresys: CoreSys = coresys
self._data = {}
self.suspend = False
def register_task(self, coro_callback, interval, repeat=True):
"""Schedule a coroutine.
@ -40,8 +41,8 @@ class Scheduler:
"""Run a scheduled task."""
data = self._data[task_id]
if not self.suspend:
self.loop.create_task(data[CALL]())
if self.sys_core.state == CoreStates.RUNNING:
self.sys_create_task(data[CALL]())
if data[REPEAT]:
self._schedule_task(data[INTERVAL], task_id)
@ -51,7 +52,7 @@ class Scheduler:
def _schedule_task(self, interval, task_id):
"""Schedule a task on loop."""
if isinstance(interval, (int, float)):
job = self.loop.call_later(interval, self._run_task, task_id)
job = self.sys_loop.call_later(interval, self._run_task, task_id)
elif isinstance(interval, time):
today = datetime.combine(date.today(), interval)
tomorrow = datetime.combine(date.today() + timedelta(days=1), interval)
@ -62,7 +63,7 @@ class Scheduler:
else:
calc = tomorrow
job = self.loop.call_at(calc.timestamp(), self._run_task, task_id)
job = self.sys_loop.call_at(calc.timestamp(), self._run_task, task_id)
else:
_LOGGER.critical(
"Unknown interval %s (type: %s) for scheduler %s",

View File

@ -2,12 +2,12 @@
from datetime import timedelta
import logging
from pathlib import Path
from typing import Dict, Union
from typing import Dict, Optional, Union
from ruamel.yaml import YAML, YAMLError
from .coresys import CoreSys, CoreSysAttributes
from .utils import AsyncThrottle
from ..coresys import CoreSys, CoreSysAttributes
from ..utils import AsyncThrottle
_LOGGER: logging.Logger = logging.getLogger(__name__)
@ -25,7 +25,7 @@ class SecretsManager(CoreSysAttributes):
"""Return path to secret file."""
return Path(self.sys_config.path_homeassistant, "secrets.yaml")
def get(self, secret: str) -> Union[bool, float, int, str]:
def get(self, secret: str) -> Optional[Union[bool, float, int, str]]:
"""Get secret from store."""
_LOGGER.info("Request secret %s", secret)
return self.secrets.get(secret)

View File

@ -2,8 +2,8 @@
import asyncio
import logging
from .coresys import CoreSysAttributes
from .exceptions import (
from ..coresys import CoreSysAttributes
from ..exceptions import (
AudioError,
CliError,
CoreDNSError,

View File

@ -5,7 +5,7 @@ Code: https://github.com/home-assistant/plugin-audio
import asyncio
from contextlib import suppress
import logging
from pathlib import Path
from pathlib import Path, PurePath
import shutil
from typing import Awaitable, Optional
@ -36,12 +36,12 @@ class Audio(JsonConfig, CoreSysAttributes):
self.client_template: Optional[jinja2.Template] = None
@property
def path_extern_pulse(self) -> Path:
def path_extern_pulse(self) -> PurePath:
"""Return path of pulse socket file."""
return self.sys_config.path_extern_audio.joinpath("external")
@property
def path_extern_asound(self) -> Path:
def path_extern_asound(self) -> PurePath:
"""Return path of default asound config file."""
return self.sys_config.path_extern_audio.joinpath("asound")

View File

@ -390,6 +390,7 @@ class CoreDNS(JsonConfig, CoreSysAttributes):
if name not in entry.names:
continue
return entry
return None
def logs(self) -> Awaitable[bytes]:
"""Get CoreDNS docker logs.

View File

@ -3,7 +3,7 @@
import voluptuous as vol
from ..const import ATTR_ACCESS_TOKEN, ATTR_IMAGE, ATTR_SERVERS, ATTR_VERSION
from ..validate import dns_server_list, docker_image, token, simple_version
from ..validate import dns_server_list, docker_image, simple_version, token
SCHEMA_DNS_CONFIG = vol.Schema(
{

View File

@ -3,7 +3,7 @@ import asyncio
import logging
from pathlib import Path
from ..const import FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL
from ..const import FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL, CoreStates
from ..coresys import CoreSysAttributes
from ..utils.dt import utcnow
from .snapshot import Snapshot
@ -125,7 +125,7 @@ class SnapshotManager(CoreSysAttributes):
snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password)
_LOGGER.info("Full-Snapshot %s start", snapshot.slug)
try:
self.sys_scheduler.suspend = True
self.sys_core.state = CoreStates.FREEZE
await self.lock.acquire()
async with snapshot:
@ -147,7 +147,7 @@ class SnapshotManager(CoreSysAttributes):
return snapshot
finally:
self.sys_scheduler.suspend = False
self.sys_core.state = CoreStates.RUNNING
self.lock.release()
async def do_snapshot_partial(
@ -164,7 +164,7 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
try:
self.sys_scheduler.suspend = True
self.sys_core.state = CoreStates.FREEZE
await self.lock.acquire()
async with snapshot:
@ -196,7 +196,7 @@ class SnapshotManager(CoreSysAttributes):
return snapshot
finally:
self.sys_scheduler.suspend = False
self.sys_core.state = CoreStates.RUNNING
self.lock.release()
async def do_restore_full(self, snapshot, password=None):
@ -215,7 +215,7 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Full-Restore %s start", snapshot.slug)
try:
self.sys_scheduler.suspend = True
self.sys_core.state = CoreStates.FREEZE
await self.lock.acquire()
async with snapshot:
@ -267,7 +267,7 @@ class SnapshotManager(CoreSysAttributes):
return True
finally:
self.sys_scheduler.suspend = False
self.sys_core.state = CoreStates.RUNNING
self.lock.release()
async def do_restore_partial(
@ -287,7 +287,7 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Partial-Restore %s start", snapshot.slug)
try:
self.sys_scheduler.suspend = True
self.sys_core.state = CoreStates.FREEZE
await self.lock.acquire()
async with snapshot:
@ -339,5 +339,5 @@ class SnapshotManager(CoreSysAttributes):
return True
finally:
self.sys_scheduler.suspend = False
self.sys_core.state = CoreStates.RUNNING
self.lock.release()
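
Editor's note: snapshots and restores now hold the core in `FREEZE` instead of suspending the scheduler directly, and the `finally` blocks guarantee `RUNNING` is restored. A minimal sketch of the same pattern in context-manager form, with hypothetical names:

```python
from contextlib import contextmanager
from enum import Enum


class CoreStates(str, Enum):
    RUNNING = "running"
    FREEZE = "freeze"


class Core:
    state = CoreStates.RUNNING


@contextmanager
def frozen(core: Core):
    """Hypothetical helper: hold the core in FREEZE for the duration of a
    snapshot/restore and always restore RUNNING, mirroring the try/finally above."""
    core.state = CoreStates.FREEZE
    try:
        yield
    finally:
        core.state = CoreStates.RUNNING


core = Core()
with frozen(core):
    pass  # write or restore the snapshot here
assert core.state == CoreStates.RUNNING
```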

View File

@ -31,7 +31,7 @@ from ..const import (
SNAPSHOT_FULL,
SNAPSHOT_PARTIAL,
)
from ..validate import docker_image, network_port, repositories, complex_version
from ..validate import complex_version, docker_image, network_port, repositories
ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]

View File

@ -20,21 +20,23 @@ from ..exceptions import (
_LOGGER: logging.Logger = logging.getLogger(__name__)
# Use to convert GVariant into json
RE_GVARIANT_TYPE: re.Match = re.compile(
RE_GVARIANT_TYPE: re.Pattern[Any] = re.compile(
r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(boolean|byte|int16|uint16|int32|uint32|handle|int64|uint64|double|"
r"string|objectpath|signature|@[asviumodf\{\}]+) "
)
RE_GVARIANT_VARIANT: re.Match = re.compile(r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(<|>)")
RE_GVARIANT_STRING_ESC: re.Match = re.compile(
RE_GVARIANT_VARIANT: re.Pattern[Any] = re.compile(r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(<|>)")
RE_GVARIANT_STRING_ESC: re.Pattern[Any] = re.compile(
r"(?<=(?: |{|\[|\(|<))'[^']*?\"[^']*?'(?=(?:|]|}|,|\)|>))"
)
RE_GVARIANT_STRING: re.Match = re.compile(
RE_GVARIANT_STRING: re.Pattern[Any] = re.compile(
r"(?<=(?: |{|\[|\(|<))'(.*?)'(?=(?:|]|}|,|\)|>))"
)
RE_GVARIANT_TUPLE_O: re.Match = re.compile(r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(\()")
RE_GVARIANT_TUPLE_C: re.Match = re.compile(r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(,?\))")
RE_GVARIANT_TUPLE_O: re.Pattern[Any] = re.compile(r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(\()")
RE_GVARIANT_TUPLE_C: re.Pattern[Any] = re.compile(
r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(,?\))"
)
RE_MONITOR_OUTPUT: re.Match = re.compile(r".+?: (?P<signal>[^ ].+) (?P<data>.*)")
RE_MONITOR_OUTPUT: re.Pattern[Any] = re.compile(r".+?: (?P<signal>[^ ].+) (?P<data>.*)")
# Map GDBus to errors
MAP_GDBUS_ERROR: Dict[str, Any] = {

View File

@ -1,11 +1,11 @@
"""Validate functions."""
import ipaddress
import re
import uuid
from typing import Optional, Union
import uuid
import voluptuous as vol
from packaging import version as pkg_version
import voluptuous as vol
from .const import (
ATTR_ACCESS_TOKEN,