Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-07-28 11:36:32 +00:00

Merge branch 'main' of github.com:home-assistant/supervisor into context

This commit is contained in: commit 726dd3a8f9
@@ -125,6 +125,8 @@ echo "Start Test-Env"
start_docker
trap "stop_docker" ERR

docker system prune -f

build_supervisor
cleanup_lastboot
cleanup_docker

@@ -10,6 +10,7 @@ from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonConfigurationError,
AddonsError,
AddonsJobError,
AddonsNotSupportedError,
CoreDNSError,
DockerAPIError,
@@ -147,7 +148,8 @@ class AddonManager(CoreSysAttributes):
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
]
],
on_condition=AddonsJobError,
)
async def install(self, slug: str) -> None:
"""Install an add-on."""
@@ -248,7 +250,8 @@ class AddonManager(CoreSysAttributes):
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
]
],
on_condition=AddonsJobError,
)
async def update(self, slug: str) -> None:
"""Update add-on."""
@@ -297,7 +300,8 @@ class AddonManager(CoreSysAttributes):
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
]
],
on_condition=AddonsJobError,
)
async def rebuild(self, slug: str) -> None:
"""Perform a rebuild of local build add-on."""
@@ -339,7 +343,8 @@ class AddonManager(CoreSysAttributes):
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
]
],
on_condition=AddonsJobError,
)
async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
"""Restore state of an add-on."""

@@ -112,6 +112,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes(
[
web.get("/network/info", api_network.info),
web.post("/network/reload", api_network.reload),
web.get(
"/network/interface/{interface}/info", api_network.interface_info
),

@@ -1,7 +1,7 @@
"""REST API for network."""
import asyncio
from ipaddress import ip_address, ip_interface
from typing import Any, Dict
from typing import Any, Awaitable, Dict

from aiohttp import web
import attr
@@ -18,6 +18,7 @@ from ..const import (
ATTR_FREQUENCY,
ATTR_GATEWAY,
ATTR_HOST_INTERNET,
ATTR_ID,
ATTR_INTERFACE,
ATTR_INTERFACES,
ATTR_IPV4,
@@ -26,6 +27,7 @@ from ..const import (
ATTR_METHOD,
ATTR_MODE,
ATTR_NAMESERVERS,
ATTR_PARENT,
ATTR_PRIMARY,
ATTR_PSK,
ATTR_SIGNAL,
@@ -80,7 +82,7 @@ SCHEMA_UPDATE = vol.Schema(
)


def ipconfig_struct(config: IpConfig) -> dict:
def ipconfig_struct(config: IpConfig) -> Dict[str, Any]:
"""Return a dict with information about ip configuration."""
return {
ATTR_METHOD: config.method,
@@ -90,7 +92,7 @@ def ipconfig_struct(config: IpConfig) -> dict:
}


def wifi_struct(config: WifiConfig) -> dict:
def wifi_struct(config: WifiConfig) -> Dict[str, Any]:
"""Return a dict with information about wifi configuration."""
return {
ATTR_MODE: config.mode,
@@ -100,7 +102,15 @@ def wifi_struct(config: WifiConfig) -> dict:
}


def interface_struct(interface: Interface) -> dict:
def vlan_struct(config: VlanConfig) -> Dict[str, Any]:
"""Return a dict with information about VLAN configuration."""
return {
ATTR_ID: config.id,
ATTR_PARENT: config.interface,
}


def interface_struct(interface: Interface) -> Dict[str, Any]:
"""Return a dict with information of a interface to be used in th API."""
return {
ATTR_INTERFACE: interface.name,
@@ -111,11 +121,11 @@ def interface_struct(interface: Interface) -> dict:
ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: wifi_struct(interface.vlan) if interface.vlan else None,
ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
}


def accesspoint_struct(accesspoint: AccessPoint) -> dict:
def accesspoint_struct(accesspoint: AccessPoint) -> Dict[str, Any]:
"""Return a dict for AccessPoint."""
return {
ATTR_MODE: accesspoint.mode,
@@ -207,6 +217,11 @@ class APINetwork(CoreSysAttributes):

await asyncio.shield(self.sys_host.network.apply_changes(interface))

@api_process
def reload(self, request: web.Request) -> Awaitable[None]:
"""Reload network data."""
return asyncio.shield(self.sys_host.network.update())

@api_process
async def scan_accesspoints(self, request: web.Request) -> Dict[str, Any]:
"""Scan and return a list of available networks."""

@@ -6,6 +6,8 @@ from typing import Any, Awaitable, Dict
from aiohttp import web
import voluptuous as vol

from supervisor.resolution.const import ContextType, SuggestionType

from ..const import (
ATTR_ADDONS,
ATTR_ADDONS_REPOSITORIES,
@@ -143,10 +145,20 @@ class APISupervisor(CoreSysAttributes):
if ATTR_ADDONS_REPOSITORIES in body:
new = set(body[ATTR_ADDONS_REPOSITORIES])
await asyncio.shield(self.sys_store.update_repositories(new))
if sorted(body[ATTR_ADDONS_REPOSITORIES]) != sorted(
self.sys_config.addons_repositories
):
raise APIError("Not a valid add-on repository")

# Fix invalid repository
found_invalid = False
for suggestion in self.sys_resolution.suggestions:
if (
suggestion.type != SuggestionType.EXECUTE_REMOVE
and suggestion.context != ContextType
):
continue
found_invalid = True
await self.sys_resolution.apply_suggestion(suggestion)

if found_invalid:
raise APIError("Invalid Add-on repository!")

self.sys_updater.save_data()
self.sys_config.save_data()

@@ -8,7 +8,11 @@ import signal
from colorlog import ColoredFormatter
import sentry_sdk
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from sentry_sdk.integrations.atexit import AtexitIntegration
from sentry_sdk.integrations.dedupe import DedupeIntegration
from sentry_sdk.integrations.excepthook import ExcepthookIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.threading import ThreadingIntegration

from supervisor.jobs import JobManager

@@ -40,7 +44,7 @@ from .misc.hwmon import HwMonitor
from .misc.scheduler import Scheduler
from .misc.tasks import Tasks
from .plugins import PluginManager
from .resolution import ResolutionManager
from .resolution.module import ResolutionManager
from .services import ServiceManager
from .snapshots import SnapshotManager
from .store import StoreManager
@@ -295,16 +299,20 @@ def supervisor_debugger(coresys: CoreSys) -> None:

def setup_diagnostics(coresys: CoreSys) -> None:
"""Sentry diagnostic backend."""
sentry_logging = LoggingIntegration(
level=logging.WARNING, event_level=logging.CRITICAL
)

_LOGGER.info("Initializing Supervisor Sentry")
sentry_sdk.init(
dsn="https://9c6ea70f49234442b4746e447b24747e@o427061.ingest.sentry.io/5370612",
before_send=lambda event, hint: filter_data(coresys, event, hint),
auto_enabling_integrations=False,
integrations=[AioHttpIntegration(), sentry_logging],
default_integrations=False,
integrations=[
AioHttpIntegration(),
ExcepthookIntegration(),
DedupeIntegration(),
AtexitIntegration(),
ThreadingIntegration(),
LoggingIntegration(level=logging.WARNING, event_level=logging.CRITICAL),
],
release=SUPERVISOR_VERSION,
max_breadcrumbs=30,
)

@@ -9,11 +9,6 @@ URL_HASSIO_ADDONS = "https://github.com/home-assistant/addons"
URL_HASSIO_APPARMOR = "https://version.home-assistant.io/apparmor.txt"
URL_HASSIO_VERSION = "https://version.home-assistant.io/{channel}.json"

URL_HASSOS_OTA = (
"https://github.com/home-assistant/operating-system/releases/download/"
"{version}/hassos_{board}-{version}.raucb"
)

SUPERVISOR_DATA = Path("/data")

FILE_HASSIO_ADDONS = Path(SUPERVISOR_DATA, "addons.json")
@@ -158,6 +153,7 @@ ATTR_HOST_NETWORK = "host_network"
ATTR_HOST_PID = "host_pid"
ATTR_HOSTNAME = "hostname"
ATTR_ICON = "icon"
ATTR_ID = "id"
ATTR_ISSUES = "issues"
ATTR_IMAGE = "image"
ATTR_IMAGES = "images"
@@ -210,6 +206,7 @@ ATTR_PANEL_ICON = "panel_icon"
ATTR_PANEL_TITLE = "panel_title"
ATTR_PANELS = "panels"
ATTR_PASSWORD = "password"
ATTR_PARENT = "parent"
ATTR_PORT = "port"
ATTR_PORTS = "ports"
ATTR_PORTS_DESCRIPTION = "ports_description"
@@ -290,6 +287,7 @@ ATTR_MAC = "mac"
ATTR_FREQUENCY = "frequency"
ATTR_ACCESSPOINTS = "accesspoints"
ATTR_UNHEALTHY = "unhealthy"
ATTR_OTA = "ota"

PROVIDE_SERVICE = "provide"
NEED_SERVICE = "need"

@@ -66,23 +66,31 @@ class Core(CoreSysAttributes):
await self.sys_resolution.evaluate.evaluate_system()

# Check supervisor version/update
if self.sys_dev:
self.sys_config.version = self.sys_supervisor.version
elif self.sys_config.version != self.sys_supervisor.version:
if self.sys_config.version == self.sys_supervisor.version:
return

# Somethings going wrong
_LOGGER.error(
"Update '%s' of Supervisor '%s' failed!",
self.sys_config.version,
self.sys_supervisor.version,
)

if self.sys_supervisor.need_update:
self.sys_resolution.create_issue(
IssueType.UPDATE_ROLLBACK, ContextType.SUPERVISOR
)
self.sys_resolution.unhealthy = UnhealthyReason.SUPERVISOR
_LOGGER.error(
"Update '%s' of Supervisor '%s' failed!",
self.sys_config.version,
self.sys_supervisor.version,
)

# Fix wrong version in config / avoid boot loop on OS
self.sys_config.version = self.sys_supervisor.version
self.sys_config.save_data()

async def setup(self):
"""Start setting up supervisor orchestration."""
self.state = CoreState.SETUP

# Order can be important!
setup_loads: List[Awaitable[None]] = [
# rest api views
self.sys_api.load(),

@@ -30,7 +30,7 @@ if TYPE_CHECKING:
from .misc.scheduler import Scheduler
from .misc.tasks import Tasks
from .plugins import PluginManager
from .resolution import ResolutionManager
from .resolution.module import ResolutionManager
from .services import ServiceManager
from .snapshots import SnapshotManager
from .store import StoreManager

@@ -1,9 +1,11 @@
"""D-Bus interface objects."""
import logging
from typing import List

from ..const import SOCKET_DBUS
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import DBusNotConnectedError
from .hostname import Hostname
from .interface import DBusInterface
from .network import NetworkManager
from .rauc import Rauc
from .systemd import Systemd
@@ -45,15 +47,22 @@ class DBusManager(CoreSysAttributes):

async def load(self) -> None:
"""Connect interfaces to D-Bus."""

try:
await self.systemd.connect()
await self.hostname.connect()
await self.rauc.connect()
await self.network.connect()
except DBusNotConnectedError:
if not SOCKET_DBUS.exists():
_LOGGER.error(
"No D-Bus support on Host. Disabled any kind of host control!"
)
return

dbus_loads: List[DBusInterface] = [
self.systemd,
self.hostname,
self.network,
self.rauc,
]
for dbus in dbus_loads:
try:
await dbus.connect()
except Exception as err:  # pylint: disable=broad-except
_LOGGER.warning("Can't load dbus interface %s: %s", dbus.name, err)

self.sys_host.supported_features.cache_clear()

@@ -65,6 +65,7 @@ DBUS_ATTR_STATIC_OPERATING_SYSTEM_CPE_NAME = "OperatingSystemCPEName"
DBUS_ATTR_TYPE = "Type"
DBUS_ATTR_UUID = "Uuid"
DBUS_ATTR_VARIANT = "Variant"
DBUS_ATTR_VERSION = "Version"
DBUS_ATTR_MANAGED = "Managed"
DBUS_ATTR_CONNECTION_ENABLED = "ConnectivityCheckEnabled"

@@ -23,6 +23,8 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
class Hostname(DBusInterface):
"""Handle D-Bus interface for hostname/system."""

name = DBUS_NAME_HOSTNAME

def __init__(self):
"""Initialize Properties."""
self._hostname: Optional[str] = None

@@ -9,6 +9,7 @@ class DBusInterface(ABC):
"""Handle D-Bus interface for hostname/system."""

dbus: Optional[DBus] = None
name: Optional[str] = None

@property
def is_connected(self):
@@ -19,6 +20,10 @@ class DBusInterface(ABC):
async def connect(self):
"""Connect to D-Bus."""

def disconnect(self):
"""Disconnect from D-Bus."""
self.dbus = None


class DBusInterfaceProxy(ABC):
"""Handle D-Bus interface proxy."""

@@ -2,14 +2,21 @@
import logging
from typing import Any, Awaitable, Dict

from packaging.version import parse as pkg_parse
import sentry_sdk

from ...exceptions import DBusError, DBusInterfaceError, DBusProgramError
from ...exceptions import (
DBusError,
DBusInterfaceError,
DBusProgramError,
HostNotSupportedError,
)
from ...utils.gdbus import DBus
from ..const import (
DBUS_ATTR_CONNECTION_ENABLED,
DBUS_ATTR_DEVICES,
DBUS_ATTR_PRIMARY_CONNECTION,
DBUS_ATTR_VERSION,
DBUS_NAME_NM,
DBUS_OBJECT_BASE,
DBUS_OBJECT_NM,
@@ -23,10 +30,14 @@ from .settings import NetworkManagerSettings

_LOGGER: logging.Logger = logging.getLogger(__name__)

MINIMAL_VERSION = "1.14.6"


class NetworkManager(DBusInterface):
"""Handle D-Bus interface for Network Manager."""

name = DBUS_NAME_NM

def __init__(self) -> None:
"""Initialize Properties."""
self._dns: NetworkManagerDNS = NetworkManagerDNS()
@@ -55,6 +66,11 @@ class NetworkManager(DBusInterface):
"""Return if connectivity check is enabled."""
return self.properties[DBUS_ATTR_CONNECTION_ENABLED]

@property
def version(self) -> bool:
"""Return if connectivity check is enabled."""
return self.properties[DBUS_ATTR_VERSION]

@dbus_connected
def activate_connection(
self, connection_object: str, device_object: str
@@ -91,6 +107,28 @@ class NetworkManager(DBusInterface):
"No Network Manager support on the host. Local network functions have been disabled."
)

# Make Sure we only connect to supported version
if self.is_connected:
try:
await self._validate_version()
except (HostNotSupportedError, DBusError):
self.disconnect()
self.dns.disconnect()
self.settings.disconnect()

async def _validate_version(self) -> None:
"""Validate Version of NetworkManager."""
self.properties = await self.dbus.get_properties(DBUS_NAME_NM)

try:
if pkg_parse(self.version) >= pkg_parse(MINIMAL_VERSION):
return
except (TypeError, ValueError, KeyError):
pass

_LOGGER.error("Version '%s' of NetworkManager is not supported!", self.version)
raise HostNotSupportedError()

@dbus_connected
async def update(self):
"""Update Properties."""

@@ -32,6 +32,7 @@ class ConnectionProperties:
id: Optional[str] = attr.ib()
uuid: Optional[str] = attr.ib()
type: Optional[str] = attr.ib()
interface_name: Optional[str] = attr.ib()


@attr.s(slots=True)

@@ -31,6 +31,7 @@ ATTR_ASSIGNED_MAC = "assigned-mac-address"
ATTR_POWERSAVE = "powersave"
ATTR_AUTH_ALGO = "auth-algo"
ATTR_KEY_MGMT = "key-mgmt"
ATTR_INTERFACE_NAME = "interface-name"


class NetworkSetting(DBusInterfaceProxy):
@@ -109,6 +110,7 @@ class NetworkSetting(DBusInterfaceProxy):
data[CONF_ATTR_CONNECTION].get(ATTR_ID),
data[CONF_ATTR_CONNECTION].get(ATTR_UUID),
data[CONF_ATTR_CONNECTION].get(ATTR_TYPE),
data[CONF_ATTR_CONNECTION].get(ATTR_INTERFACE_NAME),
)

if CONF_ATTR_802_ETHERNET in data:

@@ -1,7 +1,9 @@
"""Payload generators for DBUS communication."""
from __future__ import annotations

from ipaddress import IPv4Address, IPv6Address
from pathlib import Path
import socket
from typing import TYPE_CHECKING, Optional
from uuid import uuid4

@@ -22,17 +24,32 @@ def interface_update_payload(
interface: Interface, name: Optional[str] = None, uuid: Optional[str] = None
) -> str:
"""Generate a payload for network interface update."""
template = jinja2.Template(INTERFACE_UPDATE_TEMPLATE.read_text())
env = jinja2.Environment()

def ipv4_to_int(ip_address: IPv4Address) -> int:
"""Convert an ipv4 to an int."""
return socket.htonl(int(ip_address))

def ipv6_to_byte(ip_address: IPv6Address) -> str:
"""Convert an ipv6 to an byte array."""
return (
f'[byte {", ".join("0x{:02x}".format(val) for val in ip_address.packed)}]'
)

# Init template
env.filters["ipv4_to_int"] = ipv4_to_int
env.filters["ipv6_to_byte"] = ipv6_to_byte
template: jinja2.Template = env.from_string(INTERFACE_UPDATE_TEMPLATE.read_text())

# Generate UUID
if not uuid:
uuid = str(uuid4())

# Generate ID/name
if not name and interface.type != InterfaceType.VLAN:
# Generate/Update ID/name
if not name or not name.startswith("Supervisor"):
name = f"Supervisor {interface.name}"
elif not name:
name = f"Supervisor {interface.name}.{interface.vlan.id}"
if interface.type == InterfaceType.VLAN:
name = f"{name}.{interface.vlan.id}"

# Fix SSID
if interface.wifi:

@@ -21,7 +21,7 @@
'method': <'disabled'>
{% else %}
'method': <'manual'>,
'dns': <[uint32 {{ interface.ipv4.nameservers | map("int") | join(",") }}]>,
'dns': <[uint32 {{ interface.ipv4.nameservers | map("ipv4_to_int") | join(",") }}]>,
'address-data': <[
{% for address in interface.ipv4.address %}
{
@@ -44,7 +44,7 @@
'method': <'disabled'>
{% else %}
'method': <'manual'>,
'dns': <[uint32 {{ interface.ipv6.nameservers | map("int") | join(",") }}]>,
'dns': <[{{ interface.ipv6.nameservers | map("ipv6_to_byte") | join(",") }}]>,
'address-data': <[
{% for address in interface.ipv6.address if not address.with_prefixlen.startswith("fe80::") %}
{
@@ -61,7 +61,7 @@
,
'802-3-ethernet':
{
'assigned-mac-address': <'stable'>
'assigned-mac-address': <'preserve'>
}
{% endif %}

@@ -78,7 +78,7 @@
,
'802-11-wireless':
{
'assigned-mac-address': <'stable'>,
'assigned-mac-address': <'preserve'>,
'ssid': <[byte {{ interface.wifi.ssid }}]>,
'mode': <'{{ interface.wifi.mode.value }}'>,
'powersave': <uint32 1>

@@ -25,6 +25,8 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
class Rauc(DBusInterface):
"""Handle D-Bus interface for rauc."""

name = DBUS_NAME_RAUC

def __init__(self):
"""Initialize Properties."""
self._operation: Optional[str] = None

@@ -13,6 +13,8 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
class Systemd(DBusInterface):
"""Systemd function handler."""

name = DBUS_NAME_SYSTEMD

async def connect(self):
"""Connect to D-Bus."""
try:

@@ -9,6 +9,13 @@ class HassioNotSupportedError(HassioError):
"""Function is not supported."""


# JobManager


class JobException(HassioError):
"""Base job exception."""


# HomeAssistant


@@ -32,6 +39,10 @@ class HomeAssistantAuthError(HomeAssistantAPIError):
"""Home Assistant Auth API exception."""


class HomeAssistantJobError(HomeAssistantError, JobException):
"""Raise on Home Assistant job error."""


# Supervisor


@@ -43,6 +54,10 @@ class SupervisorUpdateError(SupervisorError):
"""Supervisor update error."""


class SupervisorJobError(SupervisorError, JobException):
"""Raise on job errors."""


# HassOS


@@ -128,6 +143,10 @@ class AddonsNotSupportedError(HassioNotSupportedError):
"""Addons don't support a function."""


class AddonsJobError(AddonsError, JobException):
"""Raise on job errors."""


# Arch


@@ -138,10 +157,14 @@ class HassioArchNotFound(HassioNotSupportedError):
# Updater


class HassioUpdaterError(HassioError):
class UpdaterError(HassioError):
"""Error on Updater."""


class UpdaterJobError(UpdaterError, JobException):
"""Raise on job error."""


# Auth


@@ -299,6 +322,10 @@ class ResolutionFixupError(HassioError):
"""Rasie if a fixup fails."""


class ResolutionFixupJobError(ResolutionFixupError, JobException):
"""Raise on job error."""


# Store


@@ -310,8 +337,9 @@ class StoreGitError(StoreError):
"""Raise if something on git is happening."""


# JobManager
class StoreNotFound(StoreError):
"""Raise if slug is not known."""


class JobException(HassioError):
"""Base job exception."""
class StoreJobError(StoreError, JobException):
"""Raise on job error with git."""

@@ -8,7 +8,6 @@ import aiohttp
from cpe import CPE
from packaging.version import parse as pkg_parse

from .const import URL_HASSOS_OTA
from .coresys import CoreSys, CoreSysAttributes
from .dbus.rauc import RaucState
from .exceptions import DBusError, HassOSNotSupportedError, HassOSUpdateError
@@ -64,10 +63,14 @@ class HassOS(CoreSysAttributes):

async def _download_raucb(self, version: str) -> Path:
"""Download rauc bundle (OTA) from github."""
url = URL_HASSOS_OTA.format(version=version, board=self.board)
raucb = Path(self.sys_config.path_tmp, f"hassos-{version}.raucb")
raw_url = self.sys_updater.ota_url
if raw_url is None:
_LOGGER.error("Don't have an URL for OTA updates!")
raise HassOSNotSupportedError()
url = raw_url.format(version=version, board=self.board)

_LOGGER.info("Fetch OTA update from %s", url)
raucb = Path(self.sys_config.path_tmp, f"hassos-{version}.raucb")
try:
timeout = aiohttp.ClientTimeout(total=60 * 60, connect=180)
async with self.sys_websession.get(url, timeout=timeout) as request:

@@ -19,6 +19,7 @@ from ..exceptions import (
DockerError,
HomeAssistantCrashError,
HomeAssistantError,
HomeAssistantJobError,
HomeAssistantUpdateError,
)
from ..jobs.decorator import Job, JobCondition
@@ -158,7 +159,8 @@ class HomeAssistantCore(CoreSysAttributes):
JobCondition.FREE_SPACE,
JobCondition.HEALTHY,
JobCondition.INTERNET_HOST,
]
],
on_condition=HomeAssistantJobError,
)
async def update(self, version: Optional[str] = None) -> None:
"""Update HomeAssistant version."""

@@ -111,7 +111,12 @@ class NetworkManager(CoreSysAttributes):
inet = self.sys_dbus.network.interfaces.get(interface.name)

# Update exist configuration
if inet and inet.settings and interface.enabled:
if (
inet
and inet.settings
and inet.settings.connection.interface_name == interface.name
and interface.enabled
):
settings = interface_update_payload(
interface,
name=inet.settings.connection.id,
@@ -279,7 +284,7 @@ class Interface:
inet.connection.ipv4.nameservers,
)
if inet.connection and inet.connection.ipv4
else None,
else IpConfig(InterfaceMethod.DISABLED, [], None, []),
IpConfig(
Interface._map_nm_method(inet.settings.ipv6.method),
inet.connection.ipv6.address,
@@ -287,7 +292,7 @@ class Interface:
inet.connection.ipv6.nameservers,
)
if inet.connection and inet.connection.ipv6
else None,
else IpConfig(InterfaceMethod.DISABLED, [], None, []),
Interface._map_nm_wifi(inet),
Interface._map_nm_vlan(inet),
)

@@ -1,6 +1,6 @@
"""Job decorator."""
import logging
from typing import List, Optional
from typing import Any, List, Optional

import sentry_sdk

@@ -21,11 +21,13 @@ class Job:
name: Optional[str] = None,
conditions: Optional[List[JobCondition]] = None,
cleanup: bool = True,
on_condition: Optional[JobException] = None,
):
"""Initialize the Job class."""
self.name = name
self.conditions = conditions
self.cleanup = cleanup
self.on_condition = on_condition
self._coresys: Optional[CoreSys] = None
self._method = None

@@ -33,23 +35,28 @@ class Job:
"""Call the wrapper logic."""
self._method = method

async def wrapper(*args, **kwargs):
async def wrapper(*args, **kwargs) -> Any:
"""Wrap the method."""
if self.name is None:
self.name = str(self._method.__qualname__).lower().replace(".", "_")

# Evaluate coresys
try:
self._coresys = args[0].coresys
except AttributeError:
return False

pass
if not self._coresys:
raise JobException(f"coresys is missing on {self.name}")

job = self._coresys.jobs.job

# Handle condition
if self.conditions and not self._check_conditions():
return False
if self.on_condition is None:
return
raise self.on_condition()

# Execute Job
try:
return await self._method(*args, **kwargs)
except HassioError as err:

@@ -62,6 +62,7 @@ def filter_data(coresys: CoreSys, event: dict, hint: dict) -> dict:
"host": coresys.host.info.operating_system,
"kernel": coresys.host.info.kernel,
"machine": coresys.machine,
"images": list(coresys.resolution.evaluate.cached_images),
},
"versions": {
"audio": coresys.plugins.audio.version,
@@ -76,6 +77,10 @@ def filter_data(coresys: CoreSys, event: dict, hint: dict) -> dict:
},
"resolution": {
"issues": [attr.asdict(issue) for issue in coresys.resolution.issues],
"suggestions": [
attr.asdict(suggestion)
for suggestion in coresys.resolution.suggestions
],
"unhealthy": coresys.resolution.unhealthy,
},
}

@@ -1,181 +1 @@
"""Supervisor resolution center."""
from datetime import time
import logging
from typing import List, Optional

from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import ResolutionError, ResolutionNotFound
from .check import ResolutionCheck
from .const import (
SCHEDULED_HEALTHCHECK,
ContextType,
IssueType,
SuggestionType,
UnhealthyReason,
UnsupportedReason,
)
from .data import Issue, Suggestion
from .evaluate import ResolutionEvaluation
from .fixup import ResolutionFixup
from .notify import ResolutionNotify

_LOGGER: logging.Logger = logging.getLogger(__name__)


class ResolutionManager(CoreSysAttributes):
"""Resolution manager for supervisor."""

def __init__(self, coresys: CoreSys):
"""Initialize Resolution manager."""
self.coresys: CoreSys = coresys
self._evaluate = ResolutionEvaluation(coresys)
self._check = ResolutionCheck(coresys)
self._fixup = ResolutionFixup(coresys)
self._notify = ResolutionNotify(coresys)

self._suggestions: List[Suggestion] = []
self._issues: List[Issue] = []
self._unsupported: List[UnsupportedReason] = []
self._unhealthy: List[UnhealthyReason] = []

@property
def evaluate(self) -> ResolutionEvaluation:
"""Return the ResolutionEvaluation class."""
return self._evaluate

@property
def check(self) -> ResolutionCheck:
"""Return the ResolutionCheck class."""
return self._check

@property
def fixup(self) -> ResolutionFixup:
"""Return the ResolutionFixup class."""
return self._fixup

@property
def notify(self) -> ResolutionNotify:
"""Return the ResolutionNotify class."""
return self._notify

@property
def issues(self) -> List[Issue]:
"""Return a list of issues."""
return self._issues

@issues.setter
def issues(self, issue: Issue) -> None:
"""Add issues."""
if issue not in self._issues:
self._issues.append(issue)

@property
def suggestions(self) -> List[Suggestion]:
"""Return a list of suggestions that can handled."""
return self._suggestions

@suggestions.setter
def suggestions(self, suggestion: Suggestion) -> None:
"""Add suggestion."""
if suggestion not in self._suggestions:
self._suggestions.append(suggestion)

@property
def unsupported(self) -> List[UnsupportedReason]:
"""Return a list of unsupported reasons."""
return self._unsupported

@unsupported.setter
def unsupported(self, reason: UnsupportedReason) -> None:
"""Add a reason for unsupported."""
if reason not in self._unsupported:
self._unsupported.append(reason)

@property
def unhealthy(self) -> List[UnhealthyReason]:
"""Return a list of unsupported reasons."""
return self._unhealthy

@unhealthy.setter
def unhealthy(self, reason: UnhealthyReason) -> None:
"""Add a reason for unsupported."""
if reason not in self._unhealthy:
self._unhealthy.append(reason)

def get_suggestion(self, uuid: str) -> Suggestion:
"""Return suggestion with uuid."""
for suggestion in self._suggestions:
if suggestion.uuid != uuid:
continue
return suggestion
raise ResolutionNotFound()

def get_issue(self, uuid: str) -> Issue:
"""Return issue with uuid."""
for issue in self._issues:
if issue.uuid != uuid:
continue
return issue
raise ResolutionNotFound()

def create_issue(
self,
issue: IssueType,
context: ContextType,
reference: Optional[str] = None,
suggestions: Optional[List[SuggestionType]] = None,
) -> None:
"""Create issues and suggestion."""
self.issues = Issue(issue, context, reference)
if not suggestions:
return

# Add suggestions
for suggestion in suggestions:
self.suggestions = Suggestion(suggestion, context, reference)

async def load(self):
"""Load the resoulution manager."""
# Initial healthcheck when the manager is loaded
await self.healthcheck()

# Schedule the healthcheck
self.sys_scheduler.register_task(self.healthcheck, SCHEDULED_HEALTHCHECK)
self.sys_scheduler.register_task(self.fixup.run_autofix, time(hour=2))

async def healthcheck(self):
"""Scheduled task to check for known issues."""
await self.check.check_system()

# Create notification for any known issues
await self.notify.issue_notifications()

async def apply_suggestion(self, suggestion: Suggestion) -> None:
"""Apply suggested action."""
if suggestion not in self._suggestions:
_LOGGER.warning("Suggestion %s is not valid", suggestion.uuid)
raise ResolutionError()

await self.fixup.apply_fixup(suggestion)
await self.healthcheck()

def dismiss_suggestion(self, suggestion: Suggestion) -> None:
"""Dismiss suggested action."""
if suggestion not in self._suggestions:
_LOGGER.warning("The UUID %s is not valid suggestion", suggestion.uuid)
raise ResolutionError()
self._suggestions.remove(suggestion)

def dismiss_issue(self, issue: Issue) -> None:
"""Dismiss suggested action."""
if issue not in self._issues:
_LOGGER.warning("The UUID %s is not a valid issue", issue.uuid)
raise ResolutionError()
self._issues.remove(issue)

def dismiss_unsupported(self, reason: Issue) -> None:
"""Dismiss a reason for unsupported."""
if reason not in self._unsupported:
_LOGGER.warning("The reason %s is not active", reason)
raise ResolutionError()
self._unsupported.remove(reason)
"""Resolution Supervisor module."""

@@ -3,7 +3,6 @@ import logging
from typing import List

from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import HassioError
from .checks.base import CheckBase
from .checks.free_space import CheckFreeSpace

@@ -31,7 +30,7 @@ class ResolutionCheck(CoreSysAttributes):
for test in self.all_tests:
try:
await test()
except HassioError as err:
except Exception as err:  # pylint: disable=broad-except
_LOGGER.warning("Error during processing %s: %s", test.issue, err)
self.sys_capture_exception(err)

@@ -66,5 +66,5 @@ class SuggestionType(str, Enum):
EXECUTE_REPAIR = "execute_repair"
EXECUTE_RESET = "execute_reset"
EXECUTE_RELOAD = "execute_reload"
EXECUTE_REMOVE = "execute_remove"
REGISTRY_LOGIN = "registry_login"
NEW_INITIALIZE = "new_initialize"

@@ -1,8 +1,6 @@
"""Helpers to evaluate the system."""
import logging
from typing import List

from supervisor.exceptions import HassioError
from typing import List, Set

from ..coresys import CoreSys, CoreSysAttributes
from .const import UnhealthyReason, UnsupportedReason
@@ -35,6 +33,8 @@ class ResolutionEvaluation(CoreSysAttributes):
"""Initialize the evaluation class."""
self.coresys = coresys

self.cached_images: Set[str] = set()

self._container = EvaluateContainer(coresys)
self._dbus = EvaluateDbus(coresys)
self._docker_configuration = EvaluateDockerConfiguration(coresys)
@@ -69,7 +69,7 @@ class ResolutionEvaluation(CoreSysAttributes):
for evaluation in self.all_evalutions:
try:
await evaluation()
except HassioError as err:
except Exception as err:  # pylint: disable=broad-except
_LOGGER.warning(
"Error during processing %s: %s", evaluation.reason, err
)

@@ -7,15 +7,14 @@ from requests import RequestException

from ...const import CoreState
from ...coresys import CoreSys
from ..const import UnsupportedReason
from ..const import ContextType, IssueType, SuggestionType, UnsupportedReason
from .base import EvaluateBase

_LOGGER: logging.Logger = logging.getLogger(__name__)

DOCKER_IMAGE_DENYLIST = [
"containrrr/watchtower",
"pyouroboros/ouroboros",
"v2tec/watchtower",
"watchtower",
"ouroboros",
]


@@ -41,16 +40,24 @@ class EvaluateContainer(EvaluateBase):
@property
def states(self) -> List[CoreState]:
"""Return a list of valid states when this evaluation can run."""
return [CoreState.SETUP, CoreState.RUNNING]
return [CoreState.SETUP, CoreState.RUNNING, CoreState.INITIALIZE]

async def evaluate(self) -> None:
"""Run evaluation."""
self.sys_resolution.evaluate.cached_images.clear()
self._images.clear()

for image in await self.sys_run_in_executor(self._get_images):
for tag in image.tags:
image_name = tag.split(":")[0]
self.sys_resolution.evaluate.cached_images.add(tag)

# Evalue system
image_name = tag.partition(":")[0].split("/")[-1]
if (
image_name in DOCKER_IMAGE_DENYLIST
any(
image_name.startswith(deny_name)
for deny_name in DOCKER_IMAGE_DENYLIST
)
and image_name not in self._images
):
self._images.add(image_name)
@@ -64,5 +71,10 @@ class EvaluateContainer(EvaluateBase):
images = self.sys_docker.images.list()
except (DockerException, RequestException) as err:
_LOGGER.error("Corrupt docker overlayfs detect: %s", err)
self.sys_resolution.create_issue(
IssueType.CORRUPT_DOCKER,
ContextType.SYSTEM,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)

return images

@@ -2,13 +2,14 @@
import logging
from typing import List

from supervisor.exceptions import HassioError
from supervisor.resolution.data import Suggestion

from ..coresys import CoreSys, CoreSysAttributes
from .data import Suggestion
from .fixups.base import FixupBase
from .fixups.clear_full_snapshot import FixupClearFullSnapshot
from .fixups.create_full_snapshot import FixupCreateFullSnapshot
from .fixups.store_execute_reload import FixupStoreExecuteReload
from .fixups.store_execute_remove import FixupStoreExecuteRemove
from .fixups.store_execute_reset import FixupStoreExecuteReset

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -22,11 +23,23 @@ class ResolutionFixup(CoreSysAttributes):

self._create_full_snapshot = FixupCreateFullSnapshot(coresys)
self._clear_full_snapshot = FixupClearFullSnapshot(coresys)
self._store_execute_reset = FixupStoreExecuteReset(coresys)
self._store_execute_reload = FixupStoreExecuteReload(coresys)
self._store_execute_remove = FixupStoreExecuteRemove(coresys)

@property
def all_fixes(self) -> List[FixupBase]:
"""Return a list of all fixups."""
return [self._create_full_snapshot, self._clear_full_snapshot]
"""Return a list of all fixups.

Order can be important!
"""
return [
self._create_full_snapshot,
self._clear_full_snapshot,
self._store_execute_reload,
self._store_execute_reset,
self._store_execute_remove,
]

async def run_autofix(self) -> None:
"""Run all startup fixes."""
@@ -37,7 +50,7 @@ class ResolutionFixup(CoreSysAttributes):
continue
try:
await fix()
except HassioError as err:
except Exception as err:  # pylint: disable=broad-except
_LOGGER.warning("Error during processing %s: %s", fix.suggestion, err)
self.sys_capture_exception(err)

@@ -1,11 +1,10 @@
"""Baseclass for system fixup."""
from abc import ABC, abstractmethod, abstractproperty
from contextlib import suppress
import logging
from typing import Optional
from typing import List, Optional

from ...coresys import CoreSys, CoreSysAttributes
from ...exceptions import ResolutionError, ResolutionFixupError
from ...exceptions import ResolutionFixupError
from ..const import ContextType, IssueType, SuggestionType
from ..data import Issue, Suggestion

@@ -42,13 +41,12 @@ class FixupBase(ABC, CoreSysAttributes):

self.sys_resolution.dismiss_suggestion(fixing_suggestion)

if self.issue is None:
return

with suppress(ResolutionError):
self.sys_resolution.dismiss_issue(
Issue(self.issue, self.context, fixing_suggestion.reference)
)
# Cleanup issue
for issue_type in self.issues:
issue = Issue(issue_type, self.context, fixing_suggestion.reference)
if issue not in self.sys_resolution.issues:
continue
self.sys_resolution.dismiss_issue(issue)

@abstractmethod
async def process_fixup(self, reference: Optional[str] = None) -> None:
@@ -65,9 +63,9 @@ class FixupBase(ABC, CoreSysAttributes):
"""Return a ContextType enum."""

@property
def issue(self) -> Optional[IssueType]:
"""Return a IssueType enum."""
return None
def issues(self) -> List[IssueType]:
"""Return a IssueType enum list."""
return []

@property
def auto(self) -> bool:

@@ -1,6 +1,6 @@
"""Helpers to check and fix issues with free space."""
import logging
from typing import Optional
from typing import List, Optional

from ...const import SNAPSHOT_FULL
from ..const import MINIMUM_FULL_SNAPSHOTS, ContextType, IssueType, SuggestionType
@@ -36,6 +36,6 @@ class FixupClearFullSnapshot(FixupBase):
return ContextType.SYSTEM

@property
def issue(self) -> IssueType:
"""Return a IssueType enum."""
return IssueType.FREE_SPACE
def issues(self) -> List[IssueType]:
"""Return a IssueType enum list."""
return [IssueType.FREE_SPACE]

supervisor/resolution/fixups/store_execute_reload.py (Normal file, 50 lines)
@@ -0,0 +1,50 @@
"""Helpers to check and fix issues with free space."""
import logging
from typing import List, Optional

from supervisor.exceptions import ResolutionFixupError, StoreError, StoreNotFound

from ..const import ContextType, IssueType, SuggestionType
from .base import FixupBase

_LOGGER: logging.Logger = logging.getLogger(__name__)


class FixupStoreExecuteReload(FixupBase):
"""Storage class for fixup."""

async def process_fixup(self, reference: Optional[str] = None) -> None:
"""Initialize the fixup class."""
_LOGGER.info("Reload Store: %s", reference)
try:
repository = self.sys_store.get(reference)
except StoreNotFound:
_LOGGER.warning("Can't find store %s for fixup", reference)
return

# Load data again
try:
await repository.load()
await repository.update()
except StoreError:
raise ResolutionFixupError() from None

@property
def suggestion(self) -> SuggestionType:
"""Return a SuggestionType enum."""
return SuggestionType.EXECUTE_RELOAD

@property
def context(self) -> ContextType:
"""Return a ContextType enum."""
return ContextType.STORE

@property
def issues(self) -> List[IssueType]:
"""Return a IssueType enum list."""
return [IssueType.FATAL_ERROR]

@property
def auto(self) -> bool:
"""Return if a fixup can be apply as auto fix."""
return True

supervisor/resolution/fixups/store_execute_remove.py (Normal file, 54 lines)
@@ -0,0 +1,54 @@
"""Helpers to check and fix issues with free space."""
import logging
from typing import List, Optional

from supervisor.exceptions import ResolutionFixupError, StoreError, StoreNotFound

from ..const import ContextType, IssueType, SuggestionType
from .base import FixupBase

_LOGGER: logging.Logger = logging.getLogger(__name__)


class FixupStoreExecuteRemove(FixupBase):
"""Storage class for fixup."""

async def process_fixup(self, reference: Optional[str] = None) -> None:
"""Initialize the fixup class."""
_LOGGER.info("Remove invalid Store: %s", reference)
try:
repository = self.sys_store.get(reference)
except StoreNotFound:
_LOGGER.warning("Can't find store %s for fixup", reference)
return

# Remove repository
try:
await repository.remove()
except StoreError:
raise ResolutionFixupError() from None
else:
self.sys_store.repositories.pop(repository.slug, None)

self.sys_config.drop_addon_repository(repository.source)
self.sys_config.save_data()

@property
def suggestion(self) -> SuggestionType:
"""Return a SuggestionType enum."""
return SuggestionType.EXECUTE_REMOVE

@property
def context(self) -> ContextType:
"""Return a ContextType enum."""
return ContextType.STORE

@property
def issues(self) -> List[IssueType]:
"""Return a IssueType enum list."""
return [IssueType.CORRUPT_REPOSITORY]

@property
def auto(self) -> bool:
"""Return if a fixup can be apply as auto fix."""
return True

supervisor/resolution/fixups/store_execute_reset.py (Normal file, 61 lines)
@@ -0,0 +1,61 @@
"""Helpers to check and fix issues with free space."""
import logging
from typing import List, Optional

from ...exceptions import (
ResolutionFixupError,
ResolutionFixupJobError,
StoreError,
StoreNotFound,
)
from ...jobs.const import JobCondition
from ...jobs.decorator import Job
from ...utils import remove_folder
from ..const import ContextType, IssueType, SuggestionType
from .base import FixupBase

_LOGGER: logging.Logger = logging.getLogger(__name__)


class FixupStoreExecuteReset(FixupBase):
"""Storage class for fixup."""

@Job(
conditions=[JobCondition.INTERNET_SYSTEM], on_condition=ResolutionFixupJobError
)
async def process_fixup(self, reference: Optional[str] = None) -> None:
"""Initialize the fixup class."""
_LOGGER.info("Reset corrupt Store: %s", reference)
try:
repository = self.sys_store.get(reference)
except StoreNotFound:
_LOGGER.warning("Can't find store %s for fixup", reference)
return

await remove_folder(repository.git.path)

# Load data again
try:
await repository.load()
except StoreError:
raise ResolutionFixupError() from None

@property
def suggestion(self) -> SuggestionType:
"""Return a SuggestionType enum."""
return SuggestionType.EXECUTE_RESET

@property
def context(self) -> ContextType:
"""Return a ContextType enum."""
return ContextType.STORE

@property
def issues(self) -> List[IssueType]:
"""Return a IssueType enum list."""
return [IssueType.CORRUPT_REPOSITORY, IssueType.FATAL_ERROR]

@property
def auto(self) -> bool:
"""Return if a fixup can be apply as auto fix."""
return True

supervisor/resolution/module.py (Normal file, 181 lines)
@@ -0,0 +1,181 @@
"""Supervisor resolution center."""
from datetime import time
import logging
from typing import List, Optional

from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import ResolutionError, ResolutionNotFound
from .check import ResolutionCheck
from .const import (
SCHEDULED_HEALTHCHECK,
ContextType,
IssueType,
SuggestionType,
UnhealthyReason,
UnsupportedReason,
)
from .data import Issue, Suggestion
from .evaluate import ResolutionEvaluation
from .fixup import ResolutionFixup
from .notify import ResolutionNotify

_LOGGER: logging.Logger = logging.getLogger(__name__)


class ResolutionManager(CoreSysAttributes):
"""Resolution manager for supervisor."""

def __init__(self, coresys: CoreSys):
"""Initialize Resolution manager."""
self.coresys: CoreSys = coresys
self._evaluate = ResolutionEvaluation(coresys)
self._check = ResolutionCheck(coresys)
self._fixup = ResolutionFixup(coresys)
self._notify = ResolutionNotify(coresys)

self._suggestions: List[Suggestion] = []
self._issues: List[Issue] = []
self._unsupported: List[UnsupportedReason] = []
self._unhealthy: List[UnhealthyReason] = []

@property
def evaluate(self) -> ResolutionEvaluation:
"""Return the ResolutionEvaluation class."""
return self._evaluate

@property
def check(self) -> ResolutionCheck:
"""Return the ResolutionCheck class."""
return self._check

@property
def fixup(self) -> ResolutionFixup:
"""Return the ResolutionFixup class."""
return self._fixup

@property
def notify(self) -> ResolutionNotify:
"""Return the ResolutionNotify class."""
return self._notify

@property
def issues(self) -> List[Issue]:
"""Return a list of issues."""
return self._issues

@issues.setter
def issues(self, issue: Issue) -> None:
"""Add issues."""
if issue not in self._issues:
self._issues.append(issue)

@property
def suggestions(self) -> List[Suggestion]:
"""Return a list of suggestions that can handled."""
return self._suggestions

@suggestions.setter
def suggestions(self, suggestion: Suggestion) -> None:
"""Add suggestion."""
if suggestion not in self._suggestions:
self._suggestions.append(suggestion)

@property
def unsupported(self) -> List[UnsupportedReason]:
"""Return a list of unsupported reasons."""
return self._unsupported

@unsupported.setter
def unsupported(self, reason: UnsupportedReason) -> None:
"""Add a reason for unsupported."""
if reason not in self._unsupported:
self._unsupported.append(reason)

@property
def unhealthy(self) -> List[UnhealthyReason]:
"""Return a list of unsupported reasons."""
return self._unhealthy

@unhealthy.setter
def unhealthy(self, reason: UnhealthyReason) -> None:
"""Add a reason for unsupported."""
if reason not in self._unhealthy:
self._unhealthy.append(reason)

def get_suggestion(self, uuid: str) -> Suggestion:
"""Return suggestion with uuid."""
for suggestion in self._suggestions:
if suggestion.uuid != uuid:
continue
return suggestion
raise ResolutionNotFound()

def get_issue(self, uuid: str) -> Issue:
"""Return issue with uuid."""
for issue in self._issues:
if issue.uuid != uuid:
continue
return issue
raise ResolutionNotFound()

def create_issue(
self,
issue: IssueType,
context: ContextType,
reference: Optional[str] = None,
suggestions: Optional[List[SuggestionType]] = None,
) -> None:
"""Create issues and suggestion."""
self.issues = Issue(issue, context, reference)
if not suggestions:
return

# Add suggestions
for suggestion in suggestions:
self.suggestions = Suggestion(suggestion, context, reference)

async def load(self):
"""Load the resoulution manager."""
# Initial healthcheck when the manager is loaded
await self.healthcheck()

# Schedule the healthcheck
self.sys_scheduler.register_task(self.healthcheck, SCHEDULED_HEALTHCHECK)
self.sys_scheduler.register_task(self.fixup.run_autofix, time(hour=2))

async def healthcheck(self):
"""Scheduled task to check for known issues."""
await self.check.check_system()

# Create notification for any known issues
await self.notify.issue_notifications()

async def apply_suggestion(self, suggestion: Suggestion) -> None:
"""Apply suggested action."""
if suggestion not in self._suggestions:
_LOGGER.warning("Suggestion %s is not valid", suggestion.uuid)
raise ResolutionError()

await self.fixup.apply_fixup(suggestion)
await self.healthcheck()

def dismiss_suggestion(self, suggestion: Suggestion) -> None:
"""Dismiss suggested action."""
if suggestion not in self._suggestions:
_LOGGER.warning("The UUID %s is not valid suggestion", suggestion.uuid)
raise ResolutionError()
self._suggestions.remove(suggestion)

def dismiss_issue(self, issue: Issue) -> None:
"""Dismiss suggested action."""
if issue not in self._issues:
_LOGGER.warning("The UUID %s is not a valid issue", issue.uuid)
raise ResolutionError()
self._issues.remove(issue)

def dismiss_unsupported(self, reason: Issue) -> None:
"""Dismiss a reason for unsupported."""
if reason not in self._unsupported:
_LOGGER.warning("The reason %s is not active", reason)
raise ResolutionError()
self._unsupported.remove(reason)

@@ -122,7 +122,7 @@ class SnapshotManager(CoreSysAttributes):
self.snapshots_obj[snapshot.slug] = snapshot
return snapshot

@Job(conditions=[JobCondition.FREE_SPACE])
@Job(conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING])
async def do_snapshot_full(self, name="", password=None):
"""Create a full snapshot."""
if self.lock.locked():
@@ -144,9 +144,9 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Snapshotting %s store folders", snapshot.slug)
await snapshot.store_folders()

except Exception as excep:  # pylint: disable=broad-except
except Exception as err:  # pylint: disable=broad-except
_LOGGER.exception("Snapshot %s error", snapshot.slug)
print(excep)
self.sys_capture_exception(err)
return None

else:
@@ -158,7 +158,7 @@ class SnapshotManager(CoreSysAttributes):
self.sys_core.state = CoreState.RUNNING
self.lock.release()

@Job(conditions=[JobCondition.FREE_SPACE])
@Job(conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING])
async def do_snapshot_partial(
self, name="", addons=None, folders=None, password=None
):
@@ -195,8 +195,9 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Snapshotting %s store folders", snapshot.slug)
await snapshot.store_folders(folders)

except Exception:  # pylint: disable=broad-except
except Exception as err:  # pylint: disable=broad-except
_LOGGER.exception("Snapshot %s error", snapshot.slug)
self.sys_capture_exception(err)
return None

else:
@@ -216,6 +217,7 @@ class SnapshotManager(CoreSysAttributes):
JobCondition.HEALTHY,
JobCondition.INTERNET_HOST,
JobCondition.INTERNET_SYSTEM,
JobCondition.RUNNING,
]
)
async def do_restore_full(self, snapshot, password=None):
@@ -282,8 +284,9 @@ class SnapshotManager(CoreSysAttributes):
await task_hass
await self.sys_homeassistant.core.start()

except Exception:  # pylint: disable=broad-except
except Exception as err:  # pylint: disable=broad-except
_LOGGER.exception("Restore %s error", snapshot.slug)
self.sys_capture_exception(err)
return False

else:
@@ -300,6 +303,7 @@ class SnapshotManager(CoreSysAttributes):
JobCondition.HEALTHY,
JobCondition.INTERNET_HOST,
JobCondition.INTERNET_SYSTEM,
JobCondition.RUNNING,
]
)
async def do_restore_partial(
@@ -368,8 +372,9 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.warning("Need restart HomeAssistant for API")
await self.sys_homeassistant.core.restart()

except Exception:  # pylint: disable=broad-except
except Exception as err:  # pylint: disable=broad-except
_LOGGER.exception("Restore %s error", snapshot.slug)
self.sys_capture_exception(err)
return False

else:

@@ -1,25 +1,20 @@
"""Add-on Store handler."""
import asyncio
import logging
from pathlib import Path
from typing import Dict, List

import voluptuous as vol

from supervisor.store.validate import SCHEMA_REPOSITORY_CONFIG
from supervisor.utils.json import read_json_file

from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import JsonFileError, StoreGitError
from ..exceptions import StoreGitError, StoreJobError, StoreNotFound
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from .addon import AddonStore
from .const import StoreType
from .data import StoreData
from .repository import Repository

_LOGGER: logging.Logger = logging.getLogger(__name__)

BUILTIN_REPOSITORIES = {REPOSITORY_CORE, REPOSITORY_LOCAL}
BUILTIN_REPOSITORIES = {StoreType.CORE.value, StoreType.LOCAL.value}


class StoreManager(CoreSysAttributes):
@@ -36,6 +31,20 @@ class StoreManager(CoreSysAttributes):
        """Return list of add-on repositories."""
        return list(self.repositories.values())

    def get(self, slug: str) -> Repository:
        """Return Repository with slug."""
        if slug not in self.repositories:
            raise StoreNotFound()
        return self.repositories[slug]

    def get_from_url(self, url: str) -> Repository:
        """Return Repository with URL."""
        for repository in self.all:
            if repository.source != url:
                continue
            return repository
        raise StoreNotFound()

    async def load(self) -> None:
        """Start up add-on management."""
        self.data.update()
@@ -48,7 +57,7 @@ class StoreManager(CoreSysAttributes):

    async def reload(self) -> None:
        """Update add-ons from repository and reload list."""
        tasks = [repository.update() for repository in self.repositories.values()]
        tasks = [repository.update() for repository in self.all]
        if tasks:
            await asyncio.wait(tasks)

@@ -61,35 +70,41 @@ class StoreManager(CoreSysAttributes):
        """Add a new custom repository."""
        job = self.sys_jobs.get_job("storemanager_update_repositories")
        new_rep = set(list_repositories)
        old_rep = set(self.repositories)
        old_rep = {repository.source for repository in self.all}

        # add new repository
        async def _add_repository(url: str, step: int):
            """Add a repository."""
            job.update(progress=job.progress + step, stage=f"Checking {url} started")
            repository = Repository(self.coresys, url)

            # Load the repository
            try:
                await repository.load()
            except StoreGitError:
                _LOGGER.error("Can't load data from repository %s", url)
                return

            # don't add built-in repository to config
            if url not in BUILTIN_REPOSITORIES:
                # Verify that it is an add-on repository
                repository_file = Path(repository.git.path, "repository.json")
                try:
                    await self.sys_run_in_executor(
                        SCHEMA_REPOSITORY_CONFIG, read_json_file(repository_file)
            except StoreJobError:
                _LOGGER.warning("Skip update to later for %s", repository.slug)
                self.sys_resolution.create_issue(
                    IssueType.FATAL_ERROR,
                    ContextType.STORE,
                    reference=repository.slug,
                    suggestions=[SuggestionType.EXECUTE_RELOAD],
                )
            else:
                if not repository.validate():
                    _LOGGER.error("%s is not a valid add-on repository", url)
                    self.sys_resolution.create_issue(
                        IssueType.CORRUPT_REPOSITORY,
                        ContextType.STORE,
                        reference=repository.slug,
                        suggestions=[SuggestionType.EXECUTE_REMOVE],
                    )
                except (JsonFileError, vol.Invalid) as err:
                    _LOGGER.error("%s is not a valid add-on repository. %s", url, err)
                    await repository.remove()
                    return

                self.sys_config.add_addon_repository(url)

            self.repositories[url] = repository
            # Add Repository to list
            if repository.type == StoreType.GIT:
                self.sys_config.add_addon_repository(repository.source)
            self.repositories[repository.slug] = repository

        job.update(progress=10, stage="Check repositories")
        repos = new_rep - old_rep
@@ -97,9 +112,10 @@ class StoreManager(CoreSysAttributes):
        if tasks:
            await asyncio.wait(tasks)

        # del new repository
        # Delete stale repositories
        for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
            await self.repositories.pop(url).remove()
            repository = self.get_from_url(url)
            await self.repositories.pop(repository.slug).remove()
            self.sys_config.drop_addon_repository(url)

        # update data
supervisor/store/const.py (new file, 10 lines)
@@ -0,0 +1,10 @@
"""Constants for the add-on store."""
from enum import Enum


class StoreType(str, Enum):
    """Store Types."""

    CORE = "core"
    LOCAL = "local"
    GIT = "git"
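A brief, hedged sketch of how these store types map onto repository sources; the mapping mirrors the Repository constructor later in this diff, the helper name and sample sources are placeholders:

    # Illustrative only: classify a repository source string.
    def classify(source: str) -> StoreType:
        if source == StoreType.LOCAL:
            return StoreType.LOCAL
        if source == StoreType.CORE:
            return StoreType.CORE
        return StoreType.GIT  # any custom git URL

    classify("core")                      # StoreType.CORE
    classify("https://example.com/repo")  # StoreType.GIT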
@@ -16,7 +16,7 @@ from ..const import (
)
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import JsonFileError
from ..resolution.const import ContextType, IssueType
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..utils.json import read_json_file
from .utils import extract_hash_from_path
from .validate import SCHEMA_REPOSITORY_CONFIG
@@ -33,7 +33,7 @@ class StoreData(CoreSysAttributes):
        self.repositories: Dict[str, Any] = {}
        self.addons: Dict[str, Any] = {}

    def update(self):
    def update(self) -> None:
        """Read data from add-on repository."""
        self.repositories.clear()
        self.addons.clear()
@@ -52,7 +52,7 @@ class StoreData(CoreSysAttributes):
            if repository_element.is_dir():
                self._read_git_repository(repository_element)

    def _read_git_repository(self, path):
    def _read_git_repository(self, path: Path) -> None:
        """Process a custom repository folder."""
        slug = extract_hash_from_path(path)

@@ -73,7 +73,7 @@ class StoreData(CoreSysAttributes):
        self.repositories[slug] = repository_info
        self._read_addons_folder(path, slug)

    def _read_addons_folder(self, path, repository):
    def _read_addons_folder(self, path: Path, repository: Dict) -> None:
        """Read data from add-ons folder."""
        try:
            # Generate a list without artefacts, safe against corruption
@@ -84,12 +84,14 @@ class StoreData(CoreSysAttributes):
            ]
        except OSError as err:
            self.sys_resolution.create_issue(
                IssueType.CORRUPT_REPOSITORY, ContextType.SYSTEM
                IssueType.CORRUPT_REPOSITORY,
                ContextType.STORE,
                reference=path.stem,
                suggestions=[SuggestionType.EXECUTE_RESET],
            )
            _LOGGER.critical(
                "Can't process %s because of filesystem issues: %s", repository, err
            )
            self.sys_capture_exception(err)
            return

        for addon in addon_list:
@@ -3,16 +3,16 @@ import asyncio
import functools as ft
import logging
from pathlib import Path
import shutil
from typing import Dict, Optional

import git

from ..const import ATTR_BRANCH, ATTR_URL, URL_HASSIO_ADDONS
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import StoreGitError
from ..exceptions import StoreGitError, StoreJobError
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..utils import remove_folder
from ..validate import RE_REPOSITORY
from .utils import get_hash_from_repository

@@ -22,6 +22,8 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
class GitRepo(CoreSysAttributes):
    """Manage Add-on Git repository."""

    builtin: bool

    def __init__(self, coresys: CoreSys, path: Path, url: str):
        """Initialize Git base wrapper."""
        self.coresys: CoreSys = coresys
@@ -30,7 +32,6 @@ class GitRepo(CoreSysAttributes):
        self.lock: asyncio.Lock = asyncio.Lock()

        self.data: Dict[str, str] = RE_REPOSITORY.match(url).groupdict()
        self.slug: str = url

    @property
    def url(self) -> str:
@@ -59,11 +60,12 @@ class GitRepo(CoreSysAttributes):
                git.NoSuchPathError,
                git.GitCommandError,
            ) as err:
                _LOGGER.error("Can't load %s repo: %s.", self.path, err)
                _LOGGER.error("Can't load %s", self.path)
                self.sys_resolution.create_issue(
                    IssueType.FATAL_ERROR,
                    ContextType.STORE,
                    reference=self.slug,
                    reference=self.path.stem,
                    suggestions=[SuggestionType.EXECUTE_RESET],
                )
                raise StoreGitError() from err

@@ -77,12 +79,15 @@ class GitRepo(CoreSysAttributes):
                self.sys_resolution.create_issue(
                    IssueType.CORRUPT_REPOSITORY,
                    ContextType.STORE,
                    reference=self.slug,
                    reference=self.path.stem,
                    suggestions=[SuggestionType.EXECUTE_RESET],
                )
                raise StoreGitError() from err

    @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_SYSTEM])
    @Job(
        conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_SYSTEM],
        on_condition=StoreJobError,
    )
    async def clone(self) -> None:
        """Clone git add-on repository."""
        async with self.lock:
@@ -114,12 +119,19 @@ class GitRepo(CoreSysAttributes):
                self.sys_resolution.create_issue(
                    IssueType.FATAL_ERROR,
                    ContextType.STORE,
                    reference=self.slug,
                    suggestions=[SuggestionType.NEW_INITIALIZE],
                    reference=self.path.stem,
                    suggestions=[
                        SuggestionType.EXECUTE_RELOAD
                        if self.builtin
                        else SuggestionType.EXECUTE_REMOVE
                    ],
                )
                raise StoreGitError() from err

    @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_SYSTEM])
    @Job(
        conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_SYSTEM],
        on_condition=StoreJobError,
    )
    async def pull(self):
        """Pull Git add-on repo."""
        if self.lock.locked():
@@ -156,8 +168,8 @@ class GitRepo(CoreSysAttributes):
                self.sys_resolution.create_issue(
                    IssueType.CORRUPT_REPOSITORY,
                    ContextType.STORE,
                    reference=self.slug,
                    suggestions=[SuggestionType.EXECUTE_RELOAD],
                    reference=self.path.stem,
                    suggestions=[SuggestionType.EXECUTE_RESET],
                )
                raise StoreGitError() from err

@@ -169,19 +181,14 @@ class GitRepo(CoreSysAttributes):

        if not self.path.is_dir():
            return

        def log_err(funct, path, _):
            """Log error."""
            _LOGGER.warning("Can't remove %s", path)

        await self.sys_run_in_executor(
            ft.partial(shutil.rmtree, self.path, onerror=log_err)
        )
        await remove_folder(self.path)


class GitRepoHassIO(GitRepo):
    """Supervisor add-ons repository."""

    builtin: bool = False

    def __init__(self, coresys):
        """Initialize Git Supervisor add-on repository."""
        super().__init__(coresys, coresys.config.path_addons_core, URL_HASSIO_ADDONS)
@@ -190,6 +197,8 @@ class GitRepoHassIO(GitRepo):
class GitRepoCustom(GitRepo):
    """Custom add-ons repository."""

    builtin: bool = False

    def __init__(self, coresys, url):
        """Initialize custom Git Supervisor add-on repository."""
        path = Path(coresys.config.path_addons_git, get_hash_from_repository(url))
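As the jobs test further down in this diff illustrates, `on_condition` makes an unmet job condition surface as a raised exception rather than a silent skip. A minimal sketch of what a caller of `clone()` can now expect; the wrapper function itself is hypothetical and not part of this commit:

    # Hypothetical caller sketch: with on_condition=StoreJobError, an unmet
    # FREE_SPACE/INTERNET_SYSTEM condition raises instead of passing silently.
    async def safe_clone(repo: GitRepo) -> bool:
        try:
            await repo.clone()
        except StoreJobError:
            _LOGGER.warning("Job conditions not met, will retry later")
            return False
        except StoreGitError:
            return False
        return True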
@@ -1,75 +1,113 @@
"""Represent a Supervisor repository."""
from ..const import (
    ATTR_MAINTAINER,
    ATTR_NAME,
    ATTR_URL,
    REPOSITORY_CORE,
    REPOSITORY_LOCAL,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
import logging
from pathlib import Path
from typing import Dict, Optional

import voluptuous as vol

from ..const import ATTR_MAINTAINER, ATTR_NAME, ATTR_URL
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import JsonFileError, StoreError
from ..utils.json import read_json_file
from .const import StoreType
from .git import GitRepoCustom, GitRepoHassIO
from .utils import get_hash_from_repository
from .validate import SCHEMA_REPOSITORY_CONFIG

_LOGGER: logging.Logger = logging.getLogger(__name__)
UNKNOWN = "unknown"


class Repository(CoreSysAttributes):
    """Repository in Supervisor."""

    slug: str = None

    def __init__(self, coresys, repository):
    def __init__(self, coresys: CoreSys, repository: str):
        """Initialize repository object."""
        self.coresys = coresys
        self.source = None
        self.git = None
        self.coresys: CoreSys = coresys
        self.git: Optional[str] = None

        if repository == REPOSITORY_LOCAL:
            self.slug = repository
        elif repository == REPOSITORY_CORE:
            self.slug = repository
        self.source: str = repository
        if repository == StoreType.LOCAL:
            self._slug = repository
            self._type = StoreType.LOCAL
        elif repository == StoreType.CORE:
            self.git = GitRepoHassIO(coresys)
            self._slug = repository
            self._type = StoreType.CORE
        else:
            self.slug = get_hash_from_repository(repository)
            self.git = GitRepoCustom(coresys, repository)
            self.source = repository
            self._slug = get_hash_from_repository(repository)
            self._type = StoreType.GIT

    def __repr__(self) -> str:
        """Return internal representation."""
        return f"<Store.Repository: {self.slug} / {self.source}>"

    @property
    def data(self):
    def slug(self) -> str:
        """Return repo slug."""
        return self._slug

    @property
    def type(self) -> StoreType:
        """Return type of the store."""
        return self._type

    @property
    def data(self) -> Dict:
        """Return data struct repository."""
        return self.sys_store.data.repositories.get(self.slug, {})

    @property
    def name(self):
    def name(self) -> str:
        """Return name of repository."""
        return self.data.get(ATTR_NAME, UNKNOWN)

    @property
    def url(self):
    def url(self) -> str:
        """Return URL of repository."""
        return self.data.get(ATTR_URL, self.source)

    @property
    def maintainer(self):
    def maintainer(self) -> str:
        """Return maintainer of repository."""
        return self.data.get(ATTR_MAINTAINER, UNKNOWN)

    async def load(self):
    def validate(self) -> bool:
        """Check if store is valid."""
        if self.type != StoreType.GIT:
            return True

        # Does the repository file exist?
        repository_file = Path(self.git.path, "repository.json")
        if not repository_file.exists():
            return False

        # Is it valid?
        try:
            SCHEMA_REPOSITORY_CONFIG(read_json_file(repository_file))
        except (JsonFileError, vol.Invalid):
            return False

        return True

    async def load(self) -> None:
        """Load add-on repository."""
        if not self.git:
            return
        await self.git.load()

    async def update(self):
    async def update(self) -> None:
        """Update add-on repository."""
        if not self.git:
        if self.type == StoreType.LOCAL or not self.validate():
            return
        await self.git.pull()

    async def remove(self):
    async def remove(self) -> None:
        """Remove add-on repository."""
        if self.slug in (REPOSITORY_CORE, REPOSITORY_LOCAL):
            raise APIError("Can't remove built-in repositories!")
        if self.type != StoreType.GIT:
            _LOGGER.error("Can't remove built-in repositories!")
            raise StoreError()

        await self.git.remove()
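Taken together with the StoreManager helpers above, a short usage sketch; this is illustrative only, the URL is a placeholder and the flow is not part of the commit:

    # Illustrative only: look a repository up by source URL, check it, drop it.
    repository = coresys.store.get_from_url("https://example.com/addons")
    if not repository.validate():
        await repository.remove()  # raises StoreError for built-in repositories
        coresys.config.drop_addon_repository(repository.source)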
@@ -21,6 +21,7 @@ from .exceptions import (
    DockerError,
    HostAppArmorError,
    SupervisorError,
    SupervisorJobError,
    SupervisorUpdateError,
)
from .resolution.const import ContextType, IssueType
@@ -137,6 +138,7 @@ class Supervisor(CoreSysAttributes):
            self.sys_resolution.create_issue(
                IssueType.UPDATE_FAILED, ContextType.SUPERVISOR
            )
            self.sys_capture_exception(err)
            raise SupervisorUpdateError() from err
        else:
            self.sys_config.version = version
@@ -146,7 +148,7 @@ class Supervisor(CoreSysAttributes):
            await self.update_apparmor()
        self.sys_create_task(self.sys_core.stop())

    @Job(conditions=[JobCondition.RUNNING])
    @Job(conditions=[JobCondition.RUNNING], on_condition=SupervisorJobError)
    async def restart(self) -> None:
        """Restart Supervisor soft."""
        self.sys_core.exit_code = 100
@@ -18,13 +18,14 @@ from .const import (
    ATTR_IMAGE,
    ATTR_MULTICAST,
    ATTR_OBSERVER,
    ATTR_OTA,
    ATTR_SUPERVISOR,
    FILE_HASSIO_UPDATER,
    URL_HASSIO_VERSION,
    UpdateChannel,
)
from .coresys import CoreSysAttributes
from .exceptions import HassioUpdaterError
from .exceptions import UpdaterError, UpdaterJobError
from .jobs.decorator import Job, JobCondition
from .utils import AsyncThrottle
from .utils.json import JsonConfig
@@ -43,12 +44,12 @@ class Updater(JsonConfig, CoreSysAttributes):

    async def load(self) -> None:
        """Update internal data."""
        with suppress(HassioUpdaterError):
        with suppress(UpdaterError):
            await self.fetch_data()

    async def reload(self) -> None:
        """Update internal data."""
        with suppress(HassioUpdaterError):
        with suppress(UpdaterError):
            await self.fetch_data()

    @property
@@ -93,7 +94,7 @@ class Updater(JsonConfig, CoreSysAttributes):

    @property
    def image_homeassistant(self) -> Optional[str]:
        """Return latest version of Home Assistant."""
        """Return image of Home Assistant docker."""
        if ATTR_HOMEASSISTANT not in self._data[ATTR_IMAGE]:
            return None
        return self._data[ATTR_IMAGE][ATTR_HOMEASSISTANT].format(
@@ -102,7 +103,7 @@ class Updater(JsonConfig, CoreSysAttributes):

    @property
    def image_supervisor(self) -> Optional[str]:
        """Return latest version of Supervisor."""
        """Return image of Supervisor docker."""
        if ATTR_SUPERVISOR not in self._data[ATTR_IMAGE]:
            return None
        return self._data[ATTR_IMAGE][ATTR_SUPERVISOR].format(
@@ -111,28 +112,28 @@ class Updater(JsonConfig, CoreSysAttributes):

    @property
    def image_cli(self) -> Optional[str]:
        """Return latest version of CLI."""
        """Return image of CLI docker."""
        if ATTR_CLI not in self._data[ATTR_IMAGE]:
            return None
        return self._data[ATTR_IMAGE][ATTR_CLI].format(arch=self.sys_arch.supervisor)

    @property
    def image_dns(self) -> Optional[str]:
        """Return latest version of DNS."""
        """Return image of DNS docker."""
        if ATTR_DNS not in self._data[ATTR_IMAGE]:
            return None
        return self._data[ATTR_IMAGE][ATTR_DNS].format(arch=self.sys_arch.supervisor)

    @property
    def image_audio(self) -> Optional[str]:
        """Return latest version of Audio."""
        """Return image of Audio docker."""
        if ATTR_AUDIO not in self._data[ATTR_IMAGE]:
            return None
        return self._data[ATTR_IMAGE][ATTR_AUDIO].format(arch=self.sys_arch.supervisor)

    @property
    def image_observer(self) -> Optional[str]:
        """Return latest version of Observer."""
        """Return image of Observer docker."""
        if ATTR_OBSERVER not in self._data[ATTR_IMAGE]:
            return None
        return self._data[ATTR_IMAGE][ATTR_OBSERVER].format(
@@ -141,13 +142,18 @@ class Updater(JsonConfig, CoreSysAttributes):

    @property
    def image_multicast(self) -> Optional[str]:
        """Return latest version of Multicast."""
        """Return image of Multicast docker."""
        if ATTR_MULTICAST not in self._data[ATTR_IMAGE]:
            return None
        return self._data[ATTR_IMAGE][ATTR_MULTICAST].format(
            arch=self.sys_arch.supervisor
        )

    @property
    def ota_url(self) -> Optional[str]:
        """Return OTA url for OS."""
        return self._data.get(ATTR_OTA)

    @property
    def channel(self) -> UpdateChannel:
        """Return upstream channel of Supervisor instance."""
@@ -159,7 +165,10 @@ class Updater(JsonConfig, CoreSysAttributes):
        self._data[ATTR_CHANNEL] = value

    @AsyncThrottle(timedelta(seconds=30))
    @Job(conditions=[JobCondition.INTERNET_SYSTEM])
    @Job(
        conditions=[JobCondition.INTERNET_SYSTEM],
        on_condition=UpdaterJobError,
    )
    async def fetch_data(self):
        """Fetch current versions from GitHub.

@@ -175,16 +184,16 @@ class Updater(JsonConfig, CoreSysAttributes):

        except (aiohttp.ClientError, asyncio.TimeoutError) as err:
            _LOGGER.warning("Can't fetch versions from %s: %s", url, err)
            raise HassioUpdaterError() from err
            raise UpdaterError() from err

        except json.JSONDecodeError as err:
            _LOGGER.warning("Can't parse versions from %s: %s", url, err)
            raise HassioUpdaterError() from err
            raise UpdaterError() from err

        # data valid?
        if not data or data.get(ATTR_CHANNEL) != self.channel:
            _LOGGER.warning("Invalid data from %s", url)
            raise HassioUpdaterError()
            raise UpdaterError()

        try:
            # Update supervisor version
@@ -196,6 +205,7 @@ class Updater(JsonConfig, CoreSysAttributes):
            # Update HassOS version
            if self.sys_hassos.board:
                self._data[ATTR_HASSOS] = data["hassos"][self.sys_hassos.board]
                self._data[ATTR_OTA] = data["ota"]

            # Update Home Assistant plugins
            self._data[ATTR_CLI] = data["cli"]
@@ -215,7 +225,7 @@ class Updater(JsonConfig, CoreSysAttributes):

        except KeyError as err:
            _LOGGER.warning("Can't process version data: %s", err)
            raise HassioUpdaterError() from err
            raise UpdaterError() from err

        else:
            self.save_data()
@@ -3,6 +3,7 @@ import asyncio
from datetime import datetime
from ipaddress import IPv4Address
import logging
from pathlib import Path
import re
import socket
from typing import Any, Optional
@@ -132,3 +133,26 @@ def get_message_from_exception_chain(err: Exception) -> str:
        return ""

    return get_message_from_exception_chain(err.__context__)


async def remove_folder(folder: Path, content_only: bool = False) -> None:
    """Remove folder and reset privileges.

    Is needed to avoid issue with:
    - CAP_DAC_OVERRIDE
    - CAP_DAC_READ_SEARCH
    """
    del_folder = f"{folder}" + "/{,.[!.],..?}*" if content_only else f"{folder}"
    try:
        proc = await asyncio.create_subprocess_exec(
            "bash", "-c", f"rm -rf {del_folder}", stdout=asyncio.subprocess.DEVNULL
        )

        _, error_msg = await proc.communicate()
    except OSError as err:
        error_msg = str(err)
    else:
        if proc.returncode == 0:
            return

    _LOGGER.error("Can't remove folder %s: %s", folder, error_msg)
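The new tests at the end of this diff exercise this helper; a condensed usage sketch, with placeholder paths and a hypothetical wrapper coroutine:

    # Illustrative only: wipe a directory tree, or just its contents.
    from pathlib import Path
    from supervisor.utils import remove_folder

    async def cleanup() -> None:
        # remove the folder itself
        await remove_folder(Path("/tmp/old_repo"))
        # keep the folder, drop its contents (including dotfiles)
        await remove_folder(Path("/tmp/cache"), content_only=True)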
@@ -67,7 +67,7 @@ class JsonConfig:
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error(
            _LOGGER.critical(
                "Can't parse %s: %s", self._file, humanize_error(self._data, ex)
            )

@@ -81,7 +81,7 @@ class JsonConfig:
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse data: %s", humanize_error(self._data, ex))
            _LOGGER.critical("Can't parse data: %s", humanize_error(self._data, ex))

            # Load last valid data
            _LOGGER.warning("Resetting %s to last version", self._file)
@@ -27,6 +27,7 @@ from .const import (
    ATTR_LOGGING,
    ATTR_MULTICAST,
    ATTR_OBSERVER,
    ATTR_OTA,
    ATTR_PASSWORD,
    ATTR_PORT,
    ATTR_PORTS,
@@ -135,6 +136,7 @@ SCHEMA_HASS_CONFIG = vol.Schema(
)


# pylint: disable=no-value-for-parameter
SCHEMA_UPDATER_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_CHANNEL, default=UpdateChannel.STABLE): vol.Coerce(
@@ -160,6 +162,7 @@ SCHEMA_UPDATER_CONFIG = vol.Schema(
            },
            extra=vol.REMOVE_EXTRA,
        ),
        vol.Optional(ATTR_OTA): vol.Url(),
    },
    extra=vol.REMOVE_EXTRA,
)
@@ -26,8 +26,18 @@ async def test_api_network_info(api_client, coresys):
            assert interface["ipv4"]["gateway"] == "192.168.2.1"
        if interface["interface"] == TEST_INTERFACE_WLAN:
            assert not interface["primary"]
            assert interface["ipv4"] is None
            assert interface["ipv6"] is None
            assert interface["ipv4"] == {
                "address": [],
                "gateway": None,
                "method": "disabled",
                "nameservers": [],
            }
            assert interface["ipv6"] == {
                "address": [],
                "gateway": None,
                "method": "disabled",
                "nameservers": [],
            }

    assert result["data"]["docker"]["interface"] == DOCKER_NETWORK
    assert result["data"]["docker"]["address"] == str(DOCKER_NETWORK_MASK)
@@ -175,3 +185,12 @@ async def test_api_network_wireless_scan(api_client):
        ap["ssid"] for ap in result["data"]["accesspoints"]
    ]
    assert [47, 63] == [ap["signal"] for ap in result["data"]["accesspoints"]]


@pytest.mark.asyncio
async def test_api_network_reload(api_client, coresys):
    """Test network manager reload api."""
    resp = await api_client.post("/network/reload")
    result = await resp.json()

    assert result["result"] == "ok"
@@ -182,7 +182,6 @@ async def api_client(aiohttp_client, coresys: CoreSys):
def store_manager(coresys: CoreSys):
    """Fixture for the store manager."""
    sm_obj = coresys.store
    sm_obj.repositories = set(coresys.config.addons_repositories)
    with patch("supervisor.store.data.StoreData.update", return_value=MagicMock()):
        yield sm_obj
@@ -1,12 +1,29 @@
"""Test NetworkInterface."""
from unittest.mock import AsyncMock

import pytest

from supervisor.dbus.network import NetworkManager
from supervisor.exceptions import HostNotSupportedError

from tests.const import TEST_INTERFACE

# pylint: disable=protected-access


@pytest.mark.asyncio
async def test_network_manager(network_manager: NetworkManager):
    """Test network manager update."""
    assert TEST_INTERFACE in network_manager.interfaces


@pytest.mark.asyncio
async def test_network_manager_version(network_manager: NetworkManager):
    """Test that version validation works."""
    await network_manager._validate_version()
    assert network_manager.version == "1.22.10"

    network_manager.dbus.get_properties = AsyncMock(return_value={"Version": "1.13.9"})
    with pytest.raises(HostNotSupportedError):
        await network_manager._validate_version()
    assert network_manager.version == "1.13.9"
@@ -21,7 +21,8 @@ async def test_interface_update_payload_ethernet(coresys):
    assert DBus.parse_gvariant(data)["ipv6"]["method"] == "auto"

    assert (
        DBus.parse_gvariant(data)["802-3-ethernet"]["assigned-mac-address"] == "stable"
        DBus.parse_gvariant(data)["802-3-ethernet"]["assigned-mac-address"]
        == "preserve"
    )

    assert DBus.parse_gvariant(data)["connection"]["mdns"] == 2
@@ -49,11 +50,11 @@ async def test_interface_update_payload_ethernet_ipv4(coresys):
        DBus.parse_gvariant(data)["ipv4"]["address-data"][0]["address"] == "192.168.1.1"
    )
    assert DBus.parse_gvariant(data)["ipv4"]["address-data"][0]["prefix"] == 24
    assert DBus.parse_gvariant(data)["ipv4"]["dns"] == [16843009, 16777473]
    assert DBus.parse_gvariant(data)["ipv4"]["dns"] == [16843009, 16842753]
    assert (
        DBus.parse_gvariant(data)["connection"]["uuid"] == inet.settings.connection.uuid
    )
    assert DBus.parse_gvariant(data)["connection"]["id"] == inet.settings.connection.id
    assert DBus.parse_gvariant(data)["connection"]["id"] == "Supervisor eth0"
    assert DBus.parse_gvariant(data)["connection"]["type"] == "802-3-ethernet"
    assert DBus.parse_gvariant(data)["connection"]["interface-name"] == interface.name
    assert DBus.parse_gvariant(data)["ipv4"]["gateway"] == "192.168.1.1"
@@ -76,7 +77,7 @@ async def test_interface_update_payload_ethernet_ipv4_disabled(coresys):
    assert (
        DBus.parse_gvariant(data)["connection"]["uuid"] == inet.settings.connection.uuid
    )
    assert DBus.parse_gvariant(data)["connection"]["id"] == inet.settings.connection.id
    assert DBus.parse_gvariant(data)["connection"]["id"] == "Supervisor eth0"
    assert DBus.parse_gvariant(data)["connection"]["type"] == "802-3-ethernet"
    assert DBus.parse_gvariant(data)["connection"]["interface-name"] == interface.name

@@ -98,7 +99,7 @@ async def test_interface_update_payload_ethernet_ipv4_auto(coresys):
    assert (
        DBus.parse_gvariant(data)["connection"]["uuid"] == inet.settings.connection.uuid
    )
    assert DBus.parse_gvariant(data)["connection"]["id"] == inet.settings.connection.id
    assert DBus.parse_gvariant(data)["connection"]["id"] == "Supervisor eth0"
    assert DBus.parse_gvariant(data)["connection"]["type"] == "802-3-ethernet"
    assert DBus.parse_gvariant(data)["connection"]["interface-name"] == interface.name

@@ -114,6 +115,7 @@ async def test_interface_update_payload_ethernet_ipv6(coresys):
    interface.ipv6.nameservers = [
        ip_address("2606:4700:4700::64"),
        ip_address("2606:4700:4700::6400"),
        ip_address("2606:4700:4700::1111"),
    ]
    interface.ipv6.gateway = ip_address("fe80::da58:d7ff:fe00:9c69")

@@ -129,13 +131,14 @@ async def test_interface_update_payload_ethernet_ipv6(coresys):
    )
    assert DBus.parse_gvariant(data)["ipv6"]["address-data"][0]["prefix"] == 64
    assert DBus.parse_gvariant(data)["ipv6"]["dns"] == [
        50543257694033307102031451402929176676,
        50543257694033307102031451402929202176,
        [38, 6, 71, 0, 71, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100],
        [38, 6, 71, 0, 71, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 0],
        [38, 6, 71, 0, 71, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 17],
    ]
    assert (
        DBus.parse_gvariant(data)["connection"]["uuid"] == inet.settings.connection.uuid
    )
    assert DBus.parse_gvariant(data)["connection"]["id"] == inet.settings.connection.id
    assert DBus.parse_gvariant(data)["connection"]["id"] == "Supervisor eth0"
    assert DBus.parse_gvariant(data)["connection"]["type"] == "802-3-ethernet"
    assert DBus.parse_gvariant(data)["connection"]["interface-name"] == interface.name
    assert DBus.parse_gvariant(data)["ipv6"]["gateway"] == "fe80::da58:d7ff:fe00:9c69"
@@ -157,7 +160,7 @@ async def test_interface_update_payload_ethernet_ipv6_disabled(coresys):
    assert (
        DBus.parse_gvariant(data)["connection"]["uuid"] == inet.settings.connection.uuid
    )
    assert DBus.parse_gvariant(data)["connection"]["id"] == inet.settings.connection.id
    assert DBus.parse_gvariant(data)["connection"]["id"] == "Supervisor eth0"
    assert DBus.parse_gvariant(data)["connection"]["type"] == "802-3-ethernet"
    assert DBus.parse_gvariant(data)["connection"]["interface-name"] == interface.name

@@ -178,7 +181,7 @@ async def test_interface_update_payload_ethernet_ipv6_auto(coresys):
    assert (
        DBus.parse_gvariant(data)["connection"]["uuid"] == inet.settings.connection.uuid
    )
    assert DBus.parse_gvariant(data)["connection"]["id"] == inet.settings.connection.id
    assert DBus.parse_gvariant(data)["connection"]["id"] == "Supervisor eth0"
    assert DBus.parse_gvariant(data)["connection"]["type"] == "802-3-ethernet"
    assert DBus.parse_gvariant(data)["connection"]["interface-name"] == interface.name

@@ -243,7 +246,8 @@ async def test_interface_update_payload_wireless_open(coresys):
    assert DBus.parse_gvariant(data)["802-11-wireless"]["ssid"] == [84, 101, 115, 116]
    assert DBus.parse_gvariant(data)["802-11-wireless"]["mode"] == "infrastructure"
    assert (
        DBus.parse_gvariant(data)["802-11-wireless"]["assigned-mac-address"] == "stable"
        DBus.parse_gvariant(data)["802-11-wireless"]["assigned-mac-address"]
        == "preserve"
    )
    assert "802-11-wireless-security" not in DBus.parse_gvariant(data)

@@ -263,4 +267,5 @@ async def test_interface_update_payload_vlan(coresys):
    assert DBus.parse_gvariant(data)["vlan"]["id"] == 10
    assert DBus.parse_gvariant(data)["vlan"]["parent"] == interface.name
    assert DBus.parse_gvariant(data)["connection"]["type"] == "vlan"
    assert DBus.parse_gvariant(data)["connection"]["id"] == "Supervisor eth0.10"
    assert "interface-name" not in DBus.parse_gvariant(data)["connection"]
@@ -232,3 +232,28 @@ async def test_ignore_conditions(coresys: CoreSys):

    coresys.jobs.ignore_conditions = [JobCondition.RUNNING]
    assert await test.execute()


async def test_exception_conditions(coresys: CoreSys):
    """Test that an unmet condition raises the on_condition exception."""

    class TestClass:
        """Test class."""

        def __init__(self, coresys: CoreSys):
            """Initialize the test class."""
            self.coresys = coresys

        @Job(conditions=[JobCondition.RUNNING], on_condition=HassioError)
        async def execute(self):
            """Execute the class method."""
            return True

    test = TestClass(coresys)

    coresys.core.state = CoreState.RUNNING
    assert await test.execute()

    coresys.core.state = CoreState.FREEZE
    with pytest.raises(HassioError):
        await test.execute()
@@ -10,6 +10,7 @@ from supervisor.misc.filter import filter_data
from supervisor.resolution.const import (
    ContextType,
    IssueType,
    SuggestionType,
    UnhealthyReason,
    UnsupportedReason,
)
@@ -124,6 +125,34 @@ def test_issues_on_report(coresys):
    assert event["contexts"]["resolution"]["issues"][0]["context"] == ContextType.SYSTEM


def test_suggestions_on_report(coresys):
    """Attach suggestion to report."""

    coresys.resolution.create_issue(
        IssueType.FATAL_ERROR,
        ContextType.SYSTEM,
        suggestions=[SuggestionType.EXECUTE_RELOAD],
    )

    coresys.config.diagnostics = True
    coresys.core.state = CoreState.RUNNING

    with patch("shutil.disk_usage", return_value=(42, 42, 2 * (1024.0 ** 3))):
        event = filter_data(coresys, SAMPLE_EVENT, {})

    assert "issues" in event["contexts"]["resolution"]
    assert event["contexts"]["resolution"]["issues"][0]["type"] == IssueType.FATAL_ERROR
    assert event["contexts"]["resolution"]["issues"][0]["context"] == ContextType.SYSTEM
    assert (
        event["contexts"]["resolution"]["suggestions"][0]["type"]
        == SuggestionType.EXECUTE_RELOAD
    )
    assert (
        event["contexts"]["resolution"]["suggestions"][0]["context"]
        == ContextType.SYSTEM
    )


def test_unhealthy_on_report(coresys):
    """Attach unhealthy to report."""

@@ -136,3 +165,17 @@ def test_unhealthy_on_report(coresys):

    assert "issues" in event["contexts"]["resolution"]
    assert event["contexts"]["resolution"]["unhealthy"][-1] == UnhealthyReason.DOCKER


def test_images_report(coresys):
    """Attach image to report."""

    coresys.config.diagnostics = True
    coresys.core.state = CoreState.RUNNING
    coresys.resolution.evaluate.cached_images.add("my/test:image")

    with patch("shutil.disk_usage", return_value=(42, 42, 2 * (1024.0 ** 3))):
        event = filter_data(coresys, SAMPLE_EVENT, {})

    assert "issues" in event["contexts"]["resolution"]
    assert event["contexts"]["host"]["images"] == ["my/test:image"]
@@ -6,10 +6,7 @@ from docker.errors import DockerException

from supervisor.const import CoreState
from supervisor.coresys import CoreSys
from supervisor.resolution.evaluations.container import (
    DOCKER_IMAGE_DENYLIST,
    EvaluateContainer,
)
from supervisor.resolution.evaluations.container import EvaluateContainer


def test_get_images(coresys: CoreSys):
@@ -37,11 +34,27 @@ async def test_evaluation(coresys: CoreSys):

    with patch(
        "supervisor.resolution.evaluations.container.EvaluateContainer._get_images",
        return_value=[MagicMock(tags=[f"{DOCKER_IMAGE_DENYLIST[0]}:latest"])],
        return_value=[
            MagicMock(
                tags=[
                    "armhfbuild/watchtower:latest",
                    "concerco/watchtowerv6:10.0.2",
                    "containrrr/watchtower:1.1",
                    "pyouroboros/ouroboros:1.4.3",
                ]
            )
        ],
    ):
        await container()
        assert container.reason in coresys.resolution.unsupported

        assert coresys.resolution.evaluate.cached_images == {
            "armhfbuild/watchtower:latest",
            "concerco/watchtowerv6:10.0.2",
            "containrrr/watchtower:1.1",
            "pyouroboros/ouroboros:1.4.3",
        }

    with patch(
        "supervisor.resolution.evaluations.container.EvaluateContainer._get_images",
        return_value=[MagicMock(tags=[])],
@@ -49,6 +62,8 @@ async def test_evaluation(coresys: CoreSys):
        await container()
        assert container.reason not in coresys.resolution.unsupported

        assert coresys.resolution.evaluate.cached_images == set()


async def test_did_run(coresys: CoreSys):
    """Test that the evaluation ran as expected."""
tests/resolution/fixup/test_store_execute_reload.py (new file, 32 lines)
@@ -0,0 +1,32 @@
"""Test evaluation base."""
|
||||
# pylint: disable=import-error,protected-access
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.resolution.const import ContextType, IssueType, SuggestionType
|
||||
from supervisor.resolution.data import Issue, Suggestion
|
||||
from supervisor.resolution.fixups.store_execute_reload import FixupStoreExecuteReload
|
||||
|
||||
|
||||
async def test_fixup(coresys: CoreSys):
|
||||
"""Test fixup."""
|
||||
store_execute_reload = FixupStoreExecuteReload(coresys)
|
||||
|
||||
assert store_execute_reload.auto
|
||||
|
||||
coresys.resolution.suggestions = Suggestion(
|
||||
SuggestionType.EXECUTE_RELOAD, ContextType.STORE, reference="test"
|
||||
)
|
||||
coresys.resolution.issues = Issue(
|
||||
IssueType.FATAL_ERROR, ContextType.STORE, reference="test"
|
||||
)
|
||||
|
||||
mock_repositorie = AsyncMock()
|
||||
coresys.store.repositories["test"] = mock_repositorie
|
||||
|
||||
await store_execute_reload()
|
||||
|
||||
assert mock_repositorie.load.called
|
||||
assert mock_repositorie.update.called
|
||||
assert len(coresys.resolution.suggestions) == 0
|
||||
assert len(coresys.resolution.issues) == 0
|
tests/resolution/fixup/test_store_execute_remove.py (new file, 36 lines)
@@ -0,0 +1,36 @@
"""Test evaluation base."""
|
||||
# pylint: disable=import-error,protected-access
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.resolution.const import ContextType, IssueType, SuggestionType
|
||||
from supervisor.resolution.data import Issue, Suggestion
|
||||
from supervisor.resolution.fixups.store_execute_remove import FixupStoreExecuteRemove
|
||||
|
||||
|
||||
async def test_fixup(coresys: CoreSys):
|
||||
"""Test fixup."""
|
||||
store_execute_remove = FixupStoreExecuteRemove(coresys)
|
||||
|
||||
assert store_execute_remove.auto
|
||||
|
||||
coresys.resolution.suggestions = Suggestion(
|
||||
SuggestionType.EXECUTE_REMOVE, ContextType.STORE, reference="test"
|
||||
)
|
||||
coresys.resolution.issues = Issue(
|
||||
IssueType.CORRUPT_REPOSITORY, ContextType.STORE, reference="test"
|
||||
)
|
||||
|
||||
mock_repositorie = AsyncMock()
|
||||
mock_repositorie.slug = "test"
|
||||
|
||||
coresys.store.repositories["test"] = mock_repositorie
|
||||
|
||||
await store_execute_remove()
|
||||
|
||||
assert mock_repositorie.remove.called
|
||||
assert coresys.config.save_data.called
|
||||
assert len(coresys.resolution.suggestions) == 0
|
||||
assert len(coresys.resolution.issues) == 0
|
||||
|
||||
assert "test" not in coresys.store.repositories
|
tests/resolution/fixup/test_store_execute_reset.py (new file, 38 lines)
@@ -0,0 +1,38 @@
"""Test evaluation base."""
|
||||
# pylint: disable=import-error,protected-access
|
||||
from pathlib import Path
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from supervisor.coresys import CoreSys
|
||||
from supervisor.resolution.const import ContextType, IssueType, SuggestionType
|
||||
from supervisor.resolution.data import Issue, Suggestion
|
||||
from supervisor.resolution.fixups.store_execute_reset import FixupStoreExecuteReset
|
||||
|
||||
|
||||
async def test_fixup(coresys: CoreSys, tmp_path):
|
||||
"""Test fixup."""
|
||||
store_execute_reset = FixupStoreExecuteReset(coresys)
|
||||
test_repo = Path(tmp_path, "test_repo")
|
||||
|
||||
assert store_execute_reset.auto
|
||||
|
||||
coresys.resolution.suggestions = Suggestion(
|
||||
SuggestionType.EXECUTE_RESET, ContextType.STORE, reference="test"
|
||||
)
|
||||
coresys.resolution.issues = Issue(
|
||||
IssueType.CORRUPT_REPOSITORY, ContextType.STORE, reference="test"
|
||||
)
|
||||
|
||||
test_repo.mkdir()
|
||||
assert test_repo.exists()
|
||||
|
||||
mock_repositorie = AsyncMock()
|
||||
mock_repositorie.git.path = test_repo
|
||||
coresys.store.repositories["test"] = mock_repositorie
|
||||
|
||||
await store_execute_reset()
|
||||
|
||||
assert not test_repo.exists()
|
||||
assert mock_repositorie.load.called
|
||||
assert len(coresys.resolution.suggestions) == 0
|
||||
assert len(coresys.resolution.issues) == 0
|
@@ -4,16 +4,35 @@ from unittest.mock import patch

import pytest

from supervisor.resolution.const import SuggestionType
from supervisor.store import BUILTIN_REPOSITORIES


@pytest.mark.asyncio
async def test_add_valid_repository(coresys, store_manager):
    """Test add custom repository."""
    current = coresys.config.addons_repositories
    with patch("supervisor.store.repository.Repository.load", return_value=True), patch(
    with patch("supervisor.store.repository.Repository.load", return_value=None), patch(
        "pathlib.Path.read_text",
        return_value=json.dumps({"name": "Awesome repository"}),
    ):
    ), patch("pathlib.Path.exists", return_value=True):
        await store_manager.update_repositories(current + ["http://example.com"])
        assert store_manager.get_from_url("http://example.com").validate()
    assert "http://example.com" in coresys.config.addons_repositories


@pytest.mark.asyncio
async def test_add_valid_repository_url(coresys, store_manager):
    """Test add custom repository."""
    current = coresys.config.addons_repositories
    with patch("supervisor.store.repository.Repository.load", return_value=None), patch(
        "pathlib.Path.read_text",
        return_value=json.dumps(
            {"name": "Awesome repository", "url": "http://example2.com/docs"}
        ),
    ), patch("pathlib.Path.exists", return_value=True):
        await store_manager.update_repositories(current + ["http://example.com"])
        assert store_manager.get_from_url("http://example.com").validate()
    assert "http://example.com" in coresys.config.addons_repositories


@@ -21,9 +40,36 @@ async def test_add_valid_repository(coresys, store_manager):
async def test_add_invalid_repository(coresys, store_manager):
    """Test add custom repository."""
    current = coresys.config.addons_repositories
    with patch("supervisor.store.repository.Repository.load", return_value=True), patch(
    with patch("supervisor.store.repository.Repository.load", return_value=None), patch(
        "pathlib.Path.read_text",
        return_value="",
    ):
        await store_manager.update_repositories(current + ["http://example.com"])
    assert "http://example.com" not in coresys.config.addons_repositories
        assert not store_manager.get_from_url("http://example.com").validate()

    assert "http://example.com" in coresys.config.addons_repositories
    assert coresys.resolution.suggestions[-1].type == SuggestionType.EXECUTE_REMOVE


@pytest.mark.asyncio
async def test_add_invalid_repository_file(coresys, store_manager):
    """Test add custom repository."""
    current = coresys.config.addons_repositories
    with patch("supervisor.store.repository.Repository.load", return_value=None), patch(
        "pathlib.Path.read_text",
        return_value=json.dumps({"name": "Awesome repository"}),
    ), patch("pathlib.Path.exists", return_value=False):
        await store_manager.update_repositories(current + ["http://example.com"])
        assert not store_manager.get_from_url("http://example.com").validate()

    assert "http://example.com" in coresys.config.addons_repositories
    assert coresys.resolution.suggestions[-1].type == SuggestionType.EXECUTE_REMOVE


@pytest.mark.asyncio
async def test_preinstall_valid_repository(coresys, store_manager):
    """Test add core repository valid."""
    with patch("supervisor.store.repository.Repository.load", return_value=None):
        await store_manager.update_repositories(BUILTIN_REPOSITORIES)
        assert store_manager.get("core").validate()
        assert store_manager.get("local").validate()
tests/utils/test_remove_folder.py (new file, 45 lines)
@@ -0,0 +1,45 @@
"""test json."""
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
|
||||
import pytest
|
||||
|
||||
from supervisor.utils import remove_folder
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remove_all(tmp_path):
|
||||
"""Test remove folder."""
|
||||
# Prepair test folder
|
||||
temp_orig = tmp_path.joinpath("orig")
|
||||
fixture_data = Path(__file__).parents[1].joinpath("fixtures/tar_data")
|
||||
shutil.copytree(fixture_data, temp_orig, symlinks=True)
|
||||
|
||||
assert temp_orig.exists()
|
||||
await remove_folder(temp_orig)
|
||||
assert not temp_orig.exists()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remove_content(tmp_path):
|
||||
"""Test remove content of folder."""
|
||||
# Prepair test folder
|
||||
temp_orig = tmp_path.joinpath("orig")
|
||||
fixture_data = Path(__file__).parents[1].joinpath("fixtures/tar_data")
|
||||
shutil.copytree(fixture_data, temp_orig, symlinks=True)
|
||||
|
||||
test_folder = Path(temp_orig, "test1")
|
||||
test_file = Path(temp_orig, "README.md")
|
||||
test_hidden = Path(temp_orig, ".hidden")
|
||||
|
||||
test_hidden.touch()
|
||||
|
||||
assert test_folder.exists()
|
||||
assert test_file.exists()
|
||||
assert test_hidden.exists()
|
||||
|
||||
await remove_folder(temp_orig, content_only=True)
|
||||
|
||||
assert not test_folder.exists()
|
||||
assert not test_file.exists()
|
||||
assert not test_hidden.exists()
|