Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-07-25 18:16:32 +00:00)
Commit 3af970ead6
@@ -1,4 +1,3 @@
// See https://aka.ms/vscode-remote/devcontainer.json for format details.
{
    "name": "Supervisor dev",
    "context": "..",

pylintrc
@@ -2,6 +2,8 @@
reports=no
jobs=2

good-names=id,i,j,k,ex,Run,_,fp,T

# Reasons disabled:
# format - handled by black
# locally-disabled - it spams too much

@@ -2,4 +2,4 @@
# ==============================================================================
# Take down the S6 supervision tree when Supervisor fails
# ==============================================================================
s6-svscanctl -t /var/run/s6/services
redirfd -w 2 /dev/null s6-svscanctl -t /var/run/s6/services
setup.cfg
@@ -11,7 +11,20 @@ default_section = THIRDPARTY
forced_separate = tests
combine_as_imports = true
use_parentheses = true
known_first_party = supervisor,tests

[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
doctests = True
max-line-length = 88
ignore = E501, W503
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# W504 line break after binary operator
ignore =
    E501,
    W503,
    E203,
    D202,
    W504
@@ -71,9 +71,8 @@ class Addon(AddonModel):

    def __init__(self, coresys: CoreSys, slug: str):
        """Initialize data holder."""
        self.coresys: CoreSys = coresys
        super().__init__(coresys, slug)
        self.instance: DockerAddon = DockerAddon(coresys, self)
        self.slug: str = slug

    async def load(self) -> None:
        """Async initialize of object."""
@@ -626,7 +625,7 @@ class Addon(AddonModel):
            _LOGGER.error("Add-on %s is not available for this Platform", self.slug)
            raise AddonsNotSupportedError()

        # Restore local add-on informations
        # Restore local add-on information
        _LOGGER.info("Restore config for addon %s", self.slug)
        restore_image = self._image(data[ATTR_SYSTEM])
        self.sys_addons.data.restore(

@@ -1,5 +1,6 @@
"""Supervisor add-on build environment."""
from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, Dict

@@ -30,7 +31,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):

    @property
    def base_image(self) -> str:
        """Base images for this add-on."""
        """Return base image for this add-on."""
        return self._data[ATTR_BUILD_FROM].get(
            self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
        )

@@ -12,8 +12,8 @@ from ..const import (
    FILE_HASSIO_ADDONS,
)
from ..coresys import CoreSys, CoreSysAttributes
from ..utils.json import JsonConfig
from ..store.addon import AddonStore
from ..utils.json import JsonConfig
from .addon import Addon
from .validate import SCHEMA_ADDONS_FILE

@@ -65,7 +65,7 @@ from ..const import (
    SECURITY_PROFILE,
    AddonStages,
)
from ..coresys import CoreSysAttributes
from ..coresys import CoreSys, CoreSysAttributes
from .validate import RE_SERVICE, RE_VOLUME, schema_ui_options, validate_options

Data = Dict[str, Any]
@@ -74,7 +74,10 @@ Data = Dict[str, Any]
class AddonModel(CoreSysAttributes):
    """Add-on Data layout."""

    slug: str = None
    def __init__(self, coresys: CoreSys, slug: str):
        """Initialize data holder."""
        self.coresys: CoreSys = coresys
        self.slug: str = slug

    @property
    def data(self) -> Data:

@@ -170,7 +170,7 @@ MACHINE_ALL = [


def _simple_startup(value):
    """Simple startup schema."""
    """Define startup schema."""
    if value == "before":
        return STARTUP_SERVICES
    if value == "after":
@@ -13,17 +13,17 @@ from .cli import APICli
from .discovery import APIDiscovery
from .dns import APICoreDNS
from .hardware import APIHardware
from .os import APIOS
from .homeassistant import APIHomeAssistant
from .host import APIHost
from .info import APIInfo
from .ingress import APIIngress
from .multicast import APIMulticast
from .os import APIOS
from .proxy import APIProxy
from .security import SecurityMiddleware
from .services import APIServices
from .snapshots import APISnapshots
from .supervisor import APISupervisor
from .multicast import APIMulticast

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -54,7 +54,6 @@ from ..const import (
    ATTR_INSTALLED,
    ATTR_IP_ADDRESS,
    ATTR_KERNEL_MODULES,
    ATTR_VERSION_LATEST,
    ATTR_LOGO,
    ATTR_LONG_DESCRIPTION,
    ATTR_MACHINE,
@@ -83,6 +82,7 @@ from ..const import (
    ATTR_UDEV,
    ATTR_URL,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    ATTR_VIDEO,
    ATTR_WEBUI,
    BOOT_AUTO,
@@ -18,7 +18,6 @@ from ..const import (
    ATTR_HOST,
    ATTR_INDEX,
    ATTR_INPUT,
    ATTR_VERSION_LATEST,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
@@ -27,17 +26,19 @@ from ..const import (
    ATTR_NETWORK_TX,
    ATTR_OUTPUT,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    ATTR_VOLUME,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..host.sound import StreamType
from ..validate import simple_version
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): simple_version})

SCHEMA_VOLUME = vol.Schema(
    {
@@ -108,7 +109,7 @@ class APIAudio(CoreSysAttributes):
        version = body.get(ATTR_VERSION, self.sys_plugins.audio.latest_version)

        if version == self.sys_plugins.audio.version:
            raise APIError("Version {} is already in use".format(version))
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_plugins.audio.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)

@@ -7,8 +7,6 @@ from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_CPU_PERCENT,
@@ -17,13 +15,16 @@ from ..const import (
    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from ..validate import simple_version
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): simple_version})


class APICli(CoreSysAttributes):
@@ -1,19 +1,18 @@
"""Init file for Supervisor network RESTful API."""
import voluptuous as vol

from .utils import api_process, api_validate
from ..const import (
    ATTR_ADDON,
    ATTR_UUID,
    ATTR_CONFIG,
    ATTR_DISCOVERY,
    ATTR_SERVICE,
    ATTR_UUID,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden
from ..discovery.validate import valid_discovery_service

from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate

SCHEMA_DISCOVERY = vol.Schema(
    {

@@ -11,7 +11,6 @@ from ..const import (
    ATTR_BLK_WRITE,
    ATTR_CPU_PERCENT,
    ATTR_HOST,
    ATTR_VERSION_LATEST,
    ATTR_LOCALS,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
@@ -20,11 +19,12 @@ from ..const import (
    ATTR_NETWORK_TX,
    ATTR_SERVERS,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import dns_server_list
from ..validate import dns_server_list, simple_version
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -32,7 +32,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): dns_server_list})

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): simple_version})


class APICoreDNS(CoreSysAttributes):
@@ -83,7 +83,7 @@ class APICoreDNS(CoreSysAttributes):
        version = body.get(ATTR_VERSION, self.sys_plugins.dns.latest_version)

        if version == self.sys_plugins.dns.version:
            raise APIError("Version {} is already in use".format(version))
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_plugins.dns.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)

@@ -5,16 +5,16 @@ from typing import Any, Dict

from aiohttp import web

from .utils import api_process
from ..const import (
    ATTR_SERIAL,
    ATTR_AUDIO,
    ATTR_DISK,
    ATTR_GPIO,
    ATTR_AUDIO,
    ATTR_INPUT,
    ATTR_OUTPUT,
    ATTR_SERIAL,
)
from ..coresys import CoreSysAttributes
from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -33,7 +33,7 @@ from ..const import (
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import docker_image, network_port
from ..validate import docker_image, network_port, complex_version
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -53,7 +53,7 @@ SCHEMA_OPTIONS = vol.Schema(
    }
)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): complex_version})


class APIHomeAssistant(CoreSysAttributes):
@@ -16,11 +16,11 @@ from multidict import CIMultiDict, istr
from ..addons.addon import Addon
from ..const import (
    ATTR_ADMIN,
    ATTR_ENABLE,
    ATTR_ICON,
    ATTR_PANELS,
    ATTR_SESSION,
    ATTR_TITLE,
    ATTR_PANELS,
    ATTR_ENABLE,
    COOKIE_INGRESS,
    HEADER_TOKEN,
    HEADER_TOKEN_OLD,
@@ -129,7 +129,7 @@ class APIIngress(CoreSysAttributes):

        # Support GET query
        if request.query_string:
            url = "{}?{}".format(url, request.query_string)
            url = f"{url}?{request.query_string}"

        # Start proxy
        async with self.sys_websession.ws_connect(

@@ -10,22 +10,23 @@ from ..const import (
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_CPU_PERCENT,
    ATTR_VERSION_LATEST,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import simple_version
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): simple_version})


class APIMulticast(CoreSysAttributes):
@@ -62,7 +63,7 @@ class APIMulticast(CoreSysAttributes):
        version = body.get(ATTR_VERSION, self.sys_plugins.multicast.latest_version)

        if version == self.sys_plugins.multicast.version:
            raise APIError("Version {} is already in use".format(version))
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_plugins.multicast.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)

@@ -6,18 +6,14 @@ from typing import Any, Awaitable, Dict
from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_BOARD,
    ATTR_BOOT,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
)
from ..const import ATTR_BOARD, ATTR_BOOT, ATTR_VERSION, ATTR_VERSION_LATEST
from ..coresys import CoreSysAttributes
from ..validate import complex_version
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): complex_version})


class APIOS(CoreSysAttributes):

@@ -5,12 +5,12 @@ import logging

import aiohttp
from aiohttp import web
from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized
from aiohttp.client_exceptions import ClientConnectorError
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE
from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized

from ..coresys import CoreSysAttributes
from ..exceptions import HomeAssistantAuthError, HomeAssistantAPIError, APIError
from ..exceptions import APIError, HomeAssistantAPIError, HomeAssistantAuthError

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -5,7 +5,6 @@ import re
from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPForbidden, HTTPUnauthorized

from .utils import excract_supervisor_token
from ..const import (
    REQUEST_FROM,
    ROLE_ADMIN,
@@ -15,6 +14,7 @@ from ..const import (
    ROLE_MANAGER,
)
from ..coresys import CoreSysAttributes
from .utils import excract_supervisor_token

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -1,16 +1,16 @@
"""Init file for Supervisor network RESTful API."""

from .utils import api_process, api_validate
from ..const import (
    ATTR_AVAILABLE,
    ATTR_PROVIDERS,
    ATTR_SLUG,
    ATTR_SERVICES,
    REQUEST_FROM,
    ATTR_SLUG,
    PROVIDE_SERVICE,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate


class APIServices(CoreSysAttributes):
@@ -7,26 +7,26 @@ from tempfile import TemporaryDirectory
from aiohttp import web
import voluptuous as vol

from .utils import api_process, api_validate
from ..snapshots.validate import ALL_FOLDERS
from ..const import (
    ATTR_NAME,
    ATTR_SLUG,
    ATTR_DATE,
    ATTR_ADDONS,
    ATTR_REPOSITORIES,
    ATTR_HOMEASSISTANT,
    ATTR_VERSION,
    ATTR_SIZE,
    ATTR_DATE,
    ATTR_FOLDERS,
    ATTR_TYPE,
    ATTR_SNAPSHOTS,
    ATTR_HOMEASSISTANT,
    ATTR_NAME,
    ATTR_PASSWORD,
    ATTR_PROTECTED,
    ATTR_REPOSITORIES,
    ATTR_SIZE,
    ATTR_SLUG,
    ATTR_SNAPSHOTS,
    ATTR_TYPE,
    ATTR_VERSION,
    CONTENT_TYPE_TAR,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..snapshots.validate import ALL_FOLDERS
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -20,12 +20,11 @@ from ..const import (
    ATTR_ICON,
    ATTR_INSTALLED,
    ATTR_IP_ADDRESS,
    ATTR_VERSION_LATEST,
    ATTR_LOGGING,
    ATTR_LOGO,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_USAGE,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NAME,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
@@ -34,16 +33,17 @@ from ..const import (
    ATTR_STATE,
    ATTR_TIMEZONE,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    ATTR_WAIT_BOOT,
    CONTENT_TYPE_BINARY,
    SUPERVISOR_VERSION,
    UpdateChannels,
    LogLevel,
    UpdateChannels,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..utils.validate import validate_timezone
from ..validate import repositories, wait_boot
from ..validate import repositories, wait_boot, simple_version
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -61,7 +61,7 @@ SCHEMA_OPTIONS = vol.Schema(
    }
)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): simple_version})


class APISupervisor(CoreSysAttributes):
@@ -157,7 +157,7 @@ class APISupervisor(CoreSysAttributes):
        version = body.get(ATTR_VERSION, self.sys_updater.version_supervisor)

        if version == self.sys_supervisor.version:
            raise APIError("Version {} is already in use".format(version))
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_supervisor.update(version))

    @api_process
@@ -182,7 +182,7 @@ def migrate_system_env(coresys: CoreSys):


def initialize_logging():
    """Setup the logging."""
    """Initialize the logging."""
    logging.basicConfig(level=logging.INFO)
    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    colorfmt = f"%(log_color)s{fmt}%(reset)s"
@@ -256,7 +256,7 @@ def reg_signal(loop):


def supervisor_debugger(coresys: CoreSys) -> None:
    """Setup debugger if needed."""
    """Start debugger if needed."""
    if not coresys.config.debug:
        return
    # pylint: disable=import-outside-toplevel

@@ -3,7 +3,7 @@ from enum import Enum
from ipaddress import ip_network
from pathlib import Path

SUPERVISOR_VERSION = "223"
SUPERVISOR_VERSION = "224"


URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"

@@ -57,7 +57,7 @@ class Core(CoreSysAttributes):
        )

    async def setup(self):
        """Setup supervisor orchestration."""
        """Start setting up supervisor orchestration."""
        self.state = CoreStates.STARTUP

        # Load DBus
@@ -1,7 +1,8 @@
"""Handle core shared data."""
from __future__ import annotations

import asyncio
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional, TypeVar

import aiohttp

@@ -34,6 +35,9 @@ if TYPE_CHECKING:
    from .plugins import PluginManager


T = TypeVar("T")


class CoreSys:
    """Class that handle all shared data."""

@@ -81,6 +85,8 @@ class CoreSys:
    @property
    def dev(self) -> bool:
        """Return True if we run dev mode."""
        if self._updater is None:
            return False
        return self._updater.channel == UpdateChannels.DEV

    @property
@@ -126,10 +132,12 @@ class CoreSys:
    @property
    def core(self) -> Core:
        """Return core object."""
        if self._core is None:
            raise RuntimeError("Core not set!")
        return self._core

    @core.setter
    def core(self, value: Core):
    def core(self, value: Core) -> None:
        """Set a Core object."""
        if self._core:
            raise RuntimeError("Core already set!")
@@ -138,10 +146,12 @@ class CoreSys:
    @property
    def plugins(self) -> PluginManager:
        """Return PluginManager object."""
        if self._plugins is None:
            raise RuntimeError("PluginManager not set!")
        return self._plugins

    @plugins.setter
    def plugins(self, value: PluginManager):
    def plugins(self, value: PluginManager) -> None:
        """Set a PluginManager object."""
        if self._plugins:
            raise RuntimeError("PluginManager already set!")
@@ -150,10 +160,12 @@ class CoreSys:
    @property
    def arch(self) -> CpuArch:
        """Return CpuArch object."""
        if self._arch is None:
            raise RuntimeError("CpuArch not set!")
        return self._arch

    @arch.setter
    def arch(self, value: CpuArch):
    def arch(self, value: CpuArch) -> None:
        """Set a CpuArch object."""
        if self._arch:
            raise RuntimeError("CpuArch already set!")
@@ -162,10 +174,12 @@ class CoreSys:
    @property
    def auth(self) -> Auth:
        """Return Auth object."""
        if self._auth is None:
            raise RuntimeError("Auth not set!")
        return self._auth

    @auth.setter
    def auth(self, value: Auth):
    def auth(self, value: Auth) -> None:
        """Set a Auth object."""
        if self._auth:
            raise RuntimeError("Auth already set!")
@@ -174,10 +188,12 @@ class CoreSys:
    @property
    def homeassistant(self) -> HomeAssistant:
        """Return Home Assistant object."""
        if self._homeassistant is None:
            raise RuntimeError("Home Assistant not set!")
        return self._homeassistant

    @homeassistant.setter
    def homeassistant(self, value: HomeAssistant):
    def homeassistant(self, value: HomeAssistant) -> None:
        """Set a HomeAssistant object."""
        if self._homeassistant:
            raise RuntimeError("Home Assistant already set!")
@@ -186,10 +202,12 @@ class CoreSys:
    @property
    def supervisor(self) -> Supervisor:
        """Return Supervisor object."""
        if self._supervisor is None:
            raise RuntimeError("Supervisor not set!")
        return self._supervisor

    @supervisor.setter
    def supervisor(self, value: Supervisor):
    def supervisor(self, value: Supervisor) -> None:
        """Set a Supervisor object."""
        if self._supervisor:
            raise RuntimeError("Supervisor already set!")
@@ -198,10 +216,12 @@ class CoreSys:
    @property
    def api(self) -> RestAPI:
        """Return API object."""
        if self._api is None:
            raise RuntimeError("API not set!")
        return self._api

    @api.setter
    def api(self, value: RestAPI):
    def api(self, value: RestAPI) -> None:
        """Set an API object."""
        if self._api:
            raise RuntimeError("API already set!")
@@ -210,10 +230,12 @@ class CoreSys:
    @property
    def updater(self) -> Updater:
        """Return Updater object."""
        if self._updater is None:
            raise RuntimeError("Updater not set!")
        return self._updater

    @updater.setter
    def updater(self, value: Updater):
    def updater(self, value: Updater) -> None:
        """Set a Updater object."""
        if self._updater:
            raise RuntimeError("Updater already set!")
@@ -222,10 +244,12 @@ class CoreSys:
    @property
    def secrets(self) -> SecretsManager:
        """Return SecretsManager object."""
        if self._secrets is None:
            raise RuntimeError("SecretsManager not set!")
        return self._secrets

    @secrets.setter
    def secrets(self, value: SecretsManager):
    def secrets(self, value: SecretsManager) -> None:
        """Set a Updater object."""
        if self._secrets:
            raise RuntimeError("SecretsManager already set!")
@@ -234,10 +258,12 @@ class CoreSys:
    @property
    def addons(self) -> AddonManager:
        """Return AddonManager object."""
        if self._addons is None:
            raise RuntimeError("AddonManager not set!")
        return self._addons

    @addons.setter
    def addons(self, value: AddonManager):
    def addons(self, value: AddonManager) -> None:
        """Set a AddonManager object."""
        if self._addons:
            raise RuntimeError("AddonManager already set!")
@@ -246,10 +272,12 @@ class CoreSys:
    @property
    def store(self) -> StoreManager:
        """Return StoreManager object."""
        if self._store is None:
            raise RuntimeError("StoreManager not set!")
        return self._store

    @store.setter
    def store(self, value: StoreManager):
    def store(self, value: StoreManager) -> None:
        """Set a StoreManager object."""
        if self._store:
            raise RuntimeError("StoreManager already set!")
@@ -258,10 +286,12 @@ class CoreSys:
    @property
    def snapshots(self) -> SnapshotManager:
        """Return SnapshotManager object."""
        if self._snapshots is None:
            raise RuntimeError("SnapshotManager not set!")
        return self._snapshots

    @snapshots.setter
    def snapshots(self, value: SnapshotManager):
    def snapshots(self, value: SnapshotManager) -> None:
        """Set a SnapshotManager object."""
        if self._snapshots:
            raise RuntimeError("SnapshotsManager already set!")
@@ -270,10 +300,12 @@ class CoreSys:
    @property
    def tasks(self) -> Tasks:
        """Return Tasks object."""
        if self._tasks is None:
            raise RuntimeError("Tasks not set!")
        return self._tasks

    @tasks.setter
    def tasks(self, value: Tasks):
    def tasks(self, value: Tasks) -> None:
        """Set a Tasks object."""
        if self._tasks:
            raise RuntimeError("Tasks already set!")
@@ -282,10 +314,12 @@ class CoreSys:
    @property
    def services(self) -> ServiceManager:
        """Return ServiceManager object."""
        if self._services is None:
            raise RuntimeError("Services not set!")
        return self._services

    @services.setter
    def services(self, value: ServiceManager):
    def services(self, value: ServiceManager) -> None:
        """Set a ServiceManager object."""
        if self._services:
            raise RuntimeError("Services already set!")
@@ -294,10 +328,12 @@ class CoreSys:
    @property
    def discovery(self) -> Discovery:
        """Return ServiceManager object."""
        if self._discovery is None:
            raise RuntimeError("Discovery not set!")
        return self._discovery

    @discovery.setter
    def discovery(self, value: Discovery):
    def discovery(self, value: Discovery) -> None:
        """Set a Discovery object."""
        if self._discovery:
            raise RuntimeError("Discovery already set!")
@@ -306,10 +342,12 @@ class CoreSys:
    @property
    def dbus(self) -> DBusManager:
        """Return DBusManager object."""
        if self._dbus is None:
            raise RuntimeError("DBusManager not set!")
        return self._dbus

    @dbus.setter
    def dbus(self, value: DBusManager):
    def dbus(self, value: DBusManager) -> None:
        """Set a DBusManager object."""
        if self._dbus:
            raise RuntimeError("DBusManager already set!")
@@ -318,10 +356,12 @@ class CoreSys:
    @property
    def host(self) -> HostManager:
        """Return HostManager object."""
        if self._host is None:
            raise RuntimeError("HostManager not set!")
        return self._host

    @host.setter
    def host(self, value: HostManager):
    def host(self, value: HostManager) -> None:
        """Set a HostManager object."""
        if self._host:
            raise RuntimeError("HostManager already set!")
@@ -330,10 +370,12 @@ class CoreSys:
    @property
    def hwmonitor(self) -> HwMonitor:
        """Return HwMonitor object."""
        if self._hwmonitor is None:
            raise RuntimeError("HwMonitor not set!")
        return self._hwmonitor

    @hwmonitor.setter
    def hwmonitor(self, value: HwMonitor):
    def hwmonitor(self, value: HwMonitor) -> None:
        """Set a HwMonitor object."""
        if self._hwmonitor:
            raise RuntimeError("HwMonitor already set!")
@@ -342,10 +384,12 @@ class CoreSys:
    @property
    def ingress(self) -> Ingress:
        """Return Ingress object."""
        if self._ingress is None:
            raise RuntimeError("Ingress not set!")
        return self._ingress

    @ingress.setter
    def ingress(self, value: Ingress):
    def ingress(self, value: Ingress) -> None:
        """Set a Ingress object."""
        if self._ingress:
            raise RuntimeError("Ingress already set!")
@@ -354,10 +398,12 @@ class CoreSys:
    @property
    def hassos(self) -> HassOS:
        """Return HassOS object."""
        if self._hassos is None:
            raise RuntimeError("HassOS not set!")
        return self._hassos

    @hassos.setter
    def hassos(self, value: HassOS):
    def hassos(self, value: HassOS) -> None:
        """Set a HassOS object."""
        if self._hassos:
            raise RuntimeError("HassOS already set!")
@@ -369,7 +415,7 @@ class CoreSys:
        return self._machine

    @machine.setter
    def machine(self, value: str):
    def machine(self, value: str) -> None:
        """Set a machine type string."""
        if self._machine:
            raise RuntimeError("Machine type already set!")
@@ -381,7 +427,7 @@ class CoreSys:
        return self._machine_id

    @machine_id.setter
    def machine_id(self, value: str):
    def machine_id(self, value: str) -> None:
        """Set a machine-id type string."""
        if self._machine_id:
            raise RuntimeError("Machine-ID type already set!")
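The CoreSys hunks above all add the same guard: each property raises RuntimeError when it is read before the object has been attached, and each setter (now annotated -> None) refuses to rebind an object that is already set. A minimal, self-contained sketch of that pattern follows; it is illustrative only, with a placeholder Updater class standing in for any of the managed objects.

# Minimal sketch of the guarded accessor pattern used throughout CoreSys
# (illustrative only; "Updater" is a placeholder, not the Supervisor class).
from typing import Optional


class Updater:
    """Placeholder for a managed object such as supervisor.updater.Updater."""


class MiniCoreSys:
    """Container that hands out shared objects exactly once."""

    def __init__(self) -> None:
        self._updater: Optional[Updater] = None

    @property
    def updater(self) -> Updater:
        """Return Updater object, failing loudly if it was never attached."""
        if self._updater is None:
            raise RuntimeError("Updater not set!")
        return self._updater

    @updater.setter
    def updater(self, value: Updater) -> None:
        """Attach the Updater object; rebinding it later is a programming error."""
        if self._updater:
            raise RuntimeError("Updater already set!")
        self._updater = value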
@@ -389,17 +435,17 @@ class CoreSys:


class CoreSysAttributes:
    """Inheret basic CoreSysAttributes."""
    """Inherit basic CoreSysAttributes."""

    coresys = None
    coresys: CoreSys

    @property
    def sys_machine(self) -> str:
    def sys_machine(self) -> Optional[str]:
        """Return running machine type of the Supervisor system."""
        return self.coresys.machine

    @property
    def sys_dev(self) -> str:
    def sys_dev(self) -> bool:
        """Return True if we run dev mode."""
        return self.coresys.dev

@@ -409,7 +455,7 @@ class CoreSysAttributes:
        return self.coresys.timezone

    @property
    def sys_machine_id(self) -> str:
    def sys_machine_id(self) -> Optional[str]:
        """Return timezone."""
        return self.coresys.machine_id

@@ -548,10 +594,12 @@ class CoreSysAttributes:
        """Return HassOS object."""
        return self.coresys.hassos

    def sys_run_in_executor(self, funct, *args) -> asyncio.Future:
        """Wrapper for executor pool."""
    def sys_run_in_executor(
        self, funct: Callable[..., T], *args: Any
    ) -> Coroutine[Any, Any, T]:
        """Add an job to the executor pool."""
        return self.sys_loop.run_in_executor(None, funct, *args)

    def sys_create_task(self, coroutine) -> asyncio.Task:
        """Wrapper for async task."""
    def sys_create_task(self, coroutine: Coroutine) -> asyncio.Task:
        """Create an async task."""
        return self.sys_loop.create_task(coroutine)
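sys_run_in_executor now carries a TypeVar through its signature, so a type checker can tell that awaiting the wrapper yields whatever the wrapped callable returns. The snippet below is a standalone, illustrative sketch of that typing; it mirrors the Coroutine annotation used above, and at runtime the returned Future is awaitable either way.

# Illustrative sketch: the TypeVar lets "await" keep the callable's return type.
import asyncio
from typing import Any, Callable, Coroutine, TypeVar

T = TypeVar("T")


def run_in_executor(
    loop: asyncio.AbstractEventLoop, funct: Callable[..., T], *args: Any
) -> Coroutine[Any, Any, T]:
    """Run a blocking callable in the default executor, keeping its return type."""
    # run_in_executor actually returns a Future, but it is awaitable; the
    # annotation here mirrors the one used in the diff above.
    return loop.run_in_executor(None, funct, *args)


async def main() -> None:
    loop = asyncio.get_running_loop()
    result = await run_in_executor(loop, sum, [1, 2, 3])  # inferred as int
    print(result)


asyncio.run(main())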
@@ -1,12 +1,12 @@
"""D-Bus interface objects."""
import logging

from .systemd import Systemd
from .hostname import Hostname
from .rauc import Rauc
from .nmi_dns import NMIDnsManager
from ..coresys import CoreSysAttributes, CoreSys
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import DBusNotConnectedError
from .hostname import Hostname
from .nmi_dns import NMIDnsManager
from .rauc import Rauc
from .systemd import Systemd

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -2,10 +2,10 @@
import logging
from typing import Optional

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError, DBusInterfaceError
from ..utils.gdbus import DBus
from .interface import DBusInterface
from .utils import dbus_connected

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -1,13 +1,13 @@
"""D-Bus interface for hostname."""
import logging
from typing import Optional, List
from typing import List, Optional

import attr

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError, DBusInterfaceError
from ..utils.gdbus import DBus
from .interface import DBusInterface
from .utils import dbus_connected

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -1,12 +1,12 @@
"""D-Bus interface for rauc."""
from enum import Enum
import logging
from typing import Optional
from enum import Enum

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError, DBusInterfaceError
from ..utils.gdbus import DBus
from .interface import DBusInterface
from .utils import dbus_connected

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -1,10 +1,10 @@
"""Interface to Systemd over D-Bus."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError, DBusInterfaceError
from ..utils.gdbus import DBus
from .interface import DBusInterface
from .utils import dbus_connected

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -4,7 +4,7 @@ from ..exceptions import DBusNotConnectedError


def dbus_connected(method):
    """Wrapper for check if D-Bus is connected."""
    """Wrap check if D-Bus is connected."""

    def wrap_dbus(api, *args, **kwargs):
        """Check if D-Bus is connected before call a method."""

@@ -3,8 +3,8 @@ from __future__ import annotations

from contextlib import suppress
import logging
from typing import Any, Dict, List, Optional, TYPE_CHECKING
from uuid import uuid4, UUID
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from uuid import UUID, uuid4

import attr
import voluptuous as vol

@@ -5,7 +5,6 @@ from supervisor.validate import network_port

from ..const import ATTR_HOST, ATTR_PORT


SCHEMA = vol.Schema(
    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): network_port}
)

@@ -5,7 +5,6 @@ from supervisor.validate import network_port

from ..const import ATTR_HOST, ATTR_PORT


SCHEMA = vol.Schema(
    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): network_port}
)

@@ -3,8 +3,7 @@ import voluptuous as vol

from supervisor.validate import network_port

from ..const import ATTR_HOST, ATTR_PORT, ATTR_API_KEY, ATTR_SERIAL

from ..const import ATTR_API_KEY, ATTR_HOST, ATTR_PORT, ATTR_SERIAL

SCHEMA = vol.Schema(
    {

@@ -5,7 +5,6 @@ from supervisor.validate import network_port

from ..const import ATTR_HOST, ATTR_PORT


SCHEMA = vol.Schema(
    {
        vol.Coerce(str): vol.Schema(

@@ -3,12 +3,7 @@ import voluptuous as vol

from supervisor.validate import network_port

from ..const import (
    ATTR_HOST,
    ATTR_PASSWORD,
    ATTR_PORT,
    ATTR_USERNAME,
)
from ..const import ATTR_HOST, ATTR_PASSWORD, ATTR_PORT, ATTR_USERNAME

# pylint: disable=no-value-for-parameter
SCHEMA = vol.Schema(

@@ -5,7 +5,6 @@ from supervisor.validate import network_port

from ..const import ATTR_HOST, ATTR_PORT


SCHEMA = vol.Schema(
    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): network_port}
)

@@ -1,6 +1,6 @@
"""Validate services schema."""
from pathlib import Path
from importlib import import_module
from pathlib import Path

import voluptuous as vol


@@ -8,7 +8,7 @@ import attr
import docker
from packaging import version as pkg_version

from ..const import SOCKET_DOCKER, DNS_SUFFIX
from ..const import DNS_SUFFIX, SOCKET_DOCKER
from ..exceptions import DockerAPIError
from .network import DockerNetwork

@@ -97,7 +97,7 @@ class DockerAPI:
        ipv4: Optional[IPv4Address] = None,
        **kwargs: Dict[str, Any],
    ) -> docker.models.containers.Container:
        """"Create a Docker container and run it.
        """Create a Docker container and run it.

        Need run inside executor.
        """

@@ -164,7 +164,7 @@ class DockerAddon(DockerInterface):

    @property
    def security_opt(self) -> List[str]:
        """Controlling security options."""
        """Control security options."""
        security = []

        # AppArmor
@@ -175,7 +175,7 @@ class DockerAddon(DockerInterface):
            security.append(f"apparmor={self.addon.slug}")

        # Disable Seccomp / We don't support it official and it
        # make troubles on some kind of host systems.
        # causes problems on some types of host systems.
        security.append("seccomp=unconfined")

        return security

@@ -2,10 +2,10 @@
from contextlib import suppress
import logging

from ..const import ENV_TIME, ENV_TOKEN
from ..coresys import CoreSysAttributes
from ..exceptions import DockerAPIError
from .interface import DockerInterface
from ..const import ENV_TIME, ENV_TOKEN

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -227,9 +227,9 @@ class DockerInterface(CoreSysAttributes):
        return self.sys_run_in_executor(self._remove)

    def _remove(self) -> None:
        """remove docker images.
        """Remove docker images.

        Need run inside executor.
        Needs run inside executor.
        """
        # Cleanup container
        with suppress(DockerAPIError):
@@ -8,13 +8,9 @@ import aiohttp
from cpe import CPE

from .const import URL_HASSOS_OTA
from .coresys import CoreSysAttributes, CoreSys
from .exceptions import (
    DBusError,
    HassOSNotSupportedError,
    HassOSUpdateError,
)
from .coresys import CoreSys, CoreSysAttributes
from .dbus.rauc import RaucState
from .exceptions import DBusError, HassOSNotSupportedError, HassOSUpdateError

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -483,7 +483,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
        return ConfigResult(True, log)

    async def ensure_access_token(self) -> None:
        """Ensures there is an access token."""
        """Ensure there is an access token."""
        if (
            self.access_token is not None
            and self._access_token_expires > datetime.utcnow()

@@ -1,7 +1,7 @@
"""AppArmor control for host."""
import logging
import shutil
from pathlib import Path
import shutil

from ..coresys import CoreSysAttributes
from ..exceptions import DBusError, HostAppArmorError

@@ -5,10 +5,10 @@ from typing import Optional

from ..coresys import CoreSysAttributes
from ..exceptions import (
    HostNotSupportedError,
    HostError,
    DBusNotConnectedError,
    DBusError,
    DBusNotConnectedError,
    HostError,
    HostNotSupportedError,
)

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -2,8 +2,8 @@
import logging
from typing import List

from ..coresys import CoreSysAttributes, CoreSys
from ..exceptions import HostNotSupportedError, DBusNotConnectedError, DBusError
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import DBusError, DBusNotConnectedError, HostNotSupportedError

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -20,7 +20,7 @@ class ServiceManager(CoreSysAttributes):
        self._services = set()

    def __iter__(self):
        """Iterator trought services."""
        """Iterate through services."""
        return iter(self._services)

    def _check_dbus(self, unit=None):

@@ -6,7 +6,7 @@ from typing import Optional

import pyudev

from .coresys import CoreSysAttributes, CoreSys
from .coresys import CoreSys, CoreSysAttributes
from .utils import AsyncCallFilter

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -1,8 +1,8 @@
"""Setup the internal DNS service for host applications."""
import asyncio
from ipaddress import IPv4Address
import logging
import shlex
from ipaddress import IPv4Address
from typing import Optional

import async_timeout

@@ -414,6 +414,7 @@ class CoreDNS(JsonConfig, CoreSysAttributes):

    def is_fails(self) -> Awaitable[bool]:
        """Return True if a Docker container is fails state.

        Return a coroutine.
        """
        return self.instance.is_fails()

@@ -192,6 +192,7 @@ class Multicast(JsonConfig, CoreSysAttributes):

    def is_fails(self) -> Awaitable[bool]:
        """Return True if a Docker container is fails state.

        Return a coroutine.
        """
        return self.instance.is_fails()
@@ -3,12 +3,11 @@
import voluptuous as vol

from ..const import ATTR_ACCESS_TOKEN, ATTR_IMAGE, ATTR_SERVERS, ATTR_VERSION
from ..validate import dns_server_list, docker_image, token

from ..validate import dns_server_list, docker_image, token, simple_version

SCHEMA_DNS_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_VERSION): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_VERSION): simple_version,
        vol.Optional(ATTR_IMAGE): docker_image,
        vol.Optional(ATTR_SERVERS, default=list): dns_server_list,
    },
@@ -18,7 +17,7 @@ SCHEMA_DNS_CONFIG = vol.Schema(

SCHEMA_AUDIO_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_VERSION): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_VERSION): simple_version,
        vol.Optional(ATTR_IMAGE): docker_image,
    },
    extra=vol.REMOVE_EXTRA,
@@ -27,7 +26,7 @@ SCHEMA_AUDIO_CONFIG = vol.Schema(

SCHEMA_CLI_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_VERSION): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_VERSION): simple_version,
        vol.Optional(ATTR_IMAGE): docker_image,
        vol.Optional(ATTR_ACCESS_TOKEN): token,
    },
@@ -37,7 +36,7 @@ SCHEMA_CLI_CONFIG = vol.Schema(

SCHEMA_MULTICAST_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_VERSION): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_VERSION): simple_version,
        vol.Optional(ATTR_IMAGE): docker_image,
    },
    extra=vol.REMOVE_EXTRA,

@@ -2,10 +2,11 @@
import logging
from typing import Any, Dict, List

import voluptuous as vol

from supervisor.addons.addon import Addon
from supervisor.exceptions import ServicesError
from supervisor.validate import network_port
import voluptuous as vol

from ..const import (
    ATTR_ADDON,

@@ -2,10 +2,11 @@
import logging
from typing import Any, Dict, List

import voluptuous as vol

from supervisor.addons.addon import Addon
from supervisor.exceptions import ServicesError
from supervisor.validate import network_port
import voluptuous as vol

from ..const import (
    ATTR_ADDON,

@@ -6,7 +6,6 @@ from .const import SERVICE_MQTT, SERVICE_MYSQL
from .modules.mqtt import SCHEMA_CONFIG_MQTT
from .modules.mysql import SCHEMA_CONFIG_MYSQL


SCHEMA_SERVICES_CONFIG = vol.Schema(
    {
        vol.Optional(SERVICE_MQTT, default=dict): schema_or(SCHEMA_CONFIG_MQTT),

@@ -3,11 +3,11 @@ import asyncio
import logging
from pathlib import Path

from .snapshot import Snapshot
from .utils import create_slug
from ..const import FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL
from ..coresys import CoreSysAttributes
from ..utils.dt import utcnow
from .snapshot import Snapshot
from .utils import create_slug

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -58,7 +58,7 @@ class SnapshotManager(CoreSysAttributes):
        self.snapshots_obj = {}

        async def _load_snapshot(tar_file):
            """Internal function to load snapshot."""
            """Load the snapshot."""
            snapshot = Snapshot(self.coresys, tar_file)
            if await snapshot.load():
                self.snapshots_obj[snapshot.slug] = snapshot

@@ -356,7 +356,7 @@ class Snapshot(CoreSysAttributes):
        folder_list = set(folder_list or ALL_FOLDERS)

        def _folder_save(name):
            """Internal function to snapshot a folder."""
            """Take snapshot of a folder."""
            slug_name = name.replace("/", "_")
            tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
            origin_dir = Path(self.sys_config.path_supervisor, name)

@@ -1,7 +1,7 @@
"""Util add-on functions."""
import hashlib
import shutil
import re
import shutil

RE_DIGITS = re.compile(r"\d+")

@@ -33,7 +33,7 @@ def key_to_iv(key):

def create_slug(name, date_str):
    """Generate a hash from repository."""
    key = "{} - {}".format(date_str, name).lower().encode()
    key = f"{date_str} - {name}".lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]
@@ -31,14 +31,14 @@ from ..const import (
    SNAPSHOT_FULL,
    SNAPSHOT_PARTIAL,
)
from ..validate import docker_image, network_port, repositories
from ..validate import docker_image, network_port, repositories, complex_version

ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]


def unique_addons(addons_list):
    """Validate that an add-on is unique."""
    single = set(addon[ATTR_SLUG] for addon in addons_list)
    single = {addon[ATTR_SLUG] for addon in addons_list}

    if len(single) != len(addons_list):
        raise vol.Invalid("Invalid addon list on snapshot!")
@@ -58,7 +58,7 @@ SCHEMA_SNAPSHOT = vol.Schema(
        vol.Inclusive(ATTR_CRYPTO, "encrypted"): CRYPTO_AES128,
        vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema(
            {
                vol.Optional(ATTR_VERSION): vol.Coerce(str),
                vol.Optional(ATTR_VERSION): complex_version,
                vol.Optional(ATTR_IMAGE): docker_image,
                vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
                vol.Optional(ATTR_SSL, default=False): vol.Boolean(),

@@ -11,7 +11,7 @@ from .repository import Repository

_LOGGER: logging.Logger = logging.getLogger(__name__)

BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
BUILTIN_REPOSITORIES = {REPOSITORY_CORE, REPOSITORY_LOCAL}


class StoreManager(CoreSysAttributes):
@@ -55,7 +55,7 @@ class StoreManager(CoreSysAttributes):

        # add new repository
        async def _add_repository(url):
            """Helper function to async add repository."""
            """Add a repository."""
            repository = Repository(self.coresys, url)
            if not await repository.load():
                _LOGGER.error("Can't load from repository %s", url)

@@ -1,7 +1,6 @@
"""Init file for Supervisor add-ons."""
import logging

from ..coresys import CoreSys
from ..addons.model import AddonModel, Data

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -10,11 +9,6 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
class AddonStore(AddonModel):
    """Hold data for add-on inside Supervisor."""

    def __init__(self, coresys: CoreSys, slug: str):
        """Initialize data holder."""
        self.coresys: CoreSys = coresys
        self.slug: str = slug

    @property
    def data(self) -> Data:
        """Return add-on data/config."""

@@ -91,7 +91,7 @@ class StoreData(CoreSysAttributes):
            continue

        # Generate slug
        addon_slug = "{}_{}".format(repository, addon_config[ATTR_SLUG])
        addon_slug = f"{repository}_{addon_config[ATTR_SLUG]}"

        # store
        addon_config[ATTR_REPOSITORY] = repository

@@ -1,16 +1,16 @@
"""Init file for Supervisor add-on Git."""
import asyncio
import logging
import functools as ft
import logging
from pathlib import Path
import shutil

import git

from .utils import get_hash_from_repository
from ..const import URL_HASSIO_ADDONS, ATTR_URL, ATTR_BRANCH
from ..const import ATTR_BRANCH, ATTR_URL, URL_HASSIO_ADDONS
from ..coresys import CoreSysAttributes
from ..validate import RE_REPOSITORY
from .utils import get_hash_from_repository

_LOGGER: logging.Logger = logging.getLogger(__name__)


@@ -1,15 +1,15 @@
"""Represent a Supervisor repository."""
from .git import GitRepoHassIO, GitRepoCustom
from .utils import get_hash_from_repository
from ..const import (
    REPOSITORY_CORE,
    REPOSITORY_LOCAL,
    ATTR_MAINTAINER,
    ATTR_NAME,
    ATTR_URL,
    ATTR_MAINTAINER,
    REPOSITORY_CORE,
    REPOSITORY_LOCAL,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from .git import GitRepoCustom, GitRepoHassIO
from .utils import get_hash_from_repository

UNKNOWN = "unknown"


@@ -2,8 +2,7 @@

import voluptuous as vol

from ..const import ATTR_NAME, ATTR_URL, ATTR_MAINTAINER

from ..const import ATTR_MAINTAINER, ATTR_NAME, ATTR_URL

# pylint: disable=no-value-for-parameter
SCHEMA_REPOSITORY_CONFIG = vol.Schema(

@@ -9,7 +9,7 @@ from typing import Awaitable, Optional

import aiohttp

from .const import URL_HASSIO_APPARMOR, SUPERVISOR_VERSION
from .const import SUPERVISOR_VERSION, URL_HASSIO_APPARMOR
from .coresys import CoreSys, CoreSysAttributes
from .docker.stats import DockerStats
from .docker.supervisor import DockerSupervisor
@@ -34,7 +34,8 @@ def process_lock(method):


class AsyncThrottle:
    """
    """A class for throttling the execution of tasks.

    Decorator that prevents a function from being called more than once every
    time period with blocking.
    """
@@ -46,10 +47,10 @@ class AsyncThrottle:
        self.synchronize: Optional[asyncio.Lock] = None

    def __call__(self, method):
        """Throttle function"""
        """Throttle function."""

        async def wrapper(*args, **kwargs):
            """Throttle function wrapper"""
            """Throttle function wrapper."""
            if not self.synchronize:
                self.synchronize = asyncio.Lock()

@@ -65,7 +66,8 @@ class AsyncThrottle:


class AsyncCallFilter:
    """
    """A class for throttling the execution of tasks, with a filter.

    Decorator that prevents a function from being called more than once every
    time period.
    """
@@ -76,10 +78,10 @@ class AsyncCallFilter:
        self.time_of_last_call = datetime.min

    def __call__(self, method):
        """Throttle function"""
        """Throttle function."""

        async def wrapper(*args, **kwargs):
            """Throttle function wrapper"""
            """Throttle function wrapper."""
            now = datetime.now()
            time_since_last_call = now - self.time_of_last_call
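The docstrings above describe AsyncThrottle and AsyncCallFilter as decorators that keep a coroutine from running more than once per time period. Their constructors are not shown in this diff, so the snippet below is only a standalone sketch of the same idea, not the Supervisor classes themselves.

# Standalone sketch of the throttling idea the docstrings describe.
import asyncio
from datetime import datetime, timedelta


def async_throttle(period: timedelta):
    """Skip calls that arrive sooner than `period` after the previous one."""

    def decorator(method):
        last_call = datetime.min

        async def wrapper(*args, **kwargs):
            nonlocal last_call
            now = datetime.now()
            if now - last_call < period:
                return None  # call filtered out
            last_call = now
            return await method(*args, **kwargs)

        return wrapper

    return decorator


@async_throttle(timedelta(seconds=30))
async def refresh_data() -> None:
    print("refreshing")


asyncio.run(refresh_data())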
@@ -2,19 +2,19 @@
from __future__ import annotations

import asyncio
import logging
import json
import shlex
import logging
import re
import shlex
from signal import SIGINT
from typing import Any, Dict, List, Optional, Set
import xml.etree.ElementTree as ET

from ..exceptions import (
    DBusFatalError,
    DBusParseError,
    DBusInterfaceError,
    DBusNotConnectedError,
    DBusParseError,
)

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -223,7 +223,7 @@ class DBus:
        return signal

    def __getattr__(self, name: str) -> DBusCallWrapper:
        """Mapping to dbus method."""
        """Map to dbus method."""
        return getattr(DBusCallWrapper(self, self.bus_name), name)


@@ -236,12 +236,12 @@ class DBusCallWrapper:
        self.interface: str = interface

    def __call__(self) -> None:
        """Should never be called."""
        """Catch this method from being called."""
        _LOGGER.error("DBus method %s not exists!", self.interface)
        raise DBusFatalError()

    def __getattr__(self, name: str):
        """Mapping to dbus method."""
        """Map to dbus method."""
        interface = f"{self.interface}.{name}"

        if interface not in self.dbus.methods:

@@ -140,7 +140,7 @@ def exclude_filter(
    """Create callable filter function to check TarInfo for add."""

    def my_filter(tar: tarfile.TarInfo) -> Optional[tarfile.TarInfo]:
        """Custom exclude filter."""
        """Filter to filter excludes."""
        file_path = Path(tar.name)
        for exclude in exclude_list:
            if not file_path.match(exclude):

@@ -8,7 +8,7 @@ def schema_or(schema):
    """Allow schema or empty."""

    def _wrapper(value):
        """Wrapper for validator."""
        """Define a wrapper for validator."""
        if not value:
            return value
        return schema(value)
@@ -2,8 +2,10 @@
import ipaddress
import re
import uuid
from typing import Optional, Union

import voluptuous as vol
from packaging import version as pkg_version

from .const import (
    ATTR_ACCESS_TOKEN,
@@ -51,15 +53,38 @@ sha256 = vol.Match(r"^[0-9a-f]{64}$")
token = vol.Match(r"^[0-9a-f]{32,256}$")


def simple_version(value: Union[str, int, None]) -> Optional[str]:
    """Validate main version handling."""
    if not isinstance(value, (str, int)):
        return None
    elif isinstance(value, int):
        return str(value)
    elif value.isnumeric() or value == "dev":
        return value
    return None


def complex_version(value: Union[str, None]) -> Optional[str]:
    """Validate main version handling."""
    if not isinstance(value, str):
        return None

    try:
        pkg_version.parse(value)
    except pkg_version.InvalidVersion:
        raise vol.Invalid(f"Invalid version format {value}")
    return value


def dns_url(url: str) -> str:
    """ takes a DNS url (str) and validates that it matches the scheme dns://<ip address>."""
    """Take a DNS url (str) and validates that it matches the scheme dns://<ip address>."""
    if not url.lower().startswith("dns://"):
        raise vol.Invalid("Doesn't start with dns://")
    address: str = url[6:]  # strip the dns:// off
    try:
        ipaddress.ip_address(address)  # matches ipv4 or ipv6 addresses
    except ValueError:
        raise vol.Invalid("Invalid DNS URL: {}".format(url))
        raise vol.Invalid(f"Invalid DNS URL: {url}")
    return url
@ -100,7 +125,7 @@ DOCKER_PORTS_DESCRIPTION = vol.Schema(
SCHEMA_HASS_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
        vol.Optional(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_VERSION): complex_version,
        vol.Optional(ATTR_IMAGE): docker_image,
        vol.Optional(ATTR_ACCESS_TOKEN): token,
        vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
@ -123,13 +148,13 @@ SCHEMA_UPDATER_CONFIG = vol.Schema(
        vol.Optional(ATTR_CHANNEL, default=UpdateChannels.STABLE): vol.Coerce(
            UpdateChannels
        ),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
        vol.Optional(ATTR_SUPERVISOR): vol.Coerce(str),
        vol.Optional(ATTR_HASSOS): vol.Coerce(str),
        vol.Optional(ATTR_CLI): vol.Coerce(str),
        vol.Optional(ATTR_DNS): vol.Coerce(str),
        vol.Optional(ATTR_AUDIO): vol.Coerce(str),
        vol.Optional(ATTR_MULTICAST): vol.Coerce(str),
        vol.Optional(ATTR_HOMEASSISTANT): complex_version,
        vol.Optional(ATTR_SUPERVISOR): simple_version,
        vol.Optional(ATTR_HASSOS): complex_version,
        vol.Optional(ATTR_CLI): simple_version,
        vol.Optional(ATTR_DNS): simple_version,
        vol.Optional(ATTR_AUDIO): simple_version,
        vol.Optional(ATTR_MULTICAST): simple_version,
        vol.Optional(ATTR_IMAGE, default=dict): vol.Schema(
            {
                vol.Optional(ATTR_HOMEASSISTANT): docker_image,
@ -151,7 +176,7 @@ SCHEMA_SUPERVISOR_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_TIMEZONE, default="UTC"): validate_timezone,
        vol.Optional(ATTR_LAST_BOOT): vol.Coerce(str),
        vol.Optional(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_VERSION): simple_version,
        vol.Optional(
            ATTR_ADDONS_CUSTOM_LIST,
            default=["https://github.com/hassio-addons/repository"],

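Taken together, these hunks replace the loose `vol.Coerce(str)` version fields with the two new validators: `simple_version` accepts only plain numeric builds or "dev", `complex_version` accepts anything `packaging` can parse, and `dns_url` still guards the DNS entries. A short hedged illustration of the expected behaviour, assuming the supervisor package is importable; the commented outputs follow directly from the code in the hunks above:

import voluptuous as vol

from supervisor.validate import complex_version, dns_url, simple_version

print(simple_version(227))         # -> "227" (ints are coerced to str)
print(simple_version("dev"))       # -> "dev"
print(simple_version("0.110.1"))   # -> None (dots mean it is not purely numeric)

print(complex_version("0.110.1"))  # -> "0.110.1" (parses as a valid version)

print(dns_url("dns://8.8.8.8"))    # -> "dns://8.8.8.8"
try:
    dns_url("https://foo.bar")     # wrong scheme
except vol.Invalid as err:
    print(err)                     # -> Doesn't start with dns://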
@ -1,7 +1,7 @@
"""Validate Add-on configs."""

import voluptuous as vol
import pytest
import voluptuous as vol

from supervisor.addons import validate as vd

@ -48,7 +48,7 @@ def test_invalid_repository():


def test_valid_repository():
    """Validate basic config with different valid repositories"""
    """Validate basic config with different valid repositories."""
    config = load_json_fixture("basic-addon-config.json")

    custom_registry = "registry.gitlab.com/company/add-ons/core/test-example"
@ -58,7 +58,7 @@ def test_valid_repository():


def test_valid_map():
    """Validate basic config with different valid maps"""
    """Validate basic config with different valid maps."""
    config = load_json_fixture("basic-addon-config.json")

    config["map"] = ["backup:rw", "ssl:ro", "config"]

@ -1,5 +1,5 @@
"""Common test functions."""
from unittest.mock import patch, PropertyMock, MagicMock
from unittest.mock import MagicMock, PropertyMock, patch

import pytest

@ -1,7 +1,7 @@
"""Test adguard discovery."""

import voluptuous as vol
import pytest
import voluptuous as vol

from supervisor.discovery.validate import valid_discovery_config

@ -1,7 +1,7 @@
"""Test adguard discovery."""

import voluptuous as vol
import pytest
import voluptuous as vol

from supervisor.discovery.validate import valid_discovery_config

@ -1,7 +1,7 @@
"""Test DeConz discovery."""

import voluptuous as vol
import pytest
import voluptuous as vol

from supervisor.discovery.validate import valid_discovery_config

@ -1,7 +1,7 @@
"""Test HomeMatic discovery."""

import voluptuous as vol
import pytest
import voluptuous as vol

from supervisor.discovery.validate import valid_discovery_config

@ -1,7 +1,7 @@
"""Test MQTT discovery."""

import voluptuous as vol
import pytest
import voluptuous as vol

from supervisor.discovery.validate import valid_discovery_config

@ -1,7 +1,7 @@
"""Test Zwave MQTT discovery."""

import voluptuous as vol
import pytest
import voluptuous as vol

from supervisor.discovery.validate import valid_discovery_config

@ -1,7 +1,7 @@
"""Test unifi discovery."""

import voluptuous as vol
import pytest
import voluptuous as vol

from supervisor.discovery.validate import valid_discovery_config

@ -1,7 +1,7 @@
"""Test validate of discovery."""

import voluptuous as vol
import pytest
import voluptuous as vol

from supervisor.discovery import validate

@ -1,8 +1,8 @@
"""Test hardware utils."""
from unittest.mock import patch, PropertyMock
from pathlib import Path
from unittest.mock import PropertyMock, patch

from supervisor.misc.hardware import Hardware, Device
from supervisor.misc.hardware import Device, Hardware


def test_read_all_devices():

@ -1,8 +1,9 @@
"""Test validators."""

import supervisor.validate
import voluptuous.error
import pytest
import voluptuous.error

import supervisor.validate

GOOD_V4 = [
    "dns://10.0.0.1",  # random local
@ -18,29 +19,29 @@ BAD = ["hello world", "https://foo.bar", "", "dns://example.com"]


async def test_dns_url_v4_good():
    """ tests the DNS validator with known-good ipv6 DNS URLs """
    """Test the DNS validator with known-good ipv6 DNS URLs."""
    for url in GOOD_V4:
        assert supervisor.validate.dns_url(url)


async def test_dns_url_v6_good():
    """ tests the DNS validator with known-good ipv6 DNS URLs """
    """Test the DNS validator with known-good ipv6 DNS URLs."""
    for url in GOOD_V6:
        assert supervisor.validate.dns_url(url)


async def test_dns_server_list_v4():
    """ test a list with v4 addresses """
    """Test a list with v4 addresses."""
    assert supervisor.validate.dns_server_list(GOOD_V4)


async def test_dns_server_list_v6():
    """ test a list with v6 addresses """
    """Test a list with v6 addresses."""
    assert supervisor.validate.dns_server_list(GOOD_V6)


async def test_dns_server_list_combined():
    """ test a list with both v4 and v6 addresses """
    """Test a list with both v4 and v6 addresses."""
    combined = GOOD_V4 + GOOD_V6
    # test the matches
    assert supervisor.validate.dns_server_list(combined)
@ -52,14 +53,14 @@ async def test_dns_server_list_combined():


async def test_dns_server_list_bad():
    """ test the bad list """
    """Test the bad list."""
    # test the matches
    with pytest.raises(voluptuous.error.Invalid):
        assert supervisor.validate.dns_server_list(BAD)


async def test_dns_server_list_bad_combined():
    """ test the bad list, combined with the good """
    """Test the bad list, combined with the good."""
    combined = GOOD_V4 + GOOD_V6 + BAD

    with pytest.raises(voluptuous.error.Invalid):

@ -2,7 +2,7 @@

import attr

from supervisor.utils.tar import secure_path, exclude_filter
from supervisor.utils.tar import exclude_filter, secure_path


@attr.s