diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index c2b424959..18b012145 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -34,10 +34,10 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     && rm -rf /var/lib/apt/lists/*

 # Install Python dependencies from requirements.txt if it exists
-COPY requirements.txt requirements_tests.txt /workspaces/
-RUN pip install -r requirements.txt \
-    && pip3 install -r requirements_tests.txt \
-    && pip install black tox
+COPY requirements.txt requirements_tests.txt ./
+RUN pip3 install -r requirements.txt -r requirements_tests.txt \
+    && pip3 install black tox \
+    && rm -f requirements.txt requirements_tests.txt

 # Set the default shell to bash instead of sh
 ENV SHELL /bin/bash
diff --git a/hassio/addons/addon.py b/hassio/addons/addon.py
index be4fc2238..6b25b03f0 100644
--- a/hassio/addons/addon.py
+++ b/hassio/addons/addon.py
@@ -351,7 +351,7 @@ class Addon(AddonModel):
         options = self.options

         try:
-            schema(options)
+            options = schema(options)
             write_json_file(self.path_options, options)
         except vol.Invalid as ex:
             _LOGGER.error(
@@ -438,7 +438,9 @@ class Addon(AddonModel):
         options = {**self.persist[ATTR_OPTIONS], **default_options}

         # create voluptuous
-        new_schema = vol.Schema(vol.All(dict, validate_options(new_raw_schema)))
+        new_schema = vol.Schema(
+            vol.All(dict, validate_options(self.coresys, new_raw_schema))
+        )

         # validate
         try:
diff --git a/hassio/addons/model.py b/hassio/addons/model.py
index 1a895de61..ffdada1c9 100644
--- a/hassio/addons/model.py
+++ b/hassio/addons/model.py
@@ -461,7 +461,7 @@ class AddonModel(CoreSysAttributes):
         if isinstance(raw_schema, bool):
             return vol.Schema(dict)

-        return vol.Schema(vol.All(dict, validate_options(raw_schema)))
+        return vol.Schema(vol.All(dict, validate_options(self.coresys, raw_schema)))

     def __eq__(self, other):
         """Compaired add-on objects."""
diff --git a/hassio/addons/validate.py b/hassio/addons/validate.py
index 1c65b92bb..687eadfca 100644
--- a/hassio/addons/validate.py
+++ b/hassio/addons/validate.py
@@ -109,16 +109,21 @@ V_EMAIL = "email"
 V_URL = "url"
 V_PORT = "port"
 V_MATCH = "match"
+V_LIST = "list"

 RE_SCHEMA_ELEMENT = re.compile(
     r"^(?:"
-    r"|str|bool|email|url|port"
+    r"|bool|email|url|port"
+    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
     r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
     r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
r"|match\((?P.*)\)" + r"|list\((?P.+)\)" r")\??$" ) +_SCHEMA_LENGTH_PARTS = ("i_min", "i_max", "f_min", "f_max", "s_min", "s_max") + RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$") RE_DOCKER_IMAGE_BUILD = re.compile( r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$" @@ -305,7 +310,7 @@ SCHEMA_ADDON_SNAPSHOT = vol.Schema( ) -def validate_options(raw_schema): +def validate_options(coresys, raw_schema): """Validate schema.""" def validate(struct): @@ -323,13 +328,13 @@ def validate_options(raw_schema): try: if isinstance(typ, list): # nested value list - options[key] = _nested_validate_list(typ[0], value, key) + options[key] = _nested_validate_list(coresys, typ[0], value, key) elif isinstance(typ, dict): # nested value dict - options[key] = _nested_validate_dict(typ, value, key) + options[key] = _nested_validate_dict(coresys, typ, value, key) else: # normal value - options[key] = _single_validate(typ, value, key) + options[key] = _single_validate(coresys, typ, value, key) except (IndexError, KeyError): raise vol.Invalid(f"Type error for {key}") from None @@ -341,24 +346,31 @@ def validate_options(raw_schema): # pylint: disable=no-value-for-parameter # pylint: disable=inconsistent-return-statements -def _single_validate(typ, value, key): +def _single_validate(coresys, typ, value, key): """Validate a single element.""" # if required argument if value is None: raise vol.Invalid(f"Missing required option '{key}'") + # Lookup secret + if str(value).startswith("!secret "): + secret: str = value.partition(" ")[2] + value = coresys.secrets.get(secret) + if value is None: + raise vol.Invalid(f"Unknown secret {secret}") + # parse extend data from type match = RE_SCHEMA_ELEMENT.match(typ) # prepare range range_args = {} - for group_name in ("i_min", "i_max", "f_min", "f_max"): + for group_name in _SCHEMA_LENGTH_PARTS: group_value = match.group(group_name) if group_value: range_args[group_name[2:]] = float(group_value) if typ.startswith(V_STR): - return str(value) + return vol.All(str(value), vol.Range(**range_args))(value) elif typ.startswith(V_INT): return vol.All(vol.Coerce(int), vol.Range(**range_args))(value) elif typ.startswith(V_FLOAT): @@ -373,26 +385,28 @@ def _single_validate(typ, value, key): return NETWORK_PORT(value) elif typ.startswith(V_MATCH): return vol.Match(match.group("match"))(str(value)) + elif typ.strartswith(V_LIST): + return vol.In(match.group("list").split("|"))(str(value)) raise vol.Invalid(f"Fatal error for {key} type {typ}") -def _nested_validate_list(typ, data_list, key): +def _nested_validate_list(coresys, typ, data_list, key): """Validate nested items.""" options = [] for element in data_list: # Nested? if isinstance(typ, dict): - c_options = _nested_validate_dict(typ, element, key) + c_options = _nested_validate_dict(coresys, typ, element, key) options.append(c_options) else: - options.append(_single_validate(typ, element, key)) + options.append(_single_validate(coresys, typ, element, key)) return options -def _nested_validate_dict(typ, data_dict, key): +def _nested_validate_dict(coresys, typ, data_dict, key): """Validate nested items.""" options = {} @@ -404,9 +418,11 @@ def _nested_validate_dict(typ, data_dict, key): # Nested? 
         if isinstance(typ[c_key], list):
-            options[c_key] = _nested_validate_list(typ[c_key][0], c_value, c_key)
+            options[c_key] = _nested_validate_list(
+                coresys, typ[c_key][0], c_value, c_key
+            )
         else:
-            options[c_key] = _single_validate(typ[c_key], c_value, c_key)
+            options[c_key] = _single_validate(coresys, typ[c_key], c_value, c_key)

     _check_missing_options(typ, options, key)
     return options
diff --git a/hassio/api/addons.py b/hassio/api/addons.py
index 6ee425192..701dc3e39 100644
--- a/hassio/api/addons.py
+++ b/hassio/api/addons.py
@@ -269,7 +269,9 @@ class APIAddons(CoreSysAttributes):
         addon_schema = SCHEMA_OPTIONS.extend(
             {vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema)}
         )
-        body: Dict[str, Any] = await api_validate(addon_schema, request)
+        body: Dict[str, Any] = await api_validate(
+            addon_schema, request, origin=[ATTR_OPTIONS]
+        )

         if ATTR_OPTIONS in body:
             addon.options = body[ATTR_OPTIONS]
diff --git a/hassio/api/security.py b/hassio/api/security.py
index 59fd70996..63d930197 100644
--- a/hassio/api/security.py
+++ b/hassio/api/security.py
@@ -40,6 +40,7 @@ NO_SECURITY_CHECK = re.compile(
 ADDONS_API_BYPASS = re.compile(
     r"^(?:"
     r"|/addons/self/(?!security|update)[^/]+"
+    r"|/secrets/.+"
     r"|/info"
     r"|/hardware/trigger"
     r"|/services.*"
diff --git a/hassio/api/supervisor.py b/hassio/api/supervisor.py
index d266f4b2b..0bde1bcf0 100644
--- a/hassio/api/supervisor.py
+++ b/hassio/api/supervisor.py
@@ -161,7 +161,9 @@ class APISupervisor(CoreSysAttributes):
     @api_process
     def reload(self, request: web.Request) -> Awaitable[None]:
         """Reload add-ons, configuration, etc."""
-        return asyncio.shield(self.sys_updater.reload())
+        return asyncio.shield(
+            asyncio.wait([self.sys_updater.reload(), self.sys_secrets.reload()])
+        )

     @api_process
     def repair(self, request: web.Request) -> Awaitable[None]:
diff --git a/hassio/api/utils.py b/hassio/api/utils.py
index 9cbd3fb99..126c2f40d 100644
--- a/hassio/api/utils.py
+++ b/hassio/api/utils.py
@@ -1,6 +1,7 @@
 """Init file for Hass.io util for RESTful API."""
 import json
 import logging
+from typing import Optional, List

 from aiohttp import web
 import voluptuous as vol
@@ -89,12 +90,22 @@ def api_return_ok(data=None):
     return web.json_response({JSON_RESULT: RESULT_OK, JSON_DATA: data or {}})


-async def api_validate(schema, request):
+async def api_validate(
+    schema: vol.Schema, request: web.Request, origin: Optional[List[str]] = None
+):
     """Validate request data with schema."""
     data = await request.json(loads=json_loads)
     try:
-        data = schema(data)
+        data_validated = schema(data)
     except vol.Invalid as ex:
         raise APIError(humanize_error(data, ex)) from None

-    return data
+    if not origin:
+        return data_validated
+
+    for origin_value in origin:
+        if origin_value not in data_validated:
+            continue
+        data_validated[origin_value] = data[origin_value]
+
+    return data_validated
diff --git a/hassio/bootstrap.py b/hassio/bootstrap.py
index 348fdcf18..a5587f5d0 100644
--- a/hassio/bootstrap.py
+++ b/hassio/bootstrap.py
@@ -27,6 +27,7 @@ from .store import StoreManager
 from .supervisor import Supervisor
 from .tasks import Tasks
 from .updater import Updater
+from .secrets import SecretsManager
 from .utils.dt import fetch_timezone

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -61,6 +62,7 @@ async def initialize_coresys():
     coresys.discovery = Discovery(coresys)
     coresys.dbus = DBusManager(coresys)
     coresys.hassos = HassOS(coresys)
+    coresys.secrets = SecretsManager(coresys)

     # bootstrap config
     initialize_system_data(coresys)
diff --git a/hassio/const.py b/hassio/const.py
index f581d6a24..ad608f0da 100644
--- a/hassio/const.py
+++ b/hassio/const.py
@@ -2,7 +2,7 @@
 from pathlib import Path
 from ipaddress import ip_network

-HASSIO_VERSION = "185"
+HASSIO_VERSION = "186"

 URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
@@ -220,6 +220,7 @@ ATTR_DNS = "dns"
 ATTR_SERVERS = "servers"
 ATTR_LOCALS = "locals"
 ATTR_UDEV = "udev"
+ATTR_VALUE = "value"

 PROVIDE_SERVICE = "provide"
 NEED_SERVICE = "need"
diff --git a/hassio/core.py b/hassio/core.py
index f3264cf69..572d21d4e 100644
--- a/hassio/core.py
+++ b/hassio/core.py
@@ -72,6 +72,9 @@ class HassIO(CoreSysAttributes):
         # Load ingress
         await self.sys_ingress.load()

+        # Load secrets
+        await self.sys_secrets.load()
+
     async def start(self):
         """Start Hass.io orchestration."""
         await self.sys_api.start()
diff --git a/hassio/coresys.py b/hassio/coresys.py
index 5d9892b34..70d47f707 100644
--- a/hassio/coresys.py
+++ b/hassio/coresys.py
@@ -24,6 +24,7 @@ if TYPE_CHECKING:
     from .homeassistant import HomeAssistant
     from .host import HostManager
     from .ingress import Ingress
+    from .secrets import SecretsManager
     from .services import ServiceManager
     from .snapshots import SnapshotManager
     from .supervisor import Supervisor
@@ -70,6 +71,7 @@ class CoreSys:
         self._dbus: Optional[DBusManager] = None
         self._hassos: Optional[HassOS] = None
         self._services: Optional[ServiceManager] = None
+        self._secrets: Optional[SecretsManager] = None
         self._store: Optional[StoreManager] = None
         self._discovery: Optional[Discovery] = None
@@ -209,6 +211,18 @@ class CoreSys:
             raise RuntimeError("Updater already set!")
         self._updater = value

+    @property
+    def secrets(self) -> SecretsManager:
+        """Return SecretsManager object."""
+        return self._secrets
+
+    @secrets.setter
+    def secrets(self, value: SecretsManager):
+        """Set a SecretsManager object."""
+        if self._secrets:
+            raise RuntimeError("SecretsManager already set!")
+        self._secrets = value
+
     @property
     def addons(self) -> AddonManager:
         """Return AddonManager object."""
@@ -437,6 +451,11 @@ class CoreSysAttributes:
         """Return Updater object."""
         return self.coresys.updater

+    @property
+    def sys_secrets(self) -> SecretsManager:
+        """Return SecretsManager object."""
+        return self.coresys.secrets
+
     @property
     def sys_addons(self) -> AddonManager:
         """Return AddonManager object."""
diff --git a/hassio/dns.py b/hassio/dns.py
index 36671c705..3afc9331f 100644
--- a/hassio/dns.py
+++ b/hassio/dns.py
@@ -115,14 +115,15 @@ class CoreDNS(JsonConfig, CoreSysAttributes):

         # Start DNS forwarder
         self.sys_create_task(self.forwarder.start(self.sys_docker.network.dns))
+        self._update_local_resolv()

-        with suppress(CoreDNSError):
-            self._update_local_resolv()
-
-        # Start is not Running
+        # Reset container configuration
         if await self.instance.is_running():
-            await self.restart()
-        else:
+            with suppress(DockerAPIError):
+                await self.instance.stop()
+
+        # Run CoreDNS
+        with suppress(CoreDNSError):
             await self.start()

     async def unload(self) -> None:
@@ -148,9 +149,8 @@
         self.version = self.instance.version
         self.save_data()

-        # Init Hosts / Run server
+        # Init Hosts
         self.write_hosts()
-        await self.start()

     async def update(self, version: Optional[str] = None) -> None:
         """Update CoreDNS plugin."""
@@ -207,6 +207,9 @@
     def _write_corefile(self) -> None:
         """Write CoreDNS config."""
+        dns_servers: List[str] = []
+
+        # Load Template
         try:
             corefile_template: Template = Template(COREDNS_TMPL.read_text())
         except OSError as err:
@@ -214,8 +217,8 @@ class CoreDNS(JsonConfig, CoreSysAttributes):
             raise CoreDNSError() from None

         # Prepare DNS serverlist: Prio 1 Local, Prio 2 Manual, Prio 3 Fallback
-        dns_servers = []
-        for server in self.sys_host.network.dns_servers + self.servers + DNS_SERVERS:
+        local_dns: List[str] = self.sys_host.network.dns_servers or ["dns://127.0.0.11"]
+        for server in local_dns + self.servers + DNS_SERVERS:
             try:
                 DNS_URL(server)
                 if server not in dns_servers:
@@ -358,7 +361,7 @@
                     resolv_lines.append(line.strip())
         except OSError as err:
             _LOGGER.warning("Can't read local resolv: %s", err)
-            raise CoreDNSError() from None
+            return

         if nameserver in resolv_lines:
             return
@@ -372,4 +375,4 @@
                     resolv.write(f"{line}\n")
         except OSError as err:
             _LOGGER.warning("Can't write local resolv: %s", err)
-            raise CoreDNSError() from None
+            return
diff --git a/hassio/docker/__init__.py b/hassio/docker/__init__.py
index 2ee5223d7..66c0846ab 100644
--- a/hassio/docker/__init__.py
+++ b/hassio/docker/__init__.py
@@ -54,6 +54,7 @@ class DockerAPI:
         self,
         image: str,
         version: str = "latest",
+        dns: bool = True,
         ipv4: Optional[IPv4Address] = None,
         **kwargs: Dict[str, Any],
     ) -> docker.models.containers.Container:
@@ -61,14 +62,15 @@

         Need run inside executor.
         """
-        name: str = kwargs.get("name", image)
+        name: str = kwargs.get("name")
         network_mode: str = kwargs.get("network_mode")
         hostname: str = kwargs.get("hostname")

         # Setup DNS
-        kwargs["dns"] = [str(self.network.dns)]
-        kwargs["dns_search"] = [DNS_SUFFIX]
-        kwargs["domainname"] = DNS_SUFFIX
+        if dns:
+            kwargs["dns"] = [str(self.network.dns)]
+            kwargs["dns_search"] = [DNS_SUFFIX]
+            kwargs["domainname"] = DNS_SUFFIX

         # Setup network
         if not network_mode:
diff --git a/hassio/docker/dns.py b/hassio/docker/dns.py
index c6d76cb47..3c75b1cd8 100644
--- a/hassio/docker/dns.py
+++ b/hassio/docker/dns.py
@@ -41,6 +41,7 @@ class DockerDNS(DockerInterface, CoreSysAttributes):
         docker_container = self.sys_docker.run(
             self.image,
             version=self.sys_dns.version,
+            dns=False,
             ipv4=self.sys_docker.network.dns,
             name=self.name,
             hostname=self.name.replace("_", "-"),
diff --git a/hassio/docker/interface.py b/hassio/docker/interface.py
index 12ad5e21d..fe3cd4332 100644
--- a/hassio/docker/interface.py
+++ b/hassio/docker/interface.py
@@ -42,6 +42,13 @@ class DockerInterface(CoreSysAttributes):
             return {}
         return self._meta.get("Config", {})

+    @property
+    def meta_host(self) -> Dict[str, Any]:
+        """Return meta data of configuration for host."""
+        if not self._meta:
+            return {}
+        return self._meta.get("HostConfig", {})
+
     @property
     def meta_labels(self) -> Dict[str, str]:
         """Return meta data of labels for container/image."""
diff --git a/hassio/docker/supervisor.py b/hassio/docker/supervisor.py
index 961fd1551..174067b23 100644
--- a/hassio/docker/supervisor.py
+++ b/hassio/docker/supervisor.py
@@ -26,6 +26,11 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):
         """Return IP address of this container."""
         return self.sys_docker.network.supervisor

+    @property
+    def privileged(self) -> bool:
+        """Return True if the container runs with Privileged."""
+        return self.meta_host.get("Privileged", False)
+
     def _attach(self, tag: str) -> None:
         """Attach to running docker container.
diff --git a/hassio/misc/hardware.py b/hassio/misc/hardware.py
index 9ec283c6e..800669650 100644
--- a/hassio/misc/hardware.py
+++ b/hassio/misc/hardware.py
@@ -159,5 +159,5 @@ class Hardware:
         if proc.returncode == 0:
             return

-        _LOGGER.waring("udevadm device triggering fails!")
+        _LOGGER.warning("udevadm device triggering fails!")
         raise HardwareNotSupportedError()
diff --git a/hassio/secrets.py b/hassio/secrets.py
new file mode 100644
index 000000000..33ff792df
--- /dev/null
+++ b/hassio/secrets.py
@@ -0,0 +1,54 @@
+"""Handle Home Assistant secrets for add-ons."""
+from typing import Dict
+from pathlib import Path
+import logging
+
+from ruamel.yaml import YAML, YAMLError
+
+from .coresys import CoreSys, CoreSysAttributes
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+
+class SecretsManager(CoreSysAttributes):
+    """Manage Home Assistant secrets."""
+
+    def __init__(self, coresys: CoreSys):
+        """Initialize secret manager."""
+        self.coresys: CoreSys = coresys
+        self.secrets: Dict[str, str] = {}
+
+    @property
+    def path_secrets(self) -> Path:
+        """Return path to secret file."""
+        return Path(self.sys_config.path_homeassistant, "secrets.yaml")
+
+    def get(self, secret: str) -> str:
+        """Get secret from store."""
+        _LOGGER.info("Request secret %s", secret)
+        return self.secrets.get(secret)
+
+    async def load(self) -> None:
+        """Load secrets on start."""
+        await self._read_secrets()
+
+        _LOGGER.info("Load Home Assistant secrets: %s", len(self.secrets))
+
+    async def reload(self) -> None:
+        """Reload secrets."""
+        await self._read_secrets()
+
+    async def _read_secrets(self):
+        """Read secrets.yaml into memory."""
+        if not self.path_secrets.exists():
+            _LOGGER.debug("Home Assistant secrets file does not exist")
+            return
+
+        # Read secrets
+        try:
+            yaml = YAML()
+            self.secrets = await self.sys_run_in_executor(yaml.load, self.path_secrets)
+        except YAMLError as err:
+            _LOGGER.error("Can't process Home Assistant secrets: %s", err)
+        else:
+            _LOGGER.debug("Reload Home Assistant secrets: %s", len(self.secrets))
diff --git a/hassio/supervisor.py b/hassio/supervisor.py
index 0fc8039ee..372af6fd6 100644
--- a/hassio/supervisor.py
+++ b/hassio/supervisor.py
@@ -41,6 +41,12 @@ class Supervisor(CoreSysAttributes):
         with suppress(DockerAPIError):
             await self.instance.cleanup()

+        # Check privileged mode
+        if not self.instance.privileged:
+            _LOGGER.error(
+                "Supervisor does not run in Privileged mode. Hassio runs with limited functionality!"
+            )
+
     @property
     def ip_address(self) -> IPv4Address:
         """Return IP of Supervisor instance."""
diff --git a/hassio/tasks.py b/hassio/tasks.py
index fc1dff749..95cb695a2 100644
--- a/hassio/tasks.py
+++ b/hassio/tasks.py
@@ -19,6 +19,7 @@ RUN_RELOAD_SNAPSHOTS = 72000
 RUN_RELOAD_HOST = 72000
 RUN_RELOAD_UPDATER = 7200
 RUN_RELOAD_INGRESS = 930
+RUN_RELOAD_SECRETS = 940

 RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
 RUN_WATCHDOG_HOMEASSISTANT_API = 300
@@ -77,6 +78,11 @@ class Tasks(CoreSysAttributes):
                 self.sys_ingress.reload, RUN_RELOAD_INGRESS
             )
         )
+        self.jobs.add(
+            self.sys_scheduler.register_task(
+                self.sys_secrets.reload, RUN_RELOAD_SECRETS
+            )
+        )

         # Watchdog
         self.jobs.add(
diff --git a/hassio/utils/gdbus.py b/hassio/utils/gdbus.py
index 22b2283eb..617fa3c83 100644
--- a/hassio/utils/gdbus.py
+++ b/hassio/utils/gdbus.py
@@ -90,7 +90,6 @@ class DBus:
             raise DBusParseError() from None

         # Read available methods
-        _LOGGER.debug("Introspect XML: %s", data)
         for interface in xml.findall("./interface"):
             interface_name = interface.get("name")
diff --git a/requirements.txt b/requirements.txt
index ec9f64288..49123e988 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,6 +10,7 @@ gitpython==3.0.2
 packaging==19.1
 pytz==2019.2
 pyudev==0.21.0
+ruamel.yaml==0.15.100
 uvloop==0.12.2
 voluptuous==0.11.7
 ptvsd==4.3.2
diff --git a/requirements_tests.txt b/requirements_tests.txt
index 57a7d7818..4752e0e69 100644
--- a/requirements_tests.txt
+++ b/requirements_tests.txt
@@ -1,5 +1,5 @@
 flake8==3.7.8
 pylint==2.3.1
-pytest==5.1.1
+pytest==5.1.2
 pytest-timeout==1.3.3
 pytest-aiohttp==0.3.0
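
Below is an illustrative, self-contained sketch of the "!secret" resolution behaviour that _single_validate() gains in this change. SecretsStore and resolve_secret are hypothetical stand-ins for coresys.secrets and SecretsManager; only the "!secret <name>" prefix handling and the vol.Invalid error come from the diff above.

"""Sketch only: mirrors the "!secret" lookup added to hassio/addons/validate.py."""
from typing import Dict, Optional

import voluptuous as vol


class SecretsStore:
    """Hypothetical stand-in for SecretsManager (see hassio/secrets.py)."""

    def __init__(self, secrets: Dict[str, str]) -> None:
        self._secrets = secrets

    def get(self, secret: str) -> Optional[str]:
        """Return the secret value or None, like SecretsManager.get()."""
        return self._secrets.get(secret)


def resolve_secret(store: SecretsStore, value: str) -> str:
    """Replace a "!secret <name>" option value, as _single_validate() now does."""
    if str(value).startswith("!secret "):
        secret = value.partition(" ")[2]
        resolved = store.get(secret)
        if resolved is None:
            raise vol.Invalid(f"Unknown secret {secret}")
        return resolved
    return value


# Usage: an add-on option referencing secrets.yaml is swapped for its stored value.
store = SecretsStore({"mqtt_password": "super-secret"})
assert resolve_secret(store, "!secret mqtt_password") == "super-secret"
assert resolve_secret(store, "plain-value") == "plain-value"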