Merge pull request #2045 from home-assistant/dev

Release 243
Pascal Vizeli 2020-09-11 23:18:28 +02:00 committed by GitHub
commit 3b2351af0b
32 changed files with 679 additions and 50 deletions

API.md

@ -851,6 +851,8 @@ return:
"hassos": "null|version", "hassos": "null|version",
"docker": "version", "docker": "version",
"hostname": "name", "hostname": "name",
"operating_system": "HassOS XY|Ubuntu 16.4|null",
"features": ["shutdown", "reboot", "hostname", "services", "hassos"],
"machine": "type", "machine": "type",
"arch": "arch", "arch": "arch",
"supported_arch": ["arch1", "arch2"], "supported_arch": ["arch1", "arch2"],
@ -946,6 +948,41 @@ return:
}
```
### Observer
- GET `/observer/info`
```json
{
"host": "ip-address",
"version": "1",
"version_latest": "2"
}
```
- POST `/observer/update`
```json
{
"version": "VERSION"
}
```
- GET `/observer/stats`
```json
{
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"memory_percent": 1.4,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
"blk_write": 0
}
```
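For reference, a minimal client sketch for the new `/observer` endpoints (the hostname `http://supervisor`, the `SUPERVISOR_TOKEN` bearer header, and the `data` response envelope are assumptions based on the usual Supervisor API conventions, not part of this change):

```python
# Minimal sketch: call the new observer endpoints from inside an add-on.
import os
import requests

API = "http://supervisor"
HEADERS = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}

# Current and latest observer version
info = requests.get(f"{API}/observer/info", headers=HEADERS).json()["data"]
print(info["version"], info["version_latest"])

# Resource usage of the observer container
stats = requests.get(f"{API}/observer/stats", headers=HEADERS).json()["data"]
print(f"observer CPU: {stats['cpu_percent']}%")

# Trigger an update to a specific version (omit "version" to use the latest)
requests.post(f"{API}/observer/update", headers=HEADERS, json={"version": "2"})
```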
### Multicast
- GET `/multicast/info`


@ -14,6 +14,6 @@ pulsectl==20.5.1
pytz==2020.1
pyudev==0.22.0
ruamel.yaml==0.15.100
-sentry-sdk==0.17.3
+sentry-sdk==0.17.4
uvloop==0.14.0
voluptuous==0.11.7


@ -105,7 +105,7 @@ from ..validate import (
_LOGGER: logging.Logger = logging.getLogger(__name__)
-RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
+RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")
V_STR = "str"


@ -19,6 +19,7 @@ from .info import APIInfo
from .ingress import APIIngress
from .multicast import APIMulticast
from .network import APINetwork
+from .observer import APIObserver
from .os import APIOS
from .proxy import APIProxy
from .security import SecurityMiddleware
@ -54,6 +55,7 @@ class RestAPI(CoreSysAttributes):
self._register_host()
self._register_os()
self._register_cli()
+self._register_observer()
self._register_multicast()
self._register_network()
self._register_hardware()
@ -135,6 +137,19 @@ class RestAPI(CoreSysAttributes):
]
)
def _register_observer(self) -> None:
"""Register Observer functions."""
api_observer = APIObserver()
api_observer.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/observer/info", api_observer.info),
web.get("/observer/stats", api_observer.stats),
web.post("/observer/update", api_observer.update),
]
)
def _register_multicast(self) -> None:
"""Register Multicast functions."""
api_multicast = APIMulticast()


@ -8,11 +8,13 @@ from ..const import (
ATTR_ARCH,
ATTR_CHANNEL,
ATTR_DOCKER,
+ATTR_FEATURES,
ATTR_HASSOS,
ATTR_HOMEASSISTANT,
ATTR_HOSTNAME,
ATTR_LOGGING,
ATTR_MACHINE,
+ATTR_OPERATING_SYSTEM,
ATTR_SUPERVISOR,
ATTR_SUPPORTED,
ATTR_SUPPORTED_ARCH,
@ -36,6 +38,8 @@ class APIInfo(CoreSysAttributes):
ATTR_HASSOS: self.sys_hassos.version,
ATTR_DOCKER: self.sys_docker.info.version,
ATTR_HOSTNAME: self.sys_host.info.hostname,
+ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
+ATTR_FEATURES: self.sys_host.supported_features,
ATTR_MACHINE: self.sys_machine,
ATTR_ARCH: self.sys_arch.default,
ATTR_SUPPORTED_ARCH: self.sys_arch.supported,


@ -0,0 +1,65 @@
"""Init file for Supervisor Observer RESTful API."""
import asyncio
import logging
from typing import Any, Dict
from aiohttp import web
import voluptuous as vol
from ..const import (
ATTR_BLK_READ,
ATTR_BLK_WRITE,
ATTR_CPU_PERCENT,
ATTR_HOST,
ATTR_MEMORY_LIMIT,
ATTR_MEMORY_PERCENT,
ATTR_MEMORY_USAGE,
ATTR_NETWORK_RX,
ATTR_NETWORK_TX,
ATTR_VERSION,
ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from ..validate import version_tag
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
class APIObserver(CoreSysAttributes):
"""Handle RESTful API for Observer functions."""
@api_process
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return HA Observer information."""
return {
ATTR_HOST: str(self.sys_docker.network.observer),
ATTR_VERSION: self.sys_plugins.observer.version,
ATTR_VERSION_LATEST: self.sys_plugins.observer.latest_version,
}
@api_process
async def stats(self, request: web.Request) -> Dict[str, Any]:
"""Return resource information."""
stats = await self.sys_plugins.observer.stats()
return {
ATTR_CPU_PERCENT: stats.cpu_percent,
ATTR_MEMORY_USAGE: stats.memory_usage,
ATTR_MEMORY_LIMIT: stats.memory_limit,
ATTR_MEMORY_PERCENT: stats.memory_percent,
ATTR_NETWORK_RX: stats.network_rx,
ATTR_NETWORK_TX: stats.network_tx,
ATTR_BLK_READ: stats.blk_read,
ATTR_BLK_WRITE: stats.blk_write,
}
@api_process
async def update(self, request: web.Request) -> None:
"""Update HA observer."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_plugins.observer.latest_version)
await asyncio.shield(self.sys_plugins.observer.update(version))
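The `update` handler validates its body with `SCHEMA_VERSION` before shielding the plugin update; a standalone sketch of that voluptuous pattern (a plain `str` validator stands in for the Supervisor's `version_tag` here):

```python
# Standalone sketch of the SCHEMA_VERSION validation used by /observer/update.
# A plain str validator stands in for version_tag for illustration.
import voluptuous as vol

SCHEMA_VERSION = vol.Schema({vol.Optional("version"): str})

body = SCHEMA_VERSION({"version": "2"})    # -> {'version': '2'}
empty = SCHEMA_VERSION({})                 # -> {} (the key is optional)
version = body.get("version", "latest")    # mirrors body.get(ATTR_VERSION, latest_version)
```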


@ -39,17 +39,22 @@ NO_SECURITY_CHECK = re.compile(
r")$" r")$"
) )
# Observer allow API calls
OBSERVER_CHECK = re.compile(
r"^(?:"
r"|/[^/]+/info"
r")$"
)
# Can be called by every add-on
ADDONS_API_BYPASS = re.compile(
r"^(?:"
r"|/addons/self/(?!security|update)[^/]+"
r"|/secrets/.+"
r"|/info"
r"|/hardware/trigger"
r"|/services.*"
r"|/discovery.*"
r"|/auth"
r"|/host/info"
r")$"
)
@ -95,7 +100,7 @@ ADDONS_ROLE_ACCESS = {
),
}
-# fmt: off
+# fmt: on
class SecurityMiddleware(CoreSysAttributes):
@ -136,6 +141,14 @@ class SecurityMiddleware(CoreSysAttributes):
_LOGGER.debug("%s access from Host", request.path)
request_from = self.sys_host
# Observer
if supervisor_token == self.sys_plugins.observer.supervisor_token:
if not OBSERVER_CHECK.match(request.url):
_LOGGER.warning("%s invalid Observer access", request.path)
raise HTTPForbidden()
_LOGGER.debug("%s access from Observer", request.path)
request_from = self.sys_plugins.observer
# Add-on
addon = None
if supervisor_token and not request_from:
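The new observer token is only allowed to hit the plain per-component info routes; a quick check of the `OBSERVER_CHECK` pattern added above:

```python
# Quick check of the OBSERVER_CHECK pattern: only ".../info" paths are allowed
# for the observer token; everything else is rejected by the middleware.
import re

OBSERVER_CHECK = re.compile(r"^(?:" r"|/[^/]+/info" r")$")

assert OBSERVER_CHECK.match("/supervisor/info")
assert OBSERVER_CHECK.match("/observer/info")
assert not OBSERVER_CHECK.match("/supervisor/update")
assert not OBSERVER_CHECK.match("/host/reboot")
```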


@ -164,6 +164,11 @@ def initialize_system_data(coresys: CoreSys) -> None:
_LOGGER.info("Create Supervisor audio folder %s", config.path_audio) _LOGGER.info("Create Supervisor audio folder %s", config.path_audio)
config.path_audio.mkdir() config.path_audio.mkdir()
# Media folder
if not config.path_media.is_dir():
_LOGGER.info("Create Supervisor media folder %s", config.path_media)
config.path_media.mkdir()
# Update log level
coresys.config.modify_log_level()


@ -41,6 +41,7 @@ TMP_DATA = PurePath("tmp")
APPARMOR_DATA = PurePath("apparmor")
DNS_DATA = PurePath("dns")
AUDIO_DATA = PurePath("audio")
+MEDIA_DATA = PurePath("media")
DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()
@ -258,6 +259,16 @@ class CoreConfig(JsonConfig):
"""Return dns path inside supervisor.""" """Return dns path inside supervisor."""
return Path(SUPERVISOR_DATA, DNS_DATA) return Path(SUPERVISOR_DATA, DNS_DATA)
@property
def path_media(self) -> Path:
"""Return root media data folder."""
return Path(SUPERVISOR_DATA, MEDIA_DATA)
@property
def path_extern_media(self) -> PurePath:
"""Return root media data folder external for Docker."""
return PurePath(self.path_extern_supervisor, MEDIA_DATA)
@property
def addons_repositories(self) -> List[str]:
"""Return list of custom Add-on repositories."""


@ -3,7 +3,7 @@ from enum import Enum
from ipaddress import ip_network
from pathlib import Path
-SUPERVISOR_VERSION = "242"
+SUPERVISOR_VERSION = "243"
URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
URL_HASSIO_APPARMOR = "https://version.home-assistant.io/apparmor.txt"
@ -17,15 +17,11 @@ URL_HASSOS_OTA = (
SUPERVISOR_DATA = Path("/data") SUPERVISOR_DATA = Path("/data")
FILE_HASSIO_ADDONS = Path(SUPERVISOR_DATA, "addons.json") FILE_HASSIO_ADDONS = Path(SUPERVISOR_DATA, "addons.json")
FILE_HASSIO_AUDIO = Path(SUPERVISOR_DATA, "audio.json")
FILE_HASSIO_AUTH = Path(SUPERVISOR_DATA, "auth.json") FILE_HASSIO_AUTH = Path(SUPERVISOR_DATA, "auth.json")
FILE_HASSIO_CLI = Path(SUPERVISOR_DATA, "cli.json")
FILE_HASSIO_CONFIG = Path(SUPERVISOR_DATA, "config.json") FILE_HASSIO_CONFIG = Path(SUPERVISOR_DATA, "config.json")
FILE_HASSIO_DISCOVERY = Path(SUPERVISOR_DATA, "discovery.json") FILE_HASSIO_DISCOVERY = Path(SUPERVISOR_DATA, "discovery.json")
FILE_HASSIO_DNS = Path(SUPERVISOR_DATA, "dns.json")
FILE_HASSIO_HOMEASSISTANT = Path(SUPERVISOR_DATA, "homeassistant.json") FILE_HASSIO_HOMEASSISTANT = Path(SUPERVISOR_DATA, "homeassistant.json")
FILE_HASSIO_INGRESS = Path(SUPERVISOR_DATA, "ingress.json") FILE_HASSIO_INGRESS = Path(SUPERVISOR_DATA, "ingress.json")
FILE_HASSIO_MULTICAST = Path(SUPERVISOR_DATA, "multicast.json")
FILE_HASSIO_SERVICES = Path(SUPERVISOR_DATA, "services.json") FILE_HASSIO_SERVICES = Path(SUPERVISOR_DATA, "services.json")
FILE_HASSIO_UPDATER = Path(SUPERVISOR_DATA, "updater.json") FILE_HASSIO_UPDATER = Path(SUPERVISOR_DATA, "updater.json")
@ -74,7 +70,7 @@ HEADER_TOKEN_OLD = "X-Hassio-Key"
ENV_TIME = "TZ" ENV_TIME = "TZ"
ENV_TOKEN = "SUPERVISOR_TOKEN" ENV_TOKEN = "SUPERVISOR_TOKEN"
ENV_TOKEN_OLD = "HASSIO_TOKEN" ENV_TOKEN_HASSIO = "HASSIO_TOKEN"
ENV_HOMEASSISTANT_REPOSITORY = "HOMEASSISTANT_REPOSITORY" ENV_HOMEASSISTANT_REPOSITORY = "HOMEASSISTANT_REPOSITORY"
ENV_SUPERVISOR_DEV = "SUPERVISOR_DEV" ENV_SUPERVISOR_DEV = "SUPERVISOR_DEV"
@ -275,6 +271,7 @@ ATTR_VPN = "vpn"
ATTR_WAIT_BOOT = "wait_boot"
ATTR_WATCHDOG = "watchdog"
ATTR_WEBUI = "webui"
+ATTR_OBSERVER = "observer"
PROVIDE_SERVICE = "provide"
NEED_SERVICE = "need"
@ -289,6 +286,7 @@ MAP_SSL = "ssl"
MAP_ADDONS = "addons"
MAP_BACKUP = "backup"
MAP_SHARE = "share"
+MAP_MEDIA = "media"
ARCH_ARMHF = "armhf"
ARCH_ARMV7 = "armv7"
@ -305,6 +303,7 @@ FOLDER_HOMEASSISTANT = "homeassistant"
FOLDER_SHARE = "share"
FOLDER_ADDONS = "addons/local"
FOLDER_SSL = "ssl"
+FOLDER_MEDIA = "media"
SNAPSHOT_FULL = "full"
SNAPSHOT_PARTIAL = "partial"


@ -15,17 +15,18 @@ from ..addons.build import AddonBuild
from ..const import (
ENV_TIME,
ENV_TOKEN,
-ENV_TOKEN_OLD,
+ENV_TOKEN_HASSIO,
MAP_ADDONS,
MAP_BACKUP,
MAP_CONFIG,
+MAP_MEDIA,
MAP_SHARE,
MAP_SSL,
SECURITY_DISABLE,
SECURITY_PROFILE,
)
from ..coresys import CoreSys
-from ..exceptions import DockerAPIError
+from ..exceptions import CoreDNSError, DockerAPIError
from ..utils import process_lock
from .interface import DockerInterface
@ -118,7 +119,7 @@ class DockerAddon(DockerInterface):
**addon_env,
ENV_TIME: self.sys_config.timezone,
ENV_TOKEN: self.addon.supervisor_token,
-ENV_TOKEN_OLD: self.addon.supervisor_token,
+ENV_TOKEN_HASSIO: self.addon.supervisor_token,
}
@property
@ -269,6 +270,16 @@ class DockerAddon(DockerInterface):
}
)
if MAP_MEDIA in addon_mapping:
volumes.update(
{
str(self.sys_config.path_extern_media): {
"bind": "/media",
"mode": addon_mapping[MAP_MEDIA],
}
}
)
# Init other hardware mappings
# GPIO support
@ -368,7 +379,13 @@ class DockerAddon(DockerInterface):
_LOGGER.info("Start Docker add-on %s with version %s", self.image, self.version) _LOGGER.info("Start Docker add-on %s with version %s", self.image, self.version)
# Write data to DNS server # Write data to DNS server
self.sys_plugins.dns.add_host(ipv4=self.ip_address, names=[self.addon.hostname]) try:
self.sys_plugins.dns.add_host(
ipv4=self.ip_address, names=[self.addon.hostname]
)
except CoreDNSError as err:
_LOGGER.warning("Can't update DNS for %s", self.name)
self.sys_capture_exception(err)
def _install(
self, tag: str, image: Optional[str] = None, latest: bool = False
@ -494,5 +511,9 @@ class DockerAddon(DockerInterface):
Need run inside executor.
"""
if self.ip_address != NO_ADDDRESS:
+try:
self.sys_plugins.dns.delete_host(self.addon.hostname)
+except CoreDNSError as err:
+_LOGGER.warning("Can't update DNS for %s", self.name)
+self.sys_capture_exception(err)
super()._stop(remove_container)


@ -45,7 +45,10 @@ class DockerCli(DockerInterface, CoreSysAttributes):
name=self.name,
hostname=self.name.replace("_", "-"),
detach=True,
-extra_hosts={"supervisor": self.sys_docker.network.supervisor},
+extra_hosts={
+"supervisor": self.sys_docker.network.supervisor,
+"observer": self.sys_docker.network.observer,
+},
environment={
ENV_TIME: self.sys_config.timezone,
ENV_TOKEN: self.sys_plugins.cli.supervisor_token,


@ -6,7 +6,7 @@ from typing import Awaitable, Dict, Optional
import docker
import requests
-from ..const import ENV_TIME, ENV_TOKEN, ENV_TOKEN_OLD, LABEL_MACHINE, MACHINE_ID
+from ..const import ENV_TIME, ENV_TOKEN, ENV_TOKEN_HASSIO, LABEL_MACHINE, MACHINE_ID
from ..exceptions import DockerAPIError
from .interface import CommandReturn, DockerInterface
@ -62,6 +62,10 @@ class DockerHomeAssistant(DockerInterface):
"bind": "/share", "bind": "/share",
"mode": "rw", "mode": "rw",
}, },
str(self.sys_config.path_extern_media): {
"bind": "/media",
"mode": "rw",
},
} }
) )
@ -111,12 +115,16 @@ class DockerHomeAssistant(DockerInterface):
init=False,
network_mode="host",
volumes=self.volumes,
+extra_hosts={
+"supervisor": self.sys_docker.network.supervisor,
+"observer": self.sys_docker.network.observer,
+},
environment={
"HASSIO": self.sys_docker.network.supervisor,
"SUPERVISOR": self.sys_docker.network.supervisor,
ENV_TIME: self.sys_config.timezone,
ENV_TOKEN: self.sys_homeassistant.supervisor_token,
-ENV_TOKEN_OLD: self.sys_homeassistant.supervisor_token,
+ENV_TOKEN_HASSIO: self.sys_homeassistant.supervisor_token,
},
)


@ -334,7 +334,13 @@ class DockerInterface(CoreSysAttributes):
raise DockerAPIError() from err
# Cleanup Current
-for image in self.sys_docker.images.list(name=self.image):
+try:
+images_list = self.sys_docker.images.list(name=self.image)
+except (docker.errors.DockerException, requests.RequestException) as err:
+_LOGGER.warning("Corrupt docker overlayfs found: %s", err)
+raise DockerAPIError() from err
+for image in images_list:
if origin.id == image.id:
continue
@ -346,7 +352,13 @@ class DockerInterface(CoreSysAttributes):
if not old_image or self.image == old_image:
return
-for image in self.sys_docker.images.list(name=old_image):
+try:
+images_list = self.sys_docker.images.list(name=old_image)
+except (docker.errors.DockerException, requests.RequestException) as err:
+_LOGGER.warning("Corrupt docker overlayfs found: %s", err)
+raise DockerAPIError() from err
+for image in images_list:
with suppress(docker.errors.DockerException, requests.RequestException):
_LOGGER.info("Cleanup images: %s", image.tags)
self.sys_docker.images.remove(image.id, force=True)


@ -69,6 +69,11 @@ class DockerNetwork:
"""Return cli of the network.""" """Return cli of the network."""
return DOCKER_NETWORK_MASK[5] return DOCKER_NETWORK_MASK[5]
@property
def observer(self) -> IPv4Address:
"""Return observer of the network."""
return DOCKER_NETWORK_MASK[6]
def _get_network(self) -> docker.models.networks.Network:
"""Get supervisor network."""
try:
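Like the other plugins, the observer gets a fixed address in the internal Docker network, one slot after the cli; a small sketch of the indexing (the 172.30.32.0/23 value is an assumption about `DOCKER_NETWORK_MASK`, not part of this diff):

```python
# Sketch of the fixed plugin addressing; 172.30.32.0/23 is an assumed value for
# DOCKER_NETWORK_MASK, the indices [5] and [6] come from the properties above.
from ipaddress import ip_network

DOCKER_NETWORK_MASK = ip_network("172.30.32.0/23")

print(DOCKER_NETWORK_MASK[5])  # cli      -> 172.30.32.5
print(DOCKER_NETWORK_MASK[6])  # observer -> 172.30.32.6
```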


@ -0,0 +1,62 @@
"""Observer docker object."""
import logging
from ..const import ENV_TIME, ENV_TOKEN
from ..coresys import CoreSysAttributes
from .interface import DockerInterface
_LOGGER: logging.Logger = logging.getLogger(__name__)
OBSERVER_DOCKER_NAME: str = "hassio_observer"
class DockerObserver(DockerInterface, CoreSysAttributes):
"""Docker Supervisor wrapper for observer plugin."""
@property
def image(self):
"""Return name of observer image."""
return self.sys_plugins.observer.image
@property
def name(self) -> str:
"""Return name of Docker container."""
return OBSERVER_DOCKER_NAME
def _run(self) -> None:
"""Run Docker image.
Need run inside executor.
"""
if self._is_running():
return
# Cleanup
self._stop()
# Create & Run container
docker_container = self.sys_docker.run(
self.image,
version=self.sys_plugins.observer.version,
init=False,
ipv4=self.sys_docker.network.observer,
name=self.name,
hostname=self.name.replace("_", "-"),
detach=True,
restart_policy={"Name": "always"},
extra_hosts={"supervisor": self.sys_docker.network.supervisor},
environment={
ENV_TIME: self.sys_config.timezone,
ENV_TOKEN: self.sys_plugins.observer.supervisor_token,
},
volumes={"/run/docker.sock": {"bind": "/run/docker.sock", "mode": "ro"}},
ports={"80/tcp": 4357},
)
self._meta = docker_container.attrs
_LOGGER.info(
"Start Observer %s with version %s - %s",
self.image,
self.version,
self.sys_docker.network.observer,
)


@ -65,6 +65,17 @@ class CliUpdateError(CliError):
"""Error on update of a HA cli.""" """Error on update of a HA cli."""
# Observer
class ObserverError(HassioError):
"""General Observer exception."""
class ObserverUpdateError(ObserverError):
"""Error on update of a Observer."""
# Multicast


@ -10,6 +10,7 @@ from ..exceptions import (
CoreDNSError,
HomeAssistantError,
MulticastError,
+ObserverError,
)
_LOGGER: logging.Logger = logging.getLogger(__name__)
@ -22,6 +23,7 @@ RUN_UPDATE_CLI = 28100
RUN_UPDATE_DNS = 30100
RUN_UPDATE_AUDIO = 30200
RUN_UPDATE_MULTICAST = 30300
+RUN_UPDATE_OBSERVER = 30400
RUN_RELOAD_ADDONS = 10800
RUN_RELOAD_SNAPSHOTS = 72000
@ -35,6 +37,7 @@ RUN_WATCHDOG_HOMEASSISTANT_API = 120
RUN_WATCHDOG_DNS_DOCKER = 30
RUN_WATCHDOG_AUDIO_DOCKER = 60
RUN_WATCHDOG_CLI_DOCKER = 60
+RUN_WATCHDOG_OBSERVER_DOCKER = 60
RUN_WATCHDOG_MULTICAST_DOCKER = 60
RUN_WATCHDOG_ADDON_DOCKER = 30
@ -60,6 +63,7 @@ class Tasks(CoreSysAttributes):
self.sys_scheduler.register_task(self._update_dns, RUN_UPDATE_DNS)
self.sys_scheduler.register_task(self._update_audio, RUN_UPDATE_AUDIO)
self.sys_scheduler.register_task(self._update_multicast, RUN_UPDATE_MULTICAST)
+self.sys_scheduler.register_task(self._update_observer, RUN_UPDATE_OBSERVER)
# Reload
self.sys_scheduler.register_task(self.sys_store.reload, RUN_RELOAD_ADDONS)
@ -86,6 +90,9 @@ class Tasks(CoreSysAttributes):
self.sys_scheduler.register_task(
self._watchdog_cli_docker, RUN_WATCHDOG_CLI_DOCKER
)
self.sys_scheduler.register_task(
self._watchdog_observer_docker, RUN_WATCHDOG_OBSERVER_DOCKER
)
self.sys_scheduler.register_task(
self._watchdog_multicast_docker, RUN_WATCHDOG_MULTICAST_DOCKER
)
@ -225,6 +232,14 @@ class Tasks(CoreSysAttributes):
_LOGGER.info("Found new PulseAudio plugin version") _LOGGER.info("Found new PulseAudio plugin version")
await self.sys_plugins.audio.update() await self.sys_plugins.audio.update()
async def _update_observer(self):
"""Check and run update of Observer plugin."""
if not self.sys_plugins.observer.need_update:
return
_LOGGER.info("Found new Observer plugin version")
await self.sys_plugins.observer.update()
async def _update_multicast(self):
"""Check and run update of multicast."""
if not self.sys_plugins.multicast.need_update:
@ -278,6 +293,21 @@ class Tasks(CoreSysAttributes):
except CliError:
_LOGGER.error("Watchdog cli reanimation failed!")
async def _watchdog_observer_docker(self):
"""Check running state of Docker and start if they is close."""
# if observer plugin is active
if (
await self.sys_plugins.observer.is_running()
or self.sys_plugins.observer.in_progress
):
return
_LOGGER.warning("Watchdog found a problem with observer plugin!")
try:
await self.sys_plugins.observer.start()
except ObserverError:
_LOGGER.error("Watchdog observer reanimation failed!")
async def _watchdog_multicast_docker(self):
"""Check running state of Docker and start it if it is stopped."""
# if multicast plugin is active


@ -10,6 +10,7 @@ from .audio import Audio
from .cli import HaCli
from .dns import CoreDNS
from .multicast import Multicast
+from .observer import Observer
_LOGGER: logging.Logger = logging.getLogger(__name__)
@ -20,6 +21,7 @@ class PluginManager(CoreSysAttributes):
required_cli: LegacyVersion = pkg_parse("26")
required_dns: LegacyVersion = pkg_parse("9")
required_audio: LegacyVersion = pkg_parse("17")
+required_observer: LegacyVersion = pkg_parse("2")
required_multicast: LegacyVersion = pkg_parse("3")
def __init__(self, coresys: CoreSys):
@ -29,6 +31,7 @@ class PluginManager(CoreSysAttributes):
self._cli: HaCli = HaCli(coresys)
self._dns: CoreDNS = CoreDNS(coresys)
self._audio: Audio = Audio(coresys)
+self._observer: Observer = Observer(coresys)
self._multicast: Multicast = Multicast(coresys)
@property
@ -46,6 +49,11 @@ class PluginManager(CoreSysAttributes):
"""Return audio handler.""" """Return audio handler."""
return self._audio return self._audio
@property
def observer(self) -> Observer:
"""Return observer handler."""
return self._observer
@property
def multicast(self) -> Multicast:
"""Return multicast handler."""
@ -58,6 +66,7 @@ class PluginManager(CoreSysAttributes):
self.dns,
self.audio,
self.cli,
+self.observer,
self.multicast,
):
try:
@ -71,6 +80,7 @@ class PluginManager(CoreSysAttributes):
(self._audio, self.required_audio),
(self._dns, self.required_dns),
(self._cli, self.required_cli),
+(self._observer, self.required_observer),
(self._multicast, self.required_multicast),
):
# Check if need an update
@ -109,6 +119,7 @@ class PluginManager(CoreSysAttributes):
self.dns.repair(),
self.audio.repair(),
self.cli.repair(),
+self.observer.repair(),
self.multicast.repair(),
]
)


@ -11,12 +11,13 @@ from typing import Awaitable, Optional
import jinja2
-from ..const import ATTR_IMAGE, ATTR_VERSION, FILE_HASSIO_AUDIO
+from ..const import ATTR_IMAGE, ATTR_VERSION
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.audio import DockerAudio
from ..docker.stats import DockerStats
from ..exceptions import AudioError, AudioUpdateError, DockerAPIError
from ..utils.json import JsonConfig
+from .const import FILE_HASSIO_AUDIO
from .validate import SCHEMA_AUDIO_CONFIG
_LOGGER: logging.Logger = logging.getLogger(__name__)
@ -225,8 +226,9 @@ class Audio(JsonConfig, CoreSysAttributes):
_LOGGER.info("Repair Audio %s", self.version) _LOGGER.info("Repair Audio %s", self.version)
try: try:
await self.instance.install(self.version) await self.instance.install(self.version)
except DockerAPIError: except DockerAPIError as err:
_LOGGER.error("Repairing of Audio failed") _LOGGER.error("Repairing of Audio failed")
self.sys_capture_exception(err)
def pulse_client(self, input_profile=None, output_profile=None) -> str:
"""Generate an /etc/pulse/client.conf data."""


@ -8,12 +8,13 @@ import logging
import secrets
from typing import Awaitable, Optional
-from ..const import ATTR_ACCESS_TOKEN, ATTR_IMAGE, ATTR_VERSION, FILE_HASSIO_CLI
+from ..const import ATTR_ACCESS_TOKEN, ATTR_IMAGE, ATTR_VERSION
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.cli import DockerCli
from ..docker.stats import DockerStats
from ..exceptions import CliError, CliUpdateError, DockerAPIError
from ..utils.json import JsonConfig
+from .const import FILE_HASSIO_CLI
from .validate import SCHEMA_CLI_CONFIG
_LOGGER: logging.Logger = logging.getLogger(__name__)
@ -90,7 +91,7 @@ class HaCli(CoreSysAttributes, JsonConfig):
self.image = self.instance.image
self.save_data()
-# Run PulseAudio
+# Run CLI
with suppress(CliError):
if not await self.instance.is_running():
await self.start()
@ -192,5 +193,6 @@ class HaCli(CoreSysAttributes, JsonConfig):
_LOGGER.info("Repair HA cli %s", self.version) _LOGGER.info("Repair HA cli %s", self.version)
try: try:
await self.instance.install(self.version, latest=True) await self.instance.install(self.version, latest=True)
except DockerAPIError: except DockerAPIError as err:
_LOGGER.error("Repairing of HA cli failed") _LOGGER.error("Repairing of HA cli failed")
self.sys_capture_exception(err)


@ -0,0 +1,10 @@
"""Const for plugins."""
from pathlib import Path
from ..const import SUPERVISOR_DATA
FILE_HASSIO_AUDIO = Path(SUPERVISOR_DATA, "audio.json")
FILE_HASSIO_CLI = Path(SUPERVISOR_DATA, "cli.json")
FILE_HASSIO_DNS = Path(SUPERVISOR_DATA, "dns.json")
FILE_HASSIO_OBSERVER = Path(SUPERVISOR_DATA, "observer.json")
FILE_HASSIO_MULTICAST = Path(SUPERVISOR_DATA, "multicast.json")


@ -13,20 +13,14 @@ import attr
import jinja2
import voluptuous as vol
-from ..const import (
-ATTR_IMAGE,
-ATTR_SERVERS,
-ATTR_VERSION,
-DNS_SUFFIX,
-FILE_HASSIO_DNS,
-LogLevel,
-)
+from ..const import ATTR_IMAGE, ATTR_SERVERS, ATTR_VERSION, DNS_SUFFIX, LogLevel
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.dns import DockerDNS
from ..docker.stats import DockerStats
from ..exceptions import CoreDNSError, CoreDNSUpdateError, DockerAPIError
from ..utils.json import JsonConfig
from ..validate import dns_url
+from .const import FILE_HASSIO_DNS
from .validate import SCHEMA_DNS_CONFIG
_LOGGER: logging.Logger = logging.getLogger(__name__)
@ -322,6 +316,7 @@ class CoreDNS(JsonConfig, CoreSysAttributes):
write=False,
)
self.add_host(self.sys_docker.network.dns, ["dns"], write=False)
+self.add_host(self.sys_docker.network.observer, ["observer"], write=False)
def write_hosts(self) -> None:
"""Write hosts from memory to file."""
@ -419,8 +414,9 @@ class CoreDNS(JsonConfig, CoreSysAttributes):
_LOGGER.info("Repair CoreDNS %s", self.version) _LOGGER.info("Repair CoreDNS %s", self.version)
try: try:
await self.instance.install(self.version) await self.instance.install(self.version)
except DockerAPIError: except DockerAPIError as err:
_LOGGER.error("Repairing of CoreDNS failed") _LOGGER.error("Repairing of CoreDNS failed")
self.sys_capture_exception(err)
def _write_resolv(self, resolv_conf: Path) -> None: def _write_resolv(self, resolv_conf: Path) -> None:
"""Update/Write resolv.conf file.""" """Update/Write resolv.conf file."""


@ -7,12 +7,13 @@ from contextlib import suppress
import logging
from typing import Awaitable, Optional
-from ..const import ATTR_IMAGE, ATTR_VERSION, FILE_HASSIO_MULTICAST
+from ..const import ATTR_IMAGE, ATTR_VERSION
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.multicast import DockerMulticast
from ..docker.stats import DockerStats
from ..exceptions import DockerAPIError, MulticastError, MulticastUpdateError
from ..utils.json import JsonConfig
+from .const import FILE_HASSIO_MULTICAST
from .validate import SCHEMA_MULTICAST_CONFIG
_LOGGER: logging.Logger = logging.getLogger(__name__)
@ -205,5 +206,6 @@ class Multicast(JsonConfig, CoreSysAttributes):
_LOGGER.info("Repair Multicast %s", self.version) _LOGGER.info("Repair Multicast %s", self.version)
try: try:
await self.instance.install(self.version) await self.instance.install(self.version)
except DockerAPIError: except DockerAPIError as err:
_LOGGER.error("Repairing of Multicast failed") _LOGGER.error("Repairing of Multicast failed")
self.sys_capture_exception(err)


@ -0,0 +1,188 @@
"""Home Assistant observer plugin.
Code: https://github.com/home-assistant/plugin-observer
"""
import asyncio
from contextlib import suppress
import logging
import secrets
from typing import Awaitable, Optional
from ..const import ATTR_ACCESS_TOKEN, ATTR_IMAGE, ATTR_VERSION
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.observer import DockerObserver
from ..docker.stats import DockerStats
from ..exceptions import DockerAPIError, ObserverError, ObserverUpdateError
from ..utils.json import JsonConfig
from .const import FILE_HASSIO_OBSERVER
from .validate import SCHEMA_OBSERVER_CONFIG
_LOGGER: logging.Logger = logging.getLogger(__name__)
class Observer(CoreSysAttributes, JsonConfig):
"""Supervisor observer instance."""
def __init__(self, coresys: CoreSys):
"""Initialize observer handler."""
super().__init__(FILE_HASSIO_OBSERVER, SCHEMA_OBSERVER_CONFIG)
self.coresys: CoreSys = coresys
self.instance: DockerObserver = DockerObserver(coresys)
@property
def version(self) -> Optional[str]:
"""Return version of observer."""
return self._data.get(ATTR_VERSION)
@version.setter
def version(self, value: str) -> None:
"""Set current version of observer."""
self._data[ATTR_VERSION] = value
@property
def image(self) -> str:
"""Return current image of observer."""
if self._data.get(ATTR_IMAGE):
return self._data[ATTR_IMAGE]
return f"homeassistant/{self.sys_arch.supervisor}-hassio-observer"
@image.setter
def image(self, value: str) -> None:
"""Return current image of observer."""
self._data[ATTR_IMAGE] = value
@property
def latest_version(self) -> str:
"""Return version of latest observer."""
return self.sys_updater.version_observer
@property
def need_update(self) -> bool:
"""Return true if a observer update is available."""
return self.version != self.latest_version
@property
def supervisor_token(self) -> str:
"""Return an access token for the Observer API."""
return self._data.get(ATTR_ACCESS_TOKEN)
@property
def in_progress(self) -> bool:
"""Return True if a task is in progress."""
return self.instance.in_progress
async def load(self) -> None:
"""Load observer setup."""
# Check observer state
try:
# Evaluate Version if we lost this information
if not self.version:
self.version = await self.instance.get_latest_version()
await self.instance.attach(tag=self.version)
except DockerAPIError:
_LOGGER.info(
"No observer plugin Docker image %s found.", self.instance.image
)
# Install observer
with suppress(ObserverError):
await self.install()
else:
self.version = self.instance.version
self.image = self.instance.image
self.save_data()
# Run Observer
with suppress(ObserverError):
if not await self.instance.is_running():
await self.start()
async def install(self) -> None:
"""Install observer."""
_LOGGER.info("Setup observer plugin")
while True:
# read observer tag and install it
if not self.latest_version:
await self.sys_updater.reload()
if self.latest_version:
with suppress(DockerAPIError):
await self.instance.install(
self.latest_version, image=self.sys_updater.image_observer
)
break
_LOGGER.warning("Error on install observer plugin. Retry in 30sec")
await asyncio.sleep(30)
_LOGGER.info("observer plugin now installed")
self.version = self.instance.version
self.image = self.sys_updater.image_observer
self.save_data()
async def update(self, version: Optional[str] = None) -> None:
"""Update local HA observer."""
version = version or self.latest_version
old_image = self.image
if version == self.version:
_LOGGER.warning("Version %s is already installed for observer", version)
return
try:
await self.instance.update(version, image=self.sys_updater.image_observer)
except DockerAPIError as err:
_LOGGER.error("HA observer update failed")
raise ObserverUpdateError() from err
else:
self.version = version
self.image = self.sys_updater.image_observer
self.save_data()
# Cleanup
with suppress(DockerAPIError):
await self.instance.cleanup(old_image=old_image)
# Start observer
await self.start()
async def start(self) -> None:
"""Run observer."""
# Create new API token
if not self.supervisor_token:
self._data[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
self.save_data()
# Start Instance
_LOGGER.info("Start observer plugin")
try:
await self.instance.run()
except DockerAPIError as err:
_LOGGER.error("Can't start observer plugin")
raise ObserverError() from err
async def stats(self) -> DockerStats:
"""Return stats of observer."""
try:
return await self.instance.stats()
except DockerAPIError as err:
raise ObserverError() from err
def is_running(self) -> Awaitable[bool]:
"""Return True if Docker container is running.
Return a coroutine.
"""
return self.instance.is_running()
async def repair(self) -> None:
"""Repair observer container."""
if await self.instance.exists():
return
_LOGGER.info("Repair HA observer %s", self.version)
try:
await self.instance.install(self.version)
except DockerAPIError as err:
_LOGGER.error("Repairing of HA observer failed")
self.sys_capture_exception(err)


@ -35,3 +35,13 @@ SCHEMA_MULTICAST_CONFIG = vol.Schema(
{vol.Optional(ATTR_VERSION): version_tag, vol.Optional(ATTR_IMAGE): docker_image},
extra=vol.REMOVE_EXTRA,
)
SCHEMA_OBSERVER_CONFIG = vol.Schema(
{
vol.Optional(ATTR_VERSION): version_tag,
vol.Optional(ATTR_IMAGE): docker_image,
vol.Optional(ATTR_ACCESS_TOKEN): token,
},
extra=vol.REMOVE_EXTRA,
)


@ -26,6 +26,7 @@ from ..const import (
CRYPTO_AES128,
FOLDER_ADDONS,
FOLDER_HOMEASSISTANT,
+FOLDER_MEDIA,
FOLDER_SHARE,
FOLDER_SSL,
SNAPSHOT_FULL,
@ -33,7 +34,13 @@ from ..const import (
)
from ..validate import docker_image, network_port, repositories, version_tag
-ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]
+ALL_FOLDERS = [
+FOLDER_HOMEASSISTANT,
+FOLDER_SHARE,
+FOLDER_ADDONS,
+FOLDER_SSL,
+FOLDER_MEDIA,
+]
def unique_addons(addons_list):


@ -17,6 +17,7 @@ from .const import (
ATTR_HOMEASSISTANT,
ATTR_IMAGE,
ATTR_MULTICAST,
+ATTR_OBSERVER,
ATTR_SUPERVISOR,
FILE_HASSIO_UPDATER,
URL_HASSIO_VERSION,
@ -79,6 +80,11 @@ class Updater(JsonConfig, CoreSysAttributes):
"""Return latest version of Audio.""" """Return latest version of Audio."""
return self._data.get(ATTR_AUDIO) return self._data.get(ATTR_AUDIO)
@property
def version_observer(self) -> Optional[str]:
"""Return latest version of Observer."""
return self._data.get(ATTR_OBSERVER)
@property
def version_multicast(self) -> Optional[str]:
"""Return latest version of Multicast."""
@ -123,6 +129,15 @@ class Updater(JsonConfig, CoreSysAttributes):
return None
return self._data[ATTR_IMAGE][ATTR_AUDIO].format(arch=self.sys_arch.supervisor)
@property
def image_observer(self) -> Optional[str]:
"""Return latest version of Observer."""
if ATTR_OBSERVER not in self._data[ATTR_IMAGE]:
return None
return self._data[ATTR_IMAGE][ATTR_OBSERVER].format(
arch=self.sys_arch.supervisor
)
@property
def image_multicast(self) -> Optional[str]:
"""Return latest version of Multicast."""
@ -184,6 +199,7 @@ class Updater(JsonConfig, CoreSysAttributes):
self._data[ATTR_CLI] = data["cli"] self._data[ATTR_CLI] = data["cli"]
self._data[ATTR_DNS] = data["dns"] self._data[ATTR_DNS] = data["dns"]
self._data[ATTR_AUDIO] = data["audio"] self._data[ATTR_AUDIO] = data["audio"]
self._data[ATTR_OBSERVER] = data["observer"]
self._data[ATTR_MULTICAST] = data["multicast"] self._data[ATTR_MULTICAST] = data["multicast"]
# Update images for that versions # Update images for that versions
@ -192,6 +208,7 @@ class Updater(JsonConfig, CoreSysAttributes):
self._data[ATTR_IMAGE][ATTR_AUDIO] = data["image"]["audio"]
self._data[ATTR_IMAGE][ATTR_CLI] = data["image"]["cli"]
self._data[ATTR_IMAGE][ATTR_DNS] = data["image"]["dns"]
+self._data[ATTR_IMAGE][ATTR_OBSERVER] = data["image"]["observer"]
self._data[ATTR_IMAGE][ATTR_MULTICAST] = data["image"]["multicast"]
except KeyError as err:


@ -33,12 +33,20 @@ RE_GVARIANT_STRING_ESC: re.Pattern[Any] = re.compile(
RE_GVARIANT_STRING: re.Pattern[Any] = re.compile(
r"(?<=(?: |{|\[|\(|<))'(.*?)'(?=(?:|]|}|,|\)|>))"
)
-RE_GVARIANT_BINARY: re.Pattern[Any] = re.compile(r"\[byte (.*?)\]")
+RE_GVARIANT_BINARY: re.Pattern[Any] = re.compile(
+r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|\[byte (.*?)\]"
+)
+RE_GVARIANT_BINARY_STRING: re.Pattern[Any] = re.compile(
+r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|<?b\'(.*?)\'>?"
+)
RE_GVARIANT_TUPLE_O: re.Pattern[Any] = re.compile(r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(\()")
RE_GVARIANT_TUPLE_C: re.Pattern[Any] = re.compile(
r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(,?\))"
)
RE_BIN_STRING_OCT: re.Pattern[Any] = re.compile(r"\\\\(\d{3})")
RE_BIN_STRING_HEX: re.Pattern[Any] = re.compile(r"\\\\x(\d{2})")
RE_MONITOR_OUTPUT: re.Pattern[Any] = re.compile(r".+?: (?P<signal>[^ ].+) (?P<data>.*)")
# Map GDBus to errors
@ -66,6 +74,13 @@ def _convert_bytes(value: str) -> str:
return f"[{', '.join(str(char) for char in data)}]" return f"[{', '.join(str(char) for char in data)}]"
def _convert_bytes_string(value: str) -> str:
"""Convert bytes to string or byte-array."""
data = RE_BIN_STRING_OCT.sub(lambda x: chr(int(x.group(1), 8)), value)
data = RE_BIN_STRING_HEX.sub(lambda x: chr(int(f"0x{x.group(1)}", 0)), data)
return f"[{', '.join(str(char) for char in list(char for char in data.encode()))}]"
class DBus:
"""DBus handler."""
@ -120,15 +135,23 @@ class DBus:
def parse_gvariant(raw: str) -> Any:
"""Parse GVariant input to python."""
# Process first string
-json_raw = RE_GVARIANT_BINARY.sub(
-lambda x: _convert_bytes(x.group(1)),
-raw,
-)
json_raw = RE_GVARIANT_STRING_ESC.sub(
-lambda x: x.group(0).replace('"', '\\"'), json_raw
+lambda x: x.group(0).replace('"', '\\"'), raw
)
json_raw = RE_GVARIANT_STRING.sub(r'"\1"', json_raw)
# Handle Bytes
json_raw = RE_GVARIANT_BINARY.sub(
lambda x: x.group(0) if not x.group(1) else _convert_bytes(x.group(1)),
json_raw,
)
json_raw = RE_GVARIANT_BINARY_STRING.sub(
lambda x: x.group(0)
if not x.group(1)
else _convert_bytes_string(x.group(1)),
json_raw,
)
# Remove complex type handling
json_raw: str = RE_GVARIANT_TYPE.sub(
lambda x: x.group(0) if not x.group(1) else "", json_raw


@ -26,6 +26,7 @@ from .const import (
ATTR_LAST_BOOT,
ATTR_LOGGING,
ATTR_MULTICAST,
+ATTR_OBSERVER,
ATTR_PORT,
ATTR_PORTS,
ATTR_REFRESH_TOKEN,
@ -74,9 +75,13 @@ def dns_url(url: str) -> str:
raise vol.Invalid("Doesn't start with dns://") from None raise vol.Invalid("Doesn't start with dns://") from None
address: str = url[6:] # strip the dns:// off address: str = url[6:] # strip the dns:// off
try: try:
ipaddress.ip_address(address) # matches ipv4 or ipv6 addresses ip = ipaddress.ip_address(address) # matches ipv4 or ipv6 addresses
except ValueError: except ValueError:
raise vol.Invalid(f"Invalid DNS URL: {url}") from None raise vol.Invalid(f"Invalid DNS URL: {url}") from None
# Currently only IPv4 work with docker network
if ip.version != 4:
raise vol.Invalid(f"Only IPv4 is working for DNS: {url}") from None
return url return url
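With this change `dns_url` inspects the parsed address and rejects anything that is not IPv4; a standalone sketch of the resulting behaviour:

```python
# Standalone sketch of the stricter dns_url behaviour: only IPv4 dns:// URLs pass.
import ipaddress
import voluptuous as vol

def dns_url(url: str) -> str:
    """Simplified restatement of the validator above."""
    if not url.lower().startswith("dns://"):
        raise vol.Invalid("Doesn't start with dns://") from None
    address = url[6:]
    try:
        ip = ipaddress.ip_address(address)
    except ValueError:
        raise vol.Invalid(f"Invalid DNS URL: {url}") from None
    if ip.version != 4:
        raise vol.Invalid(f"Only IPv4 is working for DNS: {url}") from None
    return url

dns_url("dns://8.8.8.8")                 # accepted
# dns_url("dns://2606:4700:4700::1111")  # now raises vol.Invalid
```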
@ -137,6 +142,7 @@ SCHEMA_UPDATER_CONFIG = vol.Schema(
vol.Optional(ATTR_CLI): vol.All(version_tag, str),
vol.Optional(ATTR_DNS): vol.All(version_tag, str),
vol.Optional(ATTR_AUDIO): vol.All(version_tag, str),
+vol.Optional(ATTR_OBSERVER): vol.All(version_tag, str),
vol.Optional(ATTR_MULTICAST): vol.All(version_tag, str),
vol.Optional(ATTR_IMAGE, default=dict): vol.Schema(
{
@ -145,6 +151,7 @@ SCHEMA_UPDATER_CONFIG = vol.Schema(
vol.Optional(ATTR_CLI): docker_image,
vol.Optional(ATTR_DNS): docker_image,
vol.Optional(ATTR_AUDIO): docker_image,
+vol.Optional(ATTR_OBSERVER): docker_image,
vol.Optional(ATTR_MULTICAST): docker_image,
},
extra=vol.REMOVE_EXTRA,


@ -26,6 +26,7 @@ async def test_dns_url_v4_good():
def test_dns_url_v6_good():
"""Test the DNS validator with known-good ipv6 DNS URLs."""
+with pytest.raises(vol.error.Invalid):
for url in DNS_GOOD_V6:
assert validate.dns_url(url)
@ -37,6 +38,7 @@ def test_dns_server_list_v4():
def test_dns_server_list_v6():
"""Test a list with v6 addresses."""
+with pytest.raises(vol.error.Invalid):
assert validate.dns_server_list(DNS_GOOD_V6)
@ -44,9 +46,11 @@ def test_dns_server_list_combined():
"""Test a list with both v4 and v6 addresses.""" """Test a list with both v4 and v6 addresses."""
combined = DNS_GOOD_V4 + DNS_GOOD_V6 combined = DNS_GOOD_V4 + DNS_GOOD_V6
# test the matches # test the matches
assert validate.dns_server_list(combined) with pytest.raises(vol.error.Invalid):
validate.dns_server_list(combined)
# test max_length is OK still # test max_length is OK still
assert validate.dns_server_list(combined) with pytest.raises(vol.error.Invalid):
validate.dns_server_list(combined)
# test that it fails when the list is too long # test that it fails when the list is too long
with pytest.raises(vol.error.Invalid): with pytest.raises(vol.error.Invalid):
validate.dns_server_list(combined + combined + combined + combined) validate.dns_server_list(combined + combined + combined + combined)
@ -72,6 +76,7 @@ def test_version_complex():
"""Test version simple with good version.""" """Test version simple with good version."""
for version in ( for version in (
"landingpage", "landingpage",
"dev",
"1c002dd", "1c002dd",
"1.1.1", "1.1.1",
"1.0", "1.0",


@ -404,6 +404,54 @@ def test_networkmanager_binary_data():
]
def test_networkmanager_binary_string_data():
"""Test NetworkManager Binary string datastrings."""
raw = "({'802-11-wireless': {'mac-address-blacklist': <@as []>, 'mac-address': <b'*~_\\\\035\\\\311'>, 'mode': <'infrastructure'>, 'security': <'802-11-wireless-security'>, 'seen-bssids': <['7C:2E:BD:98:1B:06']>, 'ssid': <[byte 0x4e, 0x45, 0x54, 0x54]>}, 'connection': {'id': <'NETT'>, 'interface-name': <'wlan0'>, 'permissions': <@as []>, 'timestamp': <uint64 1598526799>, 'type': <'802-11-wireless'>, 'uuid': <'13f9af79-a6e9-4e07-9353-165ad57bf1a8'>}, 'ipv6': {'address-data': <@aa{sv} []>, 'addresses': <@a(ayuay) []>, 'dns': <@aay []>, 'dns-search': <@as []>, 'method': <'auto'>, 'route-data': <@aa{sv} []>, 'routes': <@a(ayuayu) []>}, '802-11-wireless-security': {'auth-alg': <'open'>, 'key-mgmt': <'wpa-psk'>}, 'ipv4': {'address-data': <@aa{sv} []>, 'addresses': <@aau []>, 'dns': <@au []>, 'dns-search': <@as []>, 'method': <'auto'>, 'route-data': <@aa{sv} []>, 'routes': <@aau []>}, 'proxy': {}},)"
data = DBus.parse_gvariant(raw)
assert data == [
{
"802-11-wireless": {
"mac-address": [42, 126, 95, 29, 195, 137],
"mac-address-blacklist": [],
"mode": "infrastructure",
"security": "802-11-wireless-security",
"seen-bssids": ["7C:2E:BD:98:1B:06"],
"ssid": [78, 69, 84, 84],
},
"802-11-wireless-security": {"auth-alg": "open", "key-mgmt": "wpa-psk"},
"connection": {
"id": "NETT",
"interface-name": "wlan0",
"permissions": [],
"timestamp": 1598526799,
"type": "802-11-wireless",
"uuid": "13f9af79-a6e9-4e07-9353-165ad57bf1a8",
},
"ipv4": {
"address-data": [],
"addresses": [],
"dns": [],
"dns-search": [],
"method": "auto",
"route-data": [],
"routes": [],
},
"ipv6": {
"address-data": [],
"addresses": [],
"dns": [],
"dns-search": [],
"method": "auto",
"route-data": [],
"routes": [],
},
"proxy": {},
}
]
def test_v6():
"""Test IPv6 Property."""
raw = "({'addresses': <[([byte 0x20, 0x01, 0x04, 0x70, 0x79, 0x2d, 0x00, 0x01, 0x00, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10], uint32 64, [byte 0x20, 0x01, 0x04, 0x70, 0x79, 0x2d, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01])]>, 'dns': <[[byte 0x20, 0x01, 0x04, 0x70, 0x79, 0x2d, 0x00, 0x01, 0x00, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05]]>})"