Merge pull request #1421 from home-assistant/dev

Release 194
Pascal Vizeli 2020-01-07 21:09:50 +01:00 committed by GitHub
commit d1b4521290
30 changed files with 662 additions and 533 deletions

API.md
View File

@@ -56,12 +56,10 @@ The addons from `addons` are only installed one.
             "installed": "INSTALL_VERSION",
             "icon": "bool",
             "logo": "bool",
-            "state": "started|stopped",
+            "state": "started|stopped"
         }
     ],
-    "addons_repositories": [
-        "REPO_URL"
-    ]
+    "addons_repositories": ["REPO_URL"]
 }
 ```
@@ -85,9 +83,7 @@ Optional:
     "debug": "bool",
     "debug_block": "bool",
     "logging": "debug|info|warning|error|critical",
-    "addons_repositories": [
-        "REPO_URL"
-    ]
+    "addons_repositories": ["REPO_URL"]
 }
 ```
@@ -100,6 +96,7 @@ Reload addons/version.
 Output is the raw docker log.
 - GET `/supervisor/stats`
+
 ```json
 {
     "cpu_percent": 0.0,
@@ -140,6 +137,7 @@ Repair overlayfs issue and restore lost images
 - POST `/snapshots/new/upload`
 return:
+
 ```json
 {
     "slug": ""
@@ -156,6 +154,7 @@ return:
 ```
 return:
+
 ```json
 {
     "slug": ""
@@ -174,6 +173,7 @@ return:
 ```
 return:
+
 ```json
 {
     "slug": ""
@@ -247,7 +247,7 @@ return:
     "kernel": "4.15.7|null",
     "chassis": "specific|null",
     "deployment": "stable|beta|dev|null",
-    "cpe": "xy|null",
+    "cpe": "xy|null"
 }
 ```
@@ -255,7 +255,7 @@ return:
 ```json
 {
-    "hostname": "",
+    "hostname": ""
 }
 ```
@@ -264,6 +264,7 @@ return:
 #### Services
 - GET `/host/services`
+
 ```json
 {
     "services": [
@@ -285,17 +286,20 @@ return:
 ### HassOS
 - GET `/hassos/info`
+
 ```json
 {
     "version": "2.3",
     "version_cli": "7",
     "version_latest": "2.4",
     "version_cli_latest": "8",
-    "board": "ova|rpi"
+    "board": "ova|rpi",
+    "boot": "rauc boot slot"
 }
 ```
 - POST `/hassos/update`
+
 ```json
 {
     "version": "optional"
@@ -303,6 +307,7 @@ return:
 ```
 - POST `/hassos/update/cli`
+
 ```json
 {
     "version": "optional"
@@ -316,6 +321,7 @@ Load host configs from a USB stick.
 ### Hardware
 - GET `/hardware/info`
+
 ```json
 {
     "serial": ["/dev/xy"],
@@ -336,6 +342,7 @@ Load host configs from a USB stick.
 ```
 - GET `/hardware/audio`
+
 ```json
 {
     "audio": {
@@ -421,6 +428,7 @@ Proxy to real home-assistant instance.
 Proxy to real websocket instance.
 - GET `/homeassistant/stats`
+
 ```json
 {
     "cpu_percent": 0.0,
@@ -568,7 +576,7 @@ This function is not callable by itself.
 ```json
 {
-    "protected": "bool",
+    "protected": "bool"
 }
 ```
@@ -597,6 +605,7 @@ Only supported for local build addons
 Write data to add-on stdin
 - GET `/addons/{addon}/stats`
+
 ```json
 {
     "cpu_percent": 0.0,
@@ -647,6 +656,7 @@ Need ingress session as cookie.
 ### discovery
 - GET `/discovery`
+
 ```json
 {
     "discovery": [
@@ -661,6 +671,7 @@ Need ingress session as cookie.
 ```
 - GET `/discovery/{UUID}`
+
 ```json
 {
     "addon": "slug",
@@ -671,6 +682,7 @@ Need ingress session as cookie.
 ```
 - POST `/discovery`
+
 ```json
 {
     "service": "name",
@@ -679,6 +691,7 @@ Need ingress session as cookie.
 ```
 return:
+
 ```json
 {
     "uuid": "uuid"
@@ -690,6 +703,7 @@ return:
 ### Services
 - GET `/services`
+
 ```json
 {
     "services": [
@@ -705,6 +719,7 @@ return:
 #### MQTT
 - GET `/services/mqtt`
+
 ```json
 {
     "addon": "name",
@@ -718,6 +733,7 @@ return:
 ```
 - POST `/services/mqtt`
+
 ```json
 {
     "host": "xy",
@@ -734,6 +750,7 @@ return:
 ### Misc
 - GET `/info`
+
 ```json
 {
     "supervisor": "version",
@@ -752,6 +769,7 @@ return:
 ### DNS
 - GET `/dns/info`
+
 ```json
 {
     "host": "ip-address",
@@ -763,6 +781,7 @@ return:
 ```
 - POST `/dns/options`
+
 ```json
 {
     "servers": ["dns://8.8.8.8"]
@@ -770,6 +789,7 @@ return:
 ```
 - POST `/dns/update`
+
 ```json
 {
     "version": "VERSION"
@@ -781,6 +801,7 @@ return:
 - GET `/dns/logs`
 - GET `/dns/stats`
+
 ```json
 {
     "cpu_percent": 0.0,
@@ -802,6 +823,7 @@ supervisor.
 You can call post `/auth`
 We support:
 - Json `{ "user|name": "...", "password": "..." }`
 - application/x-www-form-urlencoded `user|name=...&password=...`
 - BasicAuth
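As a usage sketch (not part of this commit), an add-on backend can forward a login attempt to the `/auth` endpoint above using the JSON payload form. The `http://hassio` base URL, the `HASSIO_TOKEN` environment variable, and the `X-Hassio-Key` header are assumptions about the add-on environment of this release and may differ on other Supervisor versions.

```python
"""Minimal sketch: check Home Assistant credentials through the Supervisor auth backend.

Assumed environment (not confirmed by this commit): the add-on reaches the
Supervisor at http://hassio and authenticates with the HASSIO_TOKEN
environment variable sent in the X-Hassio-Key header.
"""
import os

import requests


def check_login(username: str, password: str) -> bool:
    """Return True when the Supervisor accepts the JSON credential payload."""
    response = requests.post(
        "http://hassio/auth",
        json={"username": username, "password": password},
        headers={"X-Hassio-Key": os.environ.get("HASSIO_TOKEN", "")},
        timeout=10,
    )
    # 200 means the credentials were accepted; 401/403 means they were rejected.
    return response.status_code == 200
```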

View File

@@ -245,7 +245,7 @@ class AddonManager(CoreSysAttributes):
             raise AddonsError() from None
         else:
             self.data.update(store)
-            _LOGGER.info("Add-on '%s' successfully rebuilded", slug)
+            _LOGGER.info("Add-on '%s' successfully rebuilt", slug)
         # restore state
         if last_state == STATE_STARTED:

View File

@@ -90,12 +90,12 @@ from ..const import (
 from ..coresys import CoreSys
 from ..discovery.validate import valid_discovery_service
 from ..validate import (
-    ALSA_DEVICE,
+    alsa_device,
     DOCKER_PORTS,
     DOCKER_PORTS_DESCRIPTION,
-    NETWORK_PORT,
-    TOKEN,
-    UUID_MATCH,
+    network_port,
+    token,
+    uuid_match,
 )
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -182,7 +182,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
         ),
         vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
         vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
-            NETWORK_PORT, vol.Equal(0)
+            network_port, vol.Equal(0)
         ),
         vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
         vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
@@ -269,8 +269,8 @@ SCHEMA_ADDON_USER = vol.Schema(
     {
         vol.Required(ATTR_VERSION): vol.Coerce(str),
         vol.Optional(ATTR_IMAGE): vol.Coerce(str),
-        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
-        vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
+        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
+        vol.Optional(ATTR_ACCESS_TOKEN): token,
         vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
             str
         ),
@@ -278,8 +278,8 @@ SCHEMA_ADDON_USER = vol.Schema(
         vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
         vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
         vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
-        vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
-        vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
+        vol.Optional(ATTR_AUDIO_OUTPUT): alsa_device,
+        vol.Optional(ATTR_AUDIO_INPUT): alsa_device,
         vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
         vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
     },
@@ -386,7 +386,7 @@ def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
     elif typ.startswith(V_URL):
         return vol.Url()(value)
     elif typ.startswith(V_PORT):
-        return NETWORK_PORT(value)
+        return network_port(value)
     elif typ.startswith(V_MATCH):
         return vol.Match(match.group("match"))(str(value))
     elif typ.startswith(V_LIST):

View File

@@ -90,7 +90,7 @@ from ..const import (
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
-from ..validate import ALSA_DEVICE, DOCKER_PORTS
+from ..validate import alsa_device, DOCKER_PORTS
 from .utils import api_process, api_process_raw, api_validate
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -103,8 +103,8 @@ SCHEMA_OPTIONS = vol.Schema(
         vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
         vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
         vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
-        vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
-        vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
+        vol.Optional(ATTR_AUDIO_OUTPUT): alsa_device,
+        vol.Optional(ATTR_AUDIO_INPUT): alsa_device,
         vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
     }
 )

View File

@@ -24,13 +24,13 @@ from ..const import (
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
-from ..validate import DNS_SERVER_LIST
+from ..validate import dns_server_list
 from .utils import api_process, api_process_raw, api_validate
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 # pylint: disable=no-value-for-parameter
-SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): DNS_SERVER_LIST})
+SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): dns_server_list})
 SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})

View File

@@ -3,11 +3,12 @@ import asyncio
 import logging
 from typing import Any, Awaitable, Dict
-import voluptuous as vol
 from aiohttp import web
+import voluptuous as vol
 from ..const import (
     ATTR_BOARD,
+    ATTR_BOOT,
     ATTR_VERSION,
     ATTR_VERSION_CLI,
     ATTR_VERSION_CLI_LATEST,
@@ -33,6 +34,7 @@ class APIHassOS(CoreSysAttributes):
             ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
             ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
             ATTR_BOARD: self.sys_hassos.board,
+            ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
         }
     @api_process

View File

@@ -33,7 +33,7 @@ from ..const import (
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
-from ..validate import DOCKER_IMAGE, NETWORK_PORT
+from ..validate import docker_image, network_port
 from .utils import api_process, api_process_raw, api_validate
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -42,9 +42,9 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 SCHEMA_OPTIONS = vol.Schema(
     {
         vol.Optional(ATTR_BOOT): vol.Boolean(),
-        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(DOCKER_IMAGE),
+        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(docker_image),
         vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Maybe(vol.Coerce(str)),
-        vol.Optional(ATTR_PORT): NETWORK_PORT,
+        vol.Optional(ATTR_PORT): network_port,
         vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
         vol.Optional(ATTR_SSL): vol.Boolean(),
         vol.Optional(ATTR_WATCHDOG): vol.Boolean(),

View File

@@ -41,7 +41,7 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..utils.validate import validate_timezone
-from ..validate import CHANNELS, LOG_LEVEL, REPOSITORIES, WAIT_BOOT
+from ..validate import channels, log_level, repositories, wait_boot
 from .utils import api_process, api_process_raw, api_validate
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -49,11 +49,11 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 # pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema(
     {
-        vol.Optional(ATTR_CHANNEL): CHANNELS,
-        vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
+        vol.Optional(ATTR_CHANNEL): channels,
+        vol.Optional(ATTR_ADDONS_REPOSITORIES): repositories,
         vol.Optional(ATTR_TIMEZONE): validate_timezone,
-        vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
-        vol.Optional(ATTR_LOGGING): LOG_LEVEL,
+        vol.Optional(ATTR_WAIT_BOOT): wait_boot,
+        vol.Optional(ATTR_LOGGING): log_level,
         vol.Optional(ATTR_DEBUG): vol.Boolean(),
         vol.Optional(ATTR_DEBUG_BLOCK): vol.Boolean(),
     }

View File

@@ -2,7 +2,7 @@
 from pathlib import Path
 from ipaddress import ip_network
-HASSIO_VERSION = "193"
+HASSIO_VERSION = "194"
 URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"

View File

@@ -79,7 +79,11 @@ class HassIO(CoreSysAttributes):
         """Start Hass.io orchestration."""
         await self.sys_api.start()
-        # on release channel, try update itself
+        # Mark booted partition as healthy
+        if self.sys_hassos.available:
+            await self.sys_hassos.mark_healthy()
+
+        # On release channel, try update itself
         if self.sys_supervisor.need_update:
             try:
                 if self.sys_dev:
@@ -92,7 +96,7 @@ class HassIO(CoreSysAttributes):
                     "future version of Home Assistant!"
                 )
-        # start addon mark as initialize
+        # Start addon mark as initialize
         await self.sys_addons.boot(STARTUP_INITIALIZE)
         try:

View File

@ -1,58 +1,17 @@
{ {
"raspberrypi": [ "raspberrypi": ["armhf"],
"armhf" "raspberrypi2": ["armv7", "armhf"],
], "raspberrypi3": ["armv7", "armhf"],
"raspberrypi2": [ "raspberrypi3-64": ["aarch64", "armv7", "armhf"],
"armv7", "raspberrypi4": ["armv7", "armhf"],
"armhf" "raspberrypi4-64": ["aarch64", "armv7", "armhf"],
], "tinker": ["armv7", "armhf"],
"raspberrypi3": [ "odroid-c2": ["aarch64", "armv7", "armhf"],
"armv7", "odroid-n2": ["aarch64", "armv7", "armhf"],
"armhf" "odroid-xu": ["armv7", "armhf"],
], "qemux86": ["i386"],
"raspberrypi3-64": [ "qemux86-64": ["amd64", "i386"],
"aarch64", "qemuarm": ["armhf"],
"armv7", "qemuarm-64": ["aarch64"],
"armhf" "intel-nuc": ["amd64", "i386"]
],
"raspberrypi4": [
"armv7",
"armhf"
],
"raspberrypi4-64": [
"aarch64",
"armv7",
"armhf"
],
"tinker": [
"armv7",
"armhf"
],
"odroid-c2": [
"aarch64"
],
"odroid-xu": [
"armv7",
"armhf"
],
"orangepi-prime": [
"aarch64"
],
"qemux86": [
"i386"
],
"qemux86-64": [
"amd64",
"i386"
],
"qemuarm": [
"armhf"
],
"qemuarm-64": [
"aarch64"
],
"intel-nuc": [
"amd64",
"i386"
]
} }

View File

@@ -1,5 +1,7 @@
 """D-Bus interface for rauc."""
 import logging
+from typing import Optional
+from enum import Enum
 from .interface import DBusInterface
 from .utils import dbus_connected
@@ -12,9 +14,25 @@ DBUS_NAME = "de.pengutronix.rauc"
 DBUS_OBJECT = "/"
+class RaucState(str, Enum):
+    """Rauc slot states."""
+    GOOD = "good"
+    BAD = "bad"
+    ACTIVE = "active"
 class Rauc(DBusInterface):
     """Handle D-Bus interface for rauc."""
+    def __init__(self):
+        """Initialize Properties."""
+        self._operation: Optional[str] = None
+        self._last_error: Optional[str] = None
+        self._compatible: Optional[str] = None
+        self._variant: Optional[str] = None
+        self._boot_slot: Optional[str] = None
     async def connect(self):
         """Connect to D-Bus."""
         try:
@@ -24,6 +42,31 @@ class Rauc(DBusInterface):
         except DBusInterfaceError:
             _LOGGER.warning("Host has no rauc support. OTA updates have been disabled.")
+    @property
+    def operation(self) -> Optional[str]:
+        """Return the current (global) operation."""
+        return self._operation
+    @property
+    def last_error(self) -> Optional[str]:
+        """Return the last message of the last error that occurred."""
+        return self._last_error
+    @property
+    def compatible(self) -> Optional[str]:
+        """Return the system compatible string."""
+        return self._compatible
+    @property
+    def variant(self) -> Optional[str]:
+        """Return the system variant string."""
+        return self._variant
+    @property
+    def boot_slot(self) -> Optional[str]:
+        """Return the used boot slot."""
+        return self._boot_slot
     @dbus_connected
     def install(self, raucb_file):
         """Install rauc bundle file.
@@ -40,14 +83,6 @@ class Rauc(DBusInterface):
         """
         return self.dbus.Installer.GetSlotStatus()
-    @dbus_connected
-    def get_properties(self):
-        """Return rauc informations.
-        Return a coroutine.
-        """
-        return self.dbus.get_properties(f"{DBUS_NAME}.Installer")
     @dbus_connected
     def signal_completed(self):
         """Return a signal wrapper for completed signal.
@@ -55,3 +90,25 @@ class Rauc(DBusInterface):
         Return a coroutine.
         """
         return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
+    @dbus_connected
+    def mark(self, state: RaucState, slot_identifier: str):
+        """Get slot status.
+        Return a coroutine.
+        """
+        return self.dbus.Installer.Mark(state, slot_identifier)
+    @dbus_connected
+    async def update(self):
+        """Update Properties."""
+        data = await self.dbus.get_properties(f"{DBUS_NAME}.Installer")
+        if not data:
+            _LOGGER.warning("Can't get properties for rauc")
+            return
+        self._operation = data.get("Operation")
+        self._last_error = data.get("LastError")
+        self._compatible = data.get("Compatible")
+        self._variant = data.get("Variant")
+        self._boot_slot = data.get("BootSlot")

View File

@@ -1,11 +1,11 @@
 """Discovery service for AdGuard."""
 import voluptuous as vol
-from hassio.validate import NETWORK_PORT
+from hassio.validate import network_port
 from ..const import ATTR_HOST, ATTR_PORT
 SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): NETWORK_PORT}
+    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): network_port}
 )

View File

@@ -1,11 +1,11 @@
 """Discovery service for Almond."""
 import voluptuous as vol
-from hassio.validate import NETWORK_PORT
+from hassio.validate import network_port
 from ..const import ATTR_HOST, ATTR_PORT
 SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): NETWORK_PORT}
+    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): network_port}
 )

View File

@@ -1,7 +1,7 @@
 """Discovery service for MQTT."""
 import voluptuous as vol
-from hassio.validate import NETWORK_PORT
+from hassio.validate import network_port
 from ..const import ATTR_HOST, ATTR_PORT, ATTR_API_KEY, ATTR_SERIAL
@@ -9,7 +9,7 @@ from ..const import ATTR_HOST, ATTR_PORT, ATTR_API_KEY, ATTR_SERIAL
 SCHEMA = vol.Schema(
     {
         vol.Required(ATTR_HOST): vol.Coerce(str),
-        vol.Required(ATTR_PORT): NETWORK_PORT,
+        vol.Required(ATTR_PORT): network_port,
         vol.Required(ATTR_SERIAL): vol.Coerce(str),
         vol.Required(ATTR_API_KEY): vol.Coerce(str),
     }

View File

@@ -1,11 +1,11 @@
 """Discovery service for Home Panel."""
 import voluptuous as vol
-from hassio.validate import NETWORK_PORT
+from hassio.validate import network_port
 from ..const import ATTR_HOST, ATTR_PORT
 SCHEMA = vol.Schema(
-    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): NETWORK_PORT}
+    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): network_port}
 )

View File

@@ -1,7 +1,7 @@
 """Discovery service for MQTT."""
 import voluptuous as vol
-from hassio.validate import NETWORK_PORT
+from hassio.validate import network_port
 from ..const import (
     ATTR_HOST,
@@ -16,7 +16,7 @@ from ..const import (
 SCHEMA = vol.Schema(
     {
         vol.Required(ATTR_HOST): vol.Coerce(str),
-        vol.Required(ATTR_PORT): NETWORK_PORT,
+        vol.Required(ATTR_PORT): network_port,
         vol.Optional(ATTR_USERNAME): vol.Coerce(str),
         vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
         vol.Optional(ATTR_SSL, default=False): vol.Boolean(),

View File

@@ -6,7 +6,7 @@ import voluptuous as vol
 from ..const import ATTR_ADDON, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_UUID
 from ..utils.validate import schema_or
-from ..validate import UUID_MATCH
+from ..validate import uuid_match
 def valid_discovery_service(service):
@@ -31,7 +31,7 @@ SCHEMA_DISCOVERY = vol.Schema(
     [
         vol.Schema(
             {
-                vol.Required(ATTR_UUID): UUID_MATCH,
+                vol.Required(ATTR_UUID): uuid_match,
                 vol.Required(ATTR_ADDON): vol.Coerce(str),
                 vol.Required(ATTR_SERVICE): valid_discovery_service,
                 vol.Required(ATTR_CONFIG): vol.Maybe(dict),

View File

@@ -17,7 +17,7 @@ from .docker.stats import DockerStats
 from .exceptions import CoreDNSError, CoreDNSUpdateError, DockerAPIError
 from .misc.forwarder import DNSForward
 from .utils.json import JsonConfig
-from .validate import DNS_URL, SCHEMA_DNS_CONFIG
+from .validate import dns_url, SCHEMA_DNS_CONFIG
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -113,17 +113,19 @@ class CoreDNS(JsonConfig, CoreSysAttributes):
             self.version = self.instance.version
             self.save_data()
+        # Fix dns server handling before 194 / Cleanup with version 200
+        if DNS_SERVERS == self.servers:
+            self.servers.clear()
+            self.save_data()
         # Start DNS forwarder
         self.sys_create_task(self.forwarder.start(self.sys_docker.network.dns))
-        self._update_local_resolv()
+        # Reset container configuration
+        if await self.instance.is_running():
+            with suppress(DockerAPIError):
+                await self.instance.stop()
         # Run CoreDNS
         with suppress(CoreDNSError):
-            if await self.instance.is_running():
-                await self.restart()
-            else:
-                await self.start()
+            await self.start()
     async def unload(self) -> None:
@@ -195,8 +197,10 @@ class CoreDNS(JsonConfig, CoreSysAttributes):
             raise CoreDNSError() from None
     async def reset(self) -> None:
-        """Reset Config / Hosts."""
-        self.servers = DNS_SERVERS
+        """Reset DNS and hosts."""
+        # Reset manually defined DNS
+        self.servers.clear()
+        self.save_data()
         # Resets hosts
         with suppress(OSError):
@@ -216,11 +220,20 @@ class CoreDNS(JsonConfig, CoreSysAttributes):
             _LOGGER.error("Can't read coredns template file: %s", err)
             raise CoreDNSError() from None
-        # Prepare DNS serverlist: Prio 1 Local, Prio 2 Manual, Prio 3 Fallback
+        # Prepare DNS serverlist: Prio 1 Manual, Prio 2 Local, Prio 3 Fallback
         local_dns: List[str] = self.sys_host.network.dns_servers or ["dns://127.0.0.11"]
-        for server in local_dns + self.servers + DNS_SERVERS:
+        servers: List[str] = self.servers + local_dns + DNS_SERVERS
+        _LOGGER.debug(
+            "config-dns = %s, local-dns = %s , backup-dns = %s",
+            self.servers,
+            local_dns,
+            DNS_SERVERS,
+        )
+        for server in servers:
             try:
-                DNS_URL(server)
+                dns_url(server)
                 if server not in dns_servers:
                     dns_servers.append(server)
             except vol.Invalid:
@@ -346,33 +359,3 @@ class CoreDNS(JsonConfig, CoreSysAttributes):
             await self.instance.install(self.version)
         except DockerAPIError:
             _LOGGER.error("Repairing of CoreDNS fails")
-    def _update_local_resolv(self) -> None:
-        """Update local resolv file."""
-        resolv_lines: List[str] = []
-        nameserver = f"nameserver {self.sys_docker.network.dns!s}"
-        # Read resolv config
-        try:
-            with RESOLV_CONF.open("r") as resolv:
-                for line in resolv.readlines():
-                    if not line:
-                        continue
-                    resolv_lines.append(line.strip())
-        except OSError as err:
-            _LOGGER.warning("Can't read local resolv: %s", err)
-            return
-        if nameserver in resolv_lines:
-            return
-        _LOGGER.info("Update resolv from Supervisor")
-        # Write config back to resolv
-        resolv_lines.append(nameserver)
-        try:
-            with RESOLV_CONF.open("w") as resolv:
-                for line in resolv_lines:
-                    resolv.write(f"{line}\n")
-        except OSError as err:
-            _LOGGER.warning("Can't write local resolv: %s", err)
-            return

View File

@@ -54,4 +54,9 @@ class DockerDNS(DockerInterface, CoreSysAttributes):
         )
         self._meta = docker_container.attrs
-        _LOGGER.info("Start DNS %s with version %s", self.image, self.version)
+        _LOGGER.info(
+            "Start DNS %s with version %s - %s",
+            self.image,
+            self.version,
+            self.sys_docker.network.dns,
+        )

View File

@@ -427,9 +427,10 @@ class DockerInterface(CoreSysAttributes):
                     continue
                 available_version.append(version)
-            assert available_version
+            if not available_version:
+                raise ValueError()
-        except (docker.errors.DockerException, AssertionError):
+        except (docker.errors.DockerException, ValueError):
             _LOGGER.debug("No version found for %s", self.image)
             raise DockerAPIError()
         else:

View File

@@ -17,6 +17,7 @@ from .exceptions import (
     HassOSUpdateError,
     DockerAPIError,
 )
+from .dbus.rauc import RaucState
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -111,24 +112,27 @@ class HassOS(CoreSysAttributes):
     async def load(self) -> None:
         """Load HassOS data."""
         try:
-            # Check needed host functions
-            assert self.sys_dbus.rauc.is_connected
-            assert self.sys_dbus.systemd.is_connected
-            assert self.sys_dbus.hostname.is_connected
-            assert self.sys_host.info.cpe is not None
+            if self.sys_host.info.cpe is None:
+                raise TypeError()
             cpe = CPE(self.sys_host.info.cpe)
-            assert cpe.get_product()[0] == "hassos"
-        except (AssertionError, NotImplementedError):
+            if cpe.get_product()[0] != "hassos":
+                raise TypeError()
+        except TypeError:
             _LOGGER.debug("Found no HassOS")
             return
-        else:
-            self._available = True
         # Store meta data
+        self._available = True
         self._version = cpe.get_version()[0]
         self._board = cpe.get_target_hardware()[0]
-        _LOGGER.info("Detect HassOS %s on host system", self.version)
+        await self.sys_dbus.rauc.update()
+        _LOGGER.info(
+            "Detect HassOS %s / BootSlot %s", self.version, self.sys_dbus.rauc.boot_slot
+        )
         with suppress(DockerAPIError):
             await self.instance.attach(tag="latest")
@@ -174,8 +178,8 @@ class HassOS(CoreSysAttributes):
             return
         # Update fails
-        rauc_status = await self.sys_dbus.get_properties()
-        _LOGGER.error("HassOS update fails with: %s", rauc_status.get("LastError"))
+        await self.sys_dbus.rauc.update()
+        _LOGGER.error("HassOS update fails with: %s", self.sys_dbus.rauc.last_error)
         raise HassOSUpdateError()
     async def update_cli(self, version: Optional[str] = None) -> None:
@@ -206,3 +210,12 @@ class HassOS(CoreSysAttributes):
             await self.instance.install(self.version_cli, latest=True)
         except DockerAPIError:
             _LOGGER.error("Repairing of HassOS CLI fails")
+    async def mark_healthy(self):
+        """Set booted partition as good for rauc."""
+        try:
+            response = await self.sys_dbus.rauc.mark(RaucState.GOOD, "booted")
+        except DBusError:
+            _LOGGER.error("Can't mark booted partition as healty!")
+        else:
+            _LOGGER.info("Rauc: %s - %s", self.sys_dbus.rauc.boot_slot, response[1])

View File

@@ -4,7 +4,7 @@ from typing import Any, Dict
 from hassio.addons.addon import Addon
 from hassio.exceptions import ServicesError
-from hassio.validate import NETWORK_PORT
+from hassio.validate import network_port
 import voluptuous as vol
 from ..const import (
@@ -26,7 +26,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 SCHEMA_SERVICE_MQTT = vol.Schema(
     {
         vol.Required(ATTR_HOST): vol.Coerce(str),
-        vol.Required(ATTR_PORT): NETWORK_PORT,
+        vol.Required(ATTR_PORT): network_port,
         vol.Optional(ATTR_USERNAME): vol.Coerce(str),
         vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),

View File

@@ -31,7 +31,7 @@ from ..const import (
     SNAPSHOT_FULL,
     SNAPSHOT_PARTIAL,
 )
-from ..validate import DOCKER_IMAGE, NETWORK_PORT, REPOSITORIES
+from ..validate import docker_image, network_port, repositories
 ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]
@@ -59,11 +59,11 @@ SCHEMA_SNAPSHOT = vol.Schema(
         vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema(
             {
                 vol.Optional(ATTR_VERSION): vol.Coerce(str),
-                vol.Inclusive(ATTR_IMAGE, "custom_hass"): DOCKER_IMAGE,
+                vol.Inclusive(ATTR_IMAGE, "custom_hass"): docker_image,
                 vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Coerce(str),
                 vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
                 vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
-                vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
+                vol.Optional(ATTR_PORT, default=8123): network_port,
                 vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
                 vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
                 vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
@@ -90,7 +90,7 @@ SCHEMA_SNAPSHOT = vol.Schema(
             ],
             unique_addons,
         ),
-        vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
+        vol.Optional(ATTR_REPOSITORIES, default=list): repositories,
     },
     extra=vol.ALLOW_EXTRA,
 )

View File

@@ -124,9 +124,10 @@ def secure_path(tar: tarfile.TarFile) -> Generator[tarfile.TarInfo, None, None]:
     for member in tar:
         file_path = Path(member.name)
         try:
-            assert not file_path.is_absolute()
+            if file_path.is_absolute():
+                raise ValueError()
             Path("/fake", file_path).resolve().relative_to("/fake")
-        except (ValueError, RuntimeError, AssertionError):
+        except (ValueError, RuntimeError):
             _LOGGER.warning("Issue with file %s", file_path)
             continue
         else:

View File

@@ -1,6 +1,7 @@
 """Validate functions."""
 import re
 import uuid
+import ipaddress
 import voluptuous as vol
@@ -35,27 +36,41 @@ from .const import (
     CHANNEL_BETA,
     CHANNEL_DEV,
     CHANNEL_STABLE,
-    DNS_SERVERS,
 )
 from .utils.validate import validate_timezone
 RE_REPOSITORY = re.compile(r"^(?P<url>[^#]+)(?:#(?P<branch>[\w\-]+))?$")
 # pylint: disable=no-value-for-parameter
-NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
-WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
-DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
-ALSA_DEVICE = vol.Maybe(vol.Match(r"\d+,\d+"))
-CHANNELS = vol.In([CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV])
-UUID_MATCH = vol.Match(r"^[0-9a-f]{32}$")
-SHA256 = vol.Match(r"^[0-9a-f]{64}$")
-TOKEN = vol.Match(r"^[0-9a-f]{32,256}$")
-LOG_LEVEL = vol.In(["debug", "info", "warning", "error", "critical"])
-DNS_URL = vol.Match(r"^dns://\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
-DNS_SERVER_LIST = vol.All([DNS_URL], vol.Length(max=8))
-def validate_repository(repository):
+# pylint: disable=invalid-name
+network_port = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
+wait_boot = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
+docker_image = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
+alsa_device = vol.Maybe(vol.Match(r"\d+,\d+"))
+channels = vol.In([CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV])
+uuid_match = vol.Match(r"^[0-9a-f]{32}$")
+sha256 = vol.Match(r"^[0-9a-f]{64}$")
+token = vol.Match(r"^[0-9a-f]{32,256}$")
+log_level = vol.In(["debug", "info", "warning", "error", "critical"])
+def dns_url(url: str) -> str:
+    """ takes a DNS url (str) and validates that it matches the scheme dns://<ip address>."""
+    if not url.lower().startswith("dns://"):
+        raise vol.Invalid("Doesn't start with dns://")
+    address: str = url[6:]  # strip the dns:// off
+    try:
+        ipaddress.ip_address(address)  # matches ipv4 or ipv6 addresses
+    except ValueError:
+        raise vol.Invalid("Invalid DNS URL: {}".format(url))
+    return url
+dns_server_list = vol.All(vol.Length(max=8), [dns_url])
+def validate_repository(repository: str) -> str:
     """Validate a valid repository."""
     data = RE_REPOSITORY.match(repository)
     if not data:
@@ -69,13 +84,13 @@ def validate_repository(repository):
 # pylint: disable=no-value-for-parameter
-REPOSITORIES = vol.All([validate_repository], vol.Unique())
+repositories = vol.All([validate_repository], vol.Unique())
 DOCKER_PORTS = vol.Schema(
     {
         vol.All(vol.Coerce(str), vol.Match(r"^\d+(?:/tcp|/udp)?$")): vol.Maybe(
-            NETWORK_PORT
+            network_port
         )
     }
 )
@@ -88,13 +103,13 @@ DOCKER_PORTS_DESCRIPTION = vol.Schema(
 # pylint: disable=no-value-for-parameter
 SCHEMA_HASS_CONFIG = vol.Schema(
     {
-        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
+        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
         vol.Optional(ATTR_VERSION): vol.Maybe(vol.Coerce(str)),
-        vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
+        vol.Optional(ATTR_ACCESS_TOKEN): token,
         vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
-        vol.Inclusive(ATTR_IMAGE, "custom_hass"): DOCKER_IMAGE,
+        vol.Inclusive(ATTR_IMAGE, "custom_hass"): docker_image,
         vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Coerce(str),
-        vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
+        vol.Optional(ATTR_PORT, default=8123): network_port,
         vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
         vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
         vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
@@ -109,7 +124,7 @@ SCHEMA_HASS_CONFIG = vol.Schema(
 SCHEMA_UPDATER_CONFIG = vol.Schema(
     {
-        vol.Optional(ATTR_CHANNEL, default=CHANNEL_STABLE): CHANNELS,
+        vol.Optional(ATTR_CHANNEL, default=CHANNEL_STABLE): channels,
         vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
         vol.Optional(ATTR_HASSIO): vol.Coerce(str),
         vol.Optional(ATTR_HASSOS): vol.Coerce(str),
@@ -128,9 +143,9 @@ SCHEMA_HASSIO_CONFIG = vol.Schema(
         vol.Optional(
             ATTR_ADDONS_CUSTOM_LIST,
             default=["https://github.com/hassio-addons/repository"],
-        ): REPOSITORIES,
-        vol.Optional(ATTR_WAIT_BOOT, default=5): WAIT_BOOT,
-        vol.Optional(ATTR_LOGGING, default="info"): LOG_LEVEL,
+        ): repositories,
+        vol.Optional(ATTR_WAIT_BOOT, default=5): wait_boot,
+        vol.Optional(ATTR_LOGGING, default="info"): log_level,
         vol.Optional(ATTR_DEBUG, default=False): vol.Boolean(),
         vol.Optional(ATTR_DEBUG_BLOCK, default=False): vol.Boolean(),
     },
@@ -138,16 +153,16 @@ SCHEMA_HASSIO_CONFIG = vol.Schema(
 )
-SCHEMA_AUTH_CONFIG = vol.Schema({SHA256: SHA256})
+SCHEMA_AUTH_CONFIG = vol.Schema({sha256: sha256})
 SCHEMA_INGRESS_CONFIG = vol.Schema(
     {
         vol.Required(ATTR_SESSION, default=dict): vol.Schema(
-            {TOKEN: vol.Coerce(float)}
+            {token: vol.Coerce(float)}
         ),
         vol.Required(ATTR_PORTS, default=dict): vol.Schema(
-            {vol.Coerce(str): NETWORK_PORT}
+            {vol.Coerce(str): network_port}
         ),
     },
     extra=vol.REMOVE_EXTRA,
@@ -157,7 +172,7 @@ SCHEMA_INGRESS_CONFIG = vol.Schema(
 SCHEMA_DNS_CONFIG = vol.Schema(
     {
         vol.Optional(ATTR_VERSION): vol.Maybe(vol.Coerce(str)),
-        vol.Optional(ATTR_SERVERS, default=DNS_SERVERS): DNS_SERVER_LIST,
+        vol.Optional(ATTR_SERVERS, default=list): dns_server_list,
     },
     extra=vol.REMOVE_EXTRA,
 )

View File

@@ -1,16 +1,16 @@
 aiohttp==3.6.1
 async_timeout==3.0.1
 attrs==19.3.0
-cchardet==2.1.4
-colorlog==4.0.2
+cchardet==2.1.5
+colorlog==4.1.0
 cpe==1.2.1
 cryptography==2.8
 docker==4.1.0
-gitpython==3.0.4
-packaging==19.2
+gitpython==3.0.5
+packaging==20.0
 pytz==2019.3
 pyudev==0.21.0
 ruamel.yaml==0.15.100
-uvloop==0.13.0
+uvloop==0.14.0
 voluptuous==0.11.7
 ptvsd==4.3.2

View File

@@ -1,6 +1,6 @@
 flake8==3.7.9
-pylint==2.4.3
-pytest==5.2.2
-pytest-timeout==1.3.3
+pylint==2.4.4
+pytest==5.3.2
+pytest-timeout==1.3.4
 pytest-aiohttp==0.3.0
 black==19.10b0

View File

@@ -120,7 +120,17 @@ async def test_odroid_c2_arch(coresys, sys_machine, sys_supervisor):
     await coresys.arch.load()
     assert coresys.arch.default == "aarch64"
-    assert coresys.arch.supported == ["aarch64"]
+    assert coresys.arch.supported == ["aarch64", "armv7", "armhf"]
+async def test_odroid_n2_arch(coresys, sys_machine, sys_supervisor):
+    """Test arch for odroid-n2."""
+    sys_machine.return_value = "odroid-n2"
+    sys_supervisor.arch = "aarch64"
+    await coresys.arch.load()
+    assert coresys.arch.default == "aarch64"
+    assert coresys.arch.supported == ["aarch64", "armv7", "armhf"]
 async def test_odroid_xu_arch(coresys, sys_machine, sys_supervisor):
@@ -133,16 +143,6 @@ async def test_odroid_xu_arch(coresys, sys_machine, sys_supervisor):
     assert coresys.arch.supported == ["armv7", "armhf"]
-async def test_orangepi_prime_arch(coresys, sys_machine, sys_supervisor):
-    """Test arch for orangepi_prime."""
-    sys_machine.return_value = "orangepi-prime"
-    sys_supervisor.arch = "aarch64"
-    await coresys.arch.load()
-    assert coresys.arch.default == "aarch64"
-    assert coresys.arch.supported == ["aarch64"]
 async def test_intel_nuc_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for intel-nuc."""
     sys_machine.return_value = "intel-nuc"

tests/test_validate.py Normal file
View File

@@ -0,0 +1,67 @@
"""Test validators."""
import hassio.validate
import voluptuous.error
import pytest

GOOD_V4 = [
    "dns://10.0.0.1",  # random local
    "dns://254.254.254.254",  # random high numbers
    "DNS://1.1.1.1",  # cloudflare
    "dns://9.9.9.9",  # quad-9
]
GOOD_V6 = [
    "dns://2606:4700:4700::1111",  # cloudflare
    "DNS://2606:4700:4700::1001",  # cloudflare
]
BAD = ["hello world", "https://foo.bar", "", "dns://example.com"]


async def test_dns_url_v4_good():
    """ tests the DNS validator with known-good ipv6 DNS URLs """
    for url in GOOD_V4:
        assert hassio.validate.dns_url(url)


async def test_dns_url_v6_good():
    """ tests the DNS validator with known-good ipv6 DNS URLs """
    for url in GOOD_V6:
        assert hassio.validate.dns_url(url)


async def test_dns_server_list_v4():
    """ test a list with v4 addresses """
    assert hassio.validate.dns_server_list(GOOD_V4)


async def test_dns_server_list_v6():
    """ test a list with v6 addresses """
    assert hassio.validate.dns_server_list(GOOD_V6)


async def test_dns_server_list_combined():
    """ test a list with both v4 and v6 addresses """
    combined = GOOD_V4 + GOOD_V6
    # test the matches
    assert hassio.validate.dns_server_list(combined)
    # test max_length is OK still
    assert hassio.validate.dns_server_list(combined)
    # test that it fails when the list is too long
    with pytest.raises(voluptuous.error.Invalid):
        hassio.validate.dns_server_list(combined + combined + combined + combined)


async def test_dns_server_list_bad():
    """ test the bad list """
    # test the matches
    with pytest.raises(voluptuous.error.Invalid):
        assert hassio.validate.dns_server_list(BAD)


async def test_dns_server_list_bad_combined():
    """ test the bad list, combined with the good """
    combined = GOOD_V4 + GOOD_V6 + BAD

    with pytest.raises(voluptuous.error.Invalid):
        # bad list
        assert hassio.validate.dns_server_list(combined)