Mirror of https://github.com/home-assistant/supervisor.git
Make discovery persistent (#727)
* Make discovery persistent
* fix file handling
* fix detection
* Smooth
* Fix ring import
* Fix handling
* fix schema
* fix validate
* fix discovery cleanup
parent e5451973bd
commit af19e95c81
@@ -584,6 +584,13 @@ class Addon(CoreSysAttributes):
             return False

+    def remove_discovery(self):
+        """Remove all discovery message from add-on."""
+        for message in self.sys_discovery.list_messages:
+            if message.addon != self.slug:
+                continue
+            self.sys_discovery.remove(message)
+
     def write_asound(self):
         """Write asound config to file and return True on success."""
         asound_config = self.sys_host.alsa.asound(

@@ -704,6 +711,9 @@ class Addon(CoreSysAttributes):
         with suppress(HostAppArmorError):
             await self.sys_host.apparmor.remove_profile(self.slug)

+        # Remove discovery messages
+        self.remove_discovery()
+
         self._set_uninstall()
         return True
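Note that the new remove_discovery() iterates list_messages rather than the handler's internal dict. Assuming list_messages returns a fresh list built from message_obj (its body is not shown in this diff), removing entries mid-loop is safe because the container being mutated is never the one being iterated. A minimal, self-contained sketch of that pattern, with illustrative names only:

    messages = {
        "a1": {"addon": "core_mosquitto", "service": "mqtt"},
        "b2": {"addon": "other_addon", "service": "mqtt"},
    }

    def list_messages():
        """Snapshot of current messages (stands in for Discovery.list_messages)."""
        return list(messages.values())

    def remove(message):
        """Drop a message from the store (stands in for Discovery.remove)."""
        for key, value in list(messages.items()):
            if value is message:
                messages.pop(key)

    # Mirrors Addon.remove_discovery(): filter by owning add-on, then remove.
    for message in list_messages():
        if message["addon"] != "core_mosquitto":
            continue
        remove(message)

    assert all(m["addon"] != "core_mosquitto" for m in messages.values())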
@@ -25,7 +25,7 @@ from ..const import (
     PRIVILEGED_IPC_LOCK, PRIVILEGED_SYS_TIME, PRIVILEGED_SYS_NICE,
     PRIVILEGED_SYS_RESOURCE, PRIVILEGED_SYS_PTRACE,
     ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_MANAGER, ROLE_ADMIN)
-from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_DEVICE
+from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_DEVICE, UUID_MATCH
 from ..services.validate import DISCOVERY_SERVICES

 _LOGGER = logging.getLogger(__name__)

@@ -185,8 +185,7 @@ SCHEMA_BUILD_CONFIG = vol.Schema({
 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_USER = vol.Schema({
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
-        vol.Match(r"^[0-9a-f]{32}$"),
+    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
     vol.Optional(ATTR_ACCESS_TOKEN): vol.Match(r"^[0-9a-f]{64}$"),
     vol.Optional(ATTR_OPTIONS, default=dict): dict,
     vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
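The two-line vol.Match for add-on UUIDs is folded into the shared UUID_MATCH validator defined later in this diff. For illustration, assuming the same regex shown there:

    import uuid
    import voluptuous as vol

    UUID_MATCH = vol.Match(r"^[0-9a-f]{32}$")

    UUID_MATCH(uuid.uuid4().hex)     # passes: uuid4().hex is 32 lowercase hex characters
    # UUID_MATCH(str(uuid.uuid4()))  # would raise vol.Invalid: the dashed form does not match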
@@ -7,11 +7,11 @@ from ..const import (
     ATTR_DISCOVERY, ATTR_SERVICE, REQUEST_FROM)
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, APIForbidden
-from ..services.validate import SERVICE_ALL
+from ..validate import SERVICE_ALL


 SCHEMA_DISCOVERY = vol.Schema({
-    vol.Required(ATTR_SERVICE): vol.In(SERVICE_ALL),
+    vol.Required(ATTR_SERVICE): SERVICE_ALL,
     vol.Required(ATTR_COMPONENT): vol.Coerce(str),
     vol.Optional(ATTR_PLATFORM): vol.Maybe(vol.Coerce(str)),
     vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
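SERVICE_ALL is now exported as a ready-made validator (vol.In([...]), see the validate module further down in this diff) instead of a plain list, so API schemas use it directly rather than wrapping it in vol.In again. A small sketch, with the "mqtt" constant value assumed for illustration:

    import voluptuous as vol

    SERVICE_ALL = vol.In(["mqtt"])   # stands in for the new SERVICE_ALL = vol.In([SERVICE_MQTT])

    SERVICE_ALL("mqtt")              # passes
    # SERVICE_ALL("zwave")           # would raise vol.Invalid: not an allowed service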
@@ -13,8 +13,9 @@ from ..const import (
     ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
     ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
 from ..coresys import CoreSysAttributes
-from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES, CHANNELS
+from ..validate import WAIT_BOOT, REPOSITORIES, CHANNELS
 from ..exceptions import APIError
+from ..utils.validate import validate_timezone

 _LOGGER = logging.getLogger(__name__)

@@ -18,7 +18,7 @@ from .snapshots import SnapshotManager
 from .tasks import Tasks
 from .updater import Updater
 from .services import ServiceManager
-from .services import Discovery
+from .discovery import Discovery
 from .host import HostManager
 from .dbus import DBusManager
 from .hassos import HassOS
@@ -21,6 +21,7 @@ FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
 FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
 FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
 FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
+FILE_HASSIO_DISCOVERY = Path(HASSIO_DATA, "discovery.json")

 SOCKET_DOCKER = Path("/var/run/docker.sock")

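Discovery messages now persist in their own file, discovery.json, instead of piggybacking on services.json. A hypothetical file produced under the new SCHEMA_DISCOVERY_CONFIG (all values invented for illustration; the top-level key assumes ATTR_DISCOVERY maps to "discovery"):

    {
      "discovery": [
        {
          "uuid": "9c7b4dcf3a0e4c6aa6f4e98b2c7d1a55",
          "addon": "core_mosquitto",
          "service": "mqtt",
          "component": "mqtt",
          "platform": null,
          "config": {"host": "172.30.32.1", "port": 1883}
        }
      ]
    }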
@@ -52,6 +52,9 @@ class HassIO(CoreSysAttributes):
         # load services
         await self.sys_services.load()

+        # Load discovery
+        await self.sys_discovery.load()
+
         # start dns forwarding
         self.sys_create_task(self.sys_dns.start())

@@ -7,9 +7,12 @@ import attr
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from .validate import DISCOVERY_SERVICES
-from ..coresys import CoreSysAttributes
-from ..exceptions import DiscoveryError, HomeAssistantAPIError
+from .const import FILE_HASSIO_DISCOVERY, ATTR_CONFIG, ATTR_DISCOVERY
+from .coresys import CoreSysAttributes
+from .exceptions import DiscoveryError, HomeAssistantAPIError
+from .validate import SCHEMA_DISCOVERY_CONFIG
+from .utils.json import JsonConfig
+from .services.validate import DISCOVERY_SERVICES

 _LOGGER = logging.getLogger(__name__)

@@ -17,18 +20,19 @@ CMD_NEW = 'post'
 CMD_DEL = 'delete'


-class Discovery(CoreSysAttributes):
+class Discovery(CoreSysAttributes, JsonConfig):
     """Home Assistant Discovery handler."""

     def __init__(self, coresys):
         """Initialize discovery handler."""
+        super().__init__(FILE_HASSIO_DISCOVERY, SCHEMA_DISCOVERY_CONFIG)
         self.coresys = coresys
         self.message_obj = {}

-    def load(self):
+    async def load(self):
         """Load exists discovery message into storage."""
         messages = {}
-        for message in self._data:
+        for message in self._data[ATTR_DISCOVERY]:
             discovery = Message(**message)
             messages[discovery.uuid] = discovery

@@ -40,19 +44,14 @@ class Discovery(CoreSysAttributes):
         for message in self.message_obj.values():
             messages.append(attr.asdict(message))

-        self._data.clear()
-        self._data.extend(messages)
-        self.sys_services.data.save_data()
+        self._data[ATTR_DISCOVERY].clear()
+        self._data[ATTR_DISCOVERY].extend(messages)
+        self.save_data()

     def get(self, uuid):
         """Return discovery message."""
         return self.message_obj.get(uuid)

-    @property
-    def _data(self):
-        """Return discovery data."""
-        return self.sys_services.data.discovery
-
     @property
     def list_messages(self):
         """Return list of available discovery messages."""

@@ -71,7 +70,7 @@ class Discovery(CoreSysAttributes):
         message = Message(addon.slug, service, component, platform, config)

         # Already exists?
-        for old_message in self.message_obj:
+        for old_message in self.list_messages:
             if old_message != message:
                 continue
             _LOGGER.warning("Duplicate discovery message from %s", addon.slug)

@@ -82,7 +81,7 @@ class Discovery(CoreSysAttributes):
         self.message_obj[message.uuid] = message
         self.save()

-        self.sys_create_task(self._push_discovery(message.uuid, CMD_NEW))
+        self.sys_create_task(self._push_discovery(message, CMD_NEW))
         return message

     def remove(self, message):

@@ -92,21 +91,25 @@ class Discovery(CoreSysAttributes):

         _LOGGER.info("Delete discovery to Home Assistant %s/%s from %s",
                      message.component, message.platform, message.addon)
-        self.sys_create_task(self._push_discovery(message.uuid, CMD_DEL))
+        self.sys_create_task(self._push_discovery(message, CMD_DEL))

-    async def _push_discovery(self, uuid, command):
+    async def _push_discovery(self, message, command):
         """Send a discovery request."""
         if not await self.sys_homeassistant.check_api_state():
-            _LOGGER.info("Discovery %s mesage ignore", uuid)
+            _LOGGER.info("Discovery %s mesage ignore", message.uuid)
             return

+        data = attr.asdict(message)
+        data.pop(ATTR_CONFIG)
+
         with suppress(HomeAssistantAPIError):
             async with self.sys_homeassistant.make_request(
-                    command, f"api/hassio_push/discovery/{uuid}"):
-                _LOGGER.info("Discovery %s message send", uuid)
+                    command, f"api/hassio_push/discovery/{message.uuid}",
+                    json=data, timeout=10):
+                _LOGGER.info("Discovery %s message send", message.uuid)
                 return

-        _LOGGER.warning("Discovery %s message fail", uuid)
+        _LOGGER.warning("Discovery %s message fail", message.uuid)


 @attr.s

@@ -116,5 +119,5 @@ class Message:
     service = attr.ib()
     component = attr.ib()
     platform = attr.ib()
-    config = attr.ib()
+    config = attr.ib(cmp=False)
     uuid = attr.ib(factory=lambda: uuid4().hex, cmp=False)
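The duplicate check in this handler leans on attrs-generated equality: with cmp=False on both config and uuid, two messages compare equal whenever addon, service, component and platform match, so re-sending the same discovery with a different payload is still caught as a duplicate. A standalone sketch of that behaviour (the fields mirror the Message(addon.slug, service, component, platform, config) call above; not copied verbatim from the file):

    import attr
    from uuid import uuid4

    @attr.s
    class Message:
        """Discovery message with the same equality rules as in this diff."""
        addon = attr.ib()
        service = attr.ib()
        component = attr.ib()
        platform = attr.ib()
        config = attr.ib(cmp=False)                             # ignored by __eq__
        uuid = attr.ib(factory=lambda: uuid4().hex, cmp=False)  # ignored by __eq__

    a = Message("core_mosquitto", "mqtt", "mqtt", None, {"port": 1883})
    b = Message("core_mosquitto", "mqtt", "mqtt", None, {"port": 8883})
    assert a == b            # equal despite different config (and different uuids)
    assert a.uuid != b.uuid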
@@ -1,5 +1,4 @@
 """Handle internal services discovery."""
-from .discovery import Discovery  # noqa
 from .mqtt import MQTTService
 from .data import ServicesData
 from ..const import SERVICE_MQTT

@@ -34,10 +33,6 @@ class ServiceManager(CoreSysAttributes):
         for slug, service in AVAILABLE_SERVICES.items():
             self.services_obj[slug] = service(self.coresys)

-        # Read exists discovery messages
-        self.sys_discovery.load()
-
     def reset(self):
         """Reset available data."""
         self.data.reset_data()
-        self.sys_discovery.load()
@@ -1,7 +1,7 @@
 """Handle service data for persistent supervisor reboot."""

-from .validate import SCHEMA_SERVICES_FILE
-from ..const import FILE_HASSIO_SERVICES, ATTR_DISCOVERY, SERVICE_MQTT
+from .validate import SCHEMA_SERVICES_CONFIG
+from ..const import FILE_HASSIO_SERVICES, SERVICE_MQTT
 from ..utils.json import JsonConfig


@@ -10,12 +10,7 @@ class ServicesData(JsonConfig):

     def __init__(self):
         """Initialize services data."""
-        super().__init__(FILE_HASSIO_SERVICES, SCHEMA_SERVICES_FILE)
+        super().__init__(FILE_HASSIO_SERVICES, SCHEMA_SERVICES_CONFIG)

-    @property
-    def discovery(self):
-        """Return discovery data for Home Assistant."""
-        return self._data[ATTR_DISCOVERY]
-
     @property
     def mqtt(self):
@@ -1,42 +1,11 @@
 """Validate services schema."""
-import re
-
 import voluptuous as vol

 from ..const import (
     SERVICE_MQTT, ATTR_HOST, ATTR_PORT, ATTR_PASSWORD, ATTR_USERNAME, ATTR_SSL,
-    ATTR_ADDON, ATTR_PROTOCOL, ATTR_DISCOVERY, ATTR_COMPONENT, ATTR_UUID,
-    ATTR_PLATFORM, ATTR_CONFIG, ATTR_SERVICE)
+    ATTR_ADDON, ATTR_PROTOCOL)
 from ..validate import NETWORK_PORT
+from ..utils.validate import schema_or
-
-UUID_MATCH = re.compile(r"^[0-9a-f]{32}$")
-
-SERVICE_ALL = [
-    SERVICE_MQTT
-]
-
-
-def schema_or(schema):
-    """Allow schema or empty."""
-    def _wrapper(value):
-        """Wrapper for validator."""
-        if not value:
-            return value
-        return schema(value)
-
-    return _wrapper
-
-
-SCHEMA_DISCOVERY = vol.Schema([
-    vol.Schema({
-        vol.Required(ATTR_UUID): vol.Match(UUID_MATCH),
-        vol.Required(ATTR_ADDON): vol.Coerce(str),
-        vol.Required(ATTR_SERVICE): vol.In(SERVICE_ALL),
-        vol.Required(ATTR_COMPONENT): vol.Coerce(str),
-        vol.Required(ATTR_PLATFORM): vol.Maybe(vol.Coerce(str)),
-        vol.Required(ATTR_CONFIG): vol.Maybe(dict),
-    }, extra=vol.REMOVE_EXTRA)
-])


 # pylint: disable=no-value-for-parameter

@@ -56,9 +25,8 @@ SCHEMA_CONFIG_MQTT = SCHEMA_SERVICE_MQTT.extend({
 })


-SCHEMA_SERVICES_FILE = vol.Schema({
+SCHEMA_SERVICES_CONFIG = vol.Schema({
     vol.Optional(SERVICE_MQTT, default=dict): schema_or(SCHEMA_CONFIG_MQTT),
-    vol.Optional(ATTR_DISCOVERY, default=list): schema_or(SCHEMA_DISCOVERY),
 }, extra=vol.REMOVE_EXTRA)

hassio/utils/validate.py (new file, 28 lines)

@@ -0,0 +1,28 @@
+"""Validate utils."""
+
+import pytz
+import voluptuous as vol
+
+
+def schema_or(schema):
+    """Allow schema or empty."""
+    def _wrapper(value):
+        """Wrapper for validator."""
+        if not value:
+            return value
+        return schema(value)
+
+    return _wrapper
+
+
+def validate_timezone(timezone):
+    """Validate voluptuous timezone."""
+    try:
+        pytz.timezone(timezone)
+    except pytz.exceptions.UnknownTimeZoneError:
+        raise vol.Invalid(
+            "Invalid time zone passed in. Valid options can be found here: "
+            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
+            from None
+
+    return timezone
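How schema_or behaves in practice: an empty value (e.g. the default {} or []) passes through untouched, while anything non-empty is validated by the wrapped schema. A quick, self-contained illustration (the MQTT-ish schema here is made up; schema_or is copied from the new module above):

    import voluptuous as vol

    def schema_or(schema):
        """Allow schema or empty (as in hassio/utils/validate.py)."""
        def _wrapper(value):
            if not value:
                return value
            return schema(value)
        return _wrapper

    MQTT = vol.Schema({vol.Required("host"): str, vol.Required("port"): int})
    validator = schema_or(MQTT)

    validator({})                                        # -> {} : empty config passes untouched
    validator({"host": "core-mosquitto", "port": 1883})  # -> validated by the wrapped schema
    # validator({"host": "core-mosquitto"})              # would raise vol.Invalid: 'port' missing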
@@ -3,15 +3,17 @@ import uuid
 import re

 import voluptuous as vol
-import pytz

 from .const import (
     ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_TIMEZONE, ATTR_HASSOS,
     ATTR_ADDONS_CUSTOM_LIST, ATTR_PASSWORD, ATTR_HOMEASSISTANT, ATTR_HASSIO,
-    ATTR_BOOT, ATTR_LAST_BOOT, ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG,
+    ATTR_BOOT, ATTR_LAST_BOOT, ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG, ATTR_CONFIG,
     ATTR_WAIT_BOOT, ATTR_UUID, ATTR_REFRESH_TOKEN, ATTR_HASSOS_CLI,
-    ATTR_ACCESS_TOKEN,
+    ATTR_ACCESS_TOKEN, ATTR_DISCOVERY, ATTR_ADDON, ATTR_COMPONENT,
+    ATTR_PLATFORM, ATTR_SERVICE,
+    SERVICE_MQTT,
     CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV)
+from .utils.validate import schema_or, validate_timezone


 RE_REPOSITORY = re.compile(r"^(?P<url>[^#]+)(?:#(?P<branch>[\w\-]+))?$")

@@ -21,6 +23,8 @@ WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
 DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
 ALSA_DEVICE = vol.Maybe(vol.Match(r"\d+,\d+"))
 CHANNELS = vol.In([CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV])
+UUID_MATCH = vol.Match(r"^[0-9a-f]{32}$")
+SERVICE_ALL = vol.In([SERVICE_MQTT])


 def validate_repository(repository):

@@ -40,19 +44,6 @@ def validate_repository(repository):
 REPOSITORIES = vol.All([validate_repository], vol.Unique())


-def validate_timezone(timezone):
-    """Validate voluptuous timezone."""
-    try:
-        pytz.timezone(timezone)
-    except pytz.exceptions.UnknownTimeZoneError:
-        raise vol.Invalid(
-            "Invalid time zone passed in. Valid options can be found here: "
-            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
-            from None
-
-    return timezone
-
-
 # pylint: disable=inconsistent-return-statements
 def convert_to_docker_ports(data):
     """Convert data into Docker port list."""

@@ -83,8 +74,7 @@ DOCKER_PORTS = vol.Schema({

 # pylint: disable=no-value-for-parameter
 SCHEMA_HASS_CONFIG = vol.Schema({
-    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
-        vol.Match(r"^[0-9a-f]{32}$"),
+    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
     vol.Optional(ATTR_ACCESS_TOKEN): vol.Match(r"^[0-9a-f]{64}$"),
     vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
     vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,

@@ -117,3 +107,19 @@ SCHEMA_HASSIO_CONFIG = vol.Schema({
     ]): REPOSITORIES,
     vol.Optional(ATTR_WAIT_BOOT, default=5): WAIT_BOOT,
 }, extra=vol.REMOVE_EXTRA)
+
+
+SCHEMA_DISCOVERY = vol.Schema([
+    vol.Schema({
+        vol.Required(ATTR_UUID): UUID_MATCH,
+        vol.Required(ATTR_ADDON): vol.Coerce(str),
+        vol.Required(ATTR_SERVICE): SERVICE_ALL,
+        vol.Required(ATTR_COMPONENT): vol.Coerce(str),
+        vol.Required(ATTR_PLATFORM): vol.Maybe(vol.Coerce(str)),
+        vol.Required(ATTR_CONFIG): vol.Maybe(dict),
+    }, extra=vol.REMOVE_EXTRA)
+])
+
+
+SCHEMA_DISCOVERY_CONFIG = vol.Schema({
+    vol.Optional(ATTR_DISCOVERY, default=list): schema_or(SCHEMA_DISCOVERY),
+}, extra=vol.REMOVE_EXTRA)
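One consequence of the new config schema worth noting: vol.Optional(ATTR_DISCOVERY, default=list) means a missing or brand-new discovery.json validates to an empty list, and extra=vol.REMOVE_EXTRA silently drops unknown keys on load. Sketched below with plain string keys standing in for the ATTR_* constants and a simplified inner schema:

    import voluptuous as vol

    def schema_or(schema):
        def _wrapper(value):
            return value if not value else schema(value)
        return _wrapper

    SCHEMA_DISCOVERY_CONFIG = vol.Schema({
        vol.Optional("discovery", default=list): schema_or(vol.Schema(list)),
    }, extra=vol.REMOVE_EXTRA)

    SCHEMA_DISCOVERY_CONFIG({})              # -> {'discovery': []}
    SCHEMA_DISCOVERY_CONFIG({"unknown": 1})  # -> {'discovery': []} : extra key removed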