Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-19 14:09:21 +00:00)

Compare commits: 77 commits
- 21f3c4820b
- 214c6f919e
- d9d438d571
- cf60d1f55c
- f9aa12cbad
- 76266cc18b
- 50b9506ff3
- 754cd64213
- 113b62ee77
- d9874c4c3e
- ca44e858c5
- c7ca4de307
- b77146a4e0
- 45b4800378
- 7f9232d2b9
- d90426f745
- c2deabb672
- ead5993f3e
- 1bcd74e8fa
- 118da3c275
- d7bb9013d4
- 812c46d82b
- c0462b28cd
- 82b2f66920
- 01da42e1b6
- d652d22547
- baea84abe6
- c2d705a42a
- f10b433e1f
- 67f562a846
- 1edec61133
- c13a33bf71
- 2ae93ae7b1
- 8451020afe
- a48e568efc
- dee2808cb5
- 06a2ab26a2
- 45de0f2f39
- bac5f704dc
- 79669a5d04
- a6e712c9ea
- 069fe99699
- 4754f067ad
- dce9818812
- d054b6dbb7
- 3093165325
- fd9c5bd412
- 9a8850fecd
- b12175ab9a
- b52f90187b
- 4eb02f474d
- dfdcddfd0b
- 0391277bad
- 73643b9bfe
- 93a52b8382
- 7a91bb1f6c
- 26efa998a1
- fc9f3fee0a
- ec19bd570b
- 3335bad9e1
- 71ae334e24
- 0807651fbd
- 7026d42d77
- 31047b9ec2
- 714791de8f
- c544fff2b2
- fc45670686
- 5cefa0a2ee
- a1910d4135
- f1fecdde3a
- 9ba4ea7d18
- 58a455d639
- 3ea85f6a28
- 4e1469ada4
- 5778f78f28
- 227125cc0b
- b36e178c45
@@ -1,6 +0,0 @@
sudo: true
dist: xenial
install: pip install -U tox
language: python
python: 3.7
script: tox
36 API.md
@@ -22,7 +22,7 @@ On success / Code 200:
}
```

For access to API you need set the `X-HASSIO-KEY` they will be available for Add-ons/HomeAssistant with envoriment `HASSIO_TOKEN`.
To access the API you need to set the `X-HASSIO-KEY` header; the token is available to add-ons and Home Assistant via the `HASSIO_TOKEN` environment variable.

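As a minimal illustration of this auth flow (not part of the original API.md): an add-on can read `HASSIO_TOKEN` from its environment and send it as the `X-HASSIO-KEY` header. The `http://hassio` base URL and the `requests` dependency are assumptions here.

```python
# Sketch: call the Supervisor API from inside an add-on container.
# Assumes the API is reachable at http://hassio and that `requests` is installed;
# HASSIO_TOKEN is injected into the add-on environment by the Supervisor.
import os

import requests

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}
resp = requests.get("http://hassio/supervisor/info", headers=headers)
resp.raise_for_status()
print(resp.json()["data"]["version"])
```
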
### Hass.io

@@ -41,6 +41,8 @@ The addons from `addons` are only installed one.
"arch": "armhf|aarch64|i386|amd64",
"channel": "stable|beta|dev",
"timezone": "TIMEZONE",
"logging": "debug|info|warning|error|critical",
"ip_address": "ip address",
"wait_boot": "int",
"addons": [
{
@@ -78,6 +80,7 @@ Optional:
"channel": "stable|beta|dev",
"timezone": "TIMEZONE",
"wait_boot": "int",
"logging": "debug|info|warning|error|critical",
"addons_repositories": [
"REPO_URL"
]
@@ -348,6 +351,7 @@ Load host configs from a USB stick.
"last_version": "LAST_VERSION",
"arch": "arch",
"machine": "Image machine type",
"ip_address": "ip address",
"image": "str",
"custom": "bool -> if custom image",
"boot": "bool",
@@ -376,6 +380,7 @@ Output is the raw Docker log.
- POST `/homeassistant/check`
- POST `/homeassistant/start`
- POST `/homeassistant/stop`
- POST `/homeassistant/rebuild`

- POST `/homeassistant/options`

@@ -468,6 +473,7 @@ Get all available addons.
"available": "bool",
"arch": ["armhf", "aarch64", "i386", "amd64"],
"machine": "[raspberrypi2, tinker]",
"homeassistant": "null|min Home Assistant version",
"repository": "12345678|null",
"version": "null|VERSION_INSTALLED",
"last_version": "LAST_VERSION",
@@ -476,6 +482,7 @@ Get all available addons.
"build": "bool",
"options": "{}",
"network": "{}|null",
"network_description": "{}|null",
"host_network": "bool",
"host_pid": "bool",
"host_ipc": "bool",
@@ -504,7 +511,12 @@ Get all available addons.
"audio_input": "null|0,0",
"audio_output": "null|0,0",
"services_role": "['service:access']",
"discovery": "['service']"
"discovery": "['service']",
"ip_address": "ip address",
"ingress": "bool",
"ingress_entry": "null|/api/hassio_ingress/slug",
"ingress_url": "null|/api/hassio_ingress/slug/entry.html",
"ingress_port": "null|int"
}
```

@@ -578,6 +590,23 @@ Write data to add-on stdin
}
```

### ingress

- POST `/ingress/session`

Create a new session for access to the ingress service.

```json
{
  "session": "token"
}
```

- VIEW `/ingress/{token}`

Ingress Web UI for this add-on. The add-on needs to support Home Assistant auth.
The ingress session must be sent as a cookie.

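As a rough illustration of this flow (not from the original API.md): create a session, then request the add-on's ingress path with the session sent as a cookie. The `http://hassio` base URL, the `ingress_session` cookie name, and the placeholder add-on token are assumptions.

```python
# Sketch: obtain an ingress session, then open an add-on's ingress UI.
# Assumes `requests`, the http://hassio endpoint and HASSIO_TOKEN in the env;
# ADDON_INGRESS_TOKEN and the "ingress_session" cookie name are placeholders.
import os

import requests

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

# 1. Create a new ingress session.
resp = requests.post("http://hassio/ingress/session", headers=headers)
session = resp.json()["data"]["session"]

# 2. Request the add-on's ingress entry, authenticating with the session cookie.
ADDON_INGRESS_TOKEN = "..."  # the add-on's ingress token (placeholder)
page = requests.get(
    f"http://hassio/ingress/{ADDON_INGRESS_TOKEN}/",
    cookies={"ingress_session": session},
)
print(page.status_code)
```
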
### discovery

- GET `/discovery`

@@ -677,7 +706,8 @@ return:
"machine": "type",
"arch": "arch",
"supported_arch": ["arch1", "arch2"],
"channel": "stable|beta|dev"
"channel": "stable|beta|dev",
"logging": "debug|info|warning|error|critical"
}
```

45 azure-pipelines.yml Normal file
@@ -0,0 +1,45 @@
# Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python

trigger:
- master
- dev

pr:
- dev

jobs:

- job: "Tox"

  pool:
    vmImage: 'ubuntu-16.04'

  steps:
  - task: UsePythonVersion@0
    displayName: 'Use Python $(python.version)'
    inputs:
      versionSpec: '3.7'

  - script: pip install tox
    displayName: 'Install Tox'

  - script: tox
    displayName: 'Run Tox'


- job: "JQ"

  pool:
    vmImage: 'ubuntu-16.04'

  steps:
  - script: sudo apt-get install -y jq
    displayName: 'Install JQ'

  - bash: |
      shopt -s globstar
      cat **/*.json | jq '.'
    displayName: 'Run JQ'
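The `JQ` job simply checks that every JSON file in the repository parses. A rough local equivalent in Python, shown here only as an illustration and not part of the pipeline:

```python
# Rough local equivalent of the "JQ" pipeline job: fail if any JSON file
# in the repository does not parse. Assumes it is run from the repo root.
import json
import pathlib
import sys

failed = False
for path in pathlib.Path(".").glob("**/*.json"):
    try:
        json.loads(path.read_text())
    except (OSError, json.JSONDecodeError, UnicodeDecodeError) as err:
        print(f"{path}: {err}")
        failed = True

sys.exit(1 if failed else 0)
```
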
@@ -13,7 +13,8 @@ def initialize_event_loop():
    """Attempt to use uvloop."""
    try:
        import uvloop
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

        uvloop.install()
    except ImportError:
        pass

@@ -1,42 +1,106 @@
|
||||
"""Init file for Hass.io add-ons."""
|
||||
from contextlib import suppress
|
||||
from copy import deepcopy
|
||||
import json
|
||||
from distutils.version import StrictVersion
|
||||
from ipaddress import IPv4Address, ip_address
|
||||
import logging
|
||||
from pathlib import Path, PurePath
|
||||
import re
|
||||
import secrets
|
||||
import shutil
|
||||
import tarfile
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Dict, Any
|
||||
from typing import Any, Awaitable, Dict, Optional
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from ..const import (
|
||||
ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_AUDIO, ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT, ATTR_AUTH_API, ATTR_AUTO_UART, ATTR_AUTO_UPDATE,
|
||||
ATTR_BOOT, ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
|
||||
ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
|
||||
ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE, ATTR_MAP,
|
||||
ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS, ATTR_PORTS, ATTR_PRIVILEGED,
|
||||
ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA, ATTR_SERVICES, ATTR_SLUG,
|
||||
ATTR_STARTUP, ATTR_STATE, ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT,
|
||||
ATTR_TMPFS, ATTR_URL, ATTR_USER, ATTR_UUID, ATTR_VERSION, ATTR_WEBUI,
|
||||
SECURITY_DEFAULT, SECURITY_DISABLE, SECURITY_PROFILE, STATE_NONE,
|
||||
STATE_STARTED, STATE_STOPPED)
|
||||
from ..coresys import CoreSysAttributes
|
||||
ATTR_ACCESS_TOKEN,
|
||||
ATTR_APPARMOR,
|
||||
ATTR_ARCH,
|
||||
ATTR_AUDIO,
|
||||
ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT,
|
||||
ATTR_AUTH_API,
|
||||
ATTR_AUTO_UART,
|
||||
ATTR_AUTO_UPDATE,
|
||||
ATTR_BOOT,
|
||||
ATTR_DESCRIPTON,
|
||||
ATTR_DEVICES,
|
||||
ATTR_DEVICETREE,
|
||||
ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API,
|
||||
ATTR_ENVIRONMENT,
|
||||
ATTR_FULL_ACCESS,
|
||||
ATTR_GPIO,
|
||||
ATTR_HASSIO_API,
|
||||
ATTR_HASSIO_ROLE,
|
||||
ATTR_HOMEASSISTANT,
|
||||
ATTR_HOMEASSISTANT_API,
|
||||
ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC,
|
||||
ATTR_HOST_NETWORK,
|
||||
ATTR_HOST_PID,
|
||||
ATTR_IMAGE,
|
||||
ATTR_INGRESS,
|
||||
ATTR_INGRESS_ENTRY,
|
||||
ATTR_INGRESS_PORT,
|
||||
ATTR_INGRESS_TOKEN,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_LEGACY,
|
||||
ATTR_LOCATON,
|
||||
ATTR_MACHINE,
|
||||
ATTR_MAP,
|
||||
ATTR_NAME,
|
||||
ATTR_NETWORK,
|
||||
ATTR_OPTIONS,
|
||||
ATTR_PORTS,
|
||||
ATTR_PORTS_DESCRIPTION,
|
||||
ATTR_PRIVILEGED,
|
||||
ATTR_PROTECTED,
|
||||
ATTR_REPOSITORY,
|
||||
ATTR_SCHEMA,
|
||||
ATTR_SERVICES,
|
||||
ATTR_SLUG,
|
||||
ATTR_STARTUP,
|
||||
ATTR_STATE,
|
||||
ATTR_STDIN,
|
||||
ATTR_SYSTEM,
|
||||
ATTR_TIMEOUT,
|
||||
ATTR_TMPFS,
|
||||
ATTR_URL,
|
||||
ATTR_USER,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
ATTR_WEBUI,
|
||||
SECURITY_DEFAULT,
|
||||
SECURITY_DISABLE,
|
||||
SECURITY_PROFILE,
|
||||
STATE_NONE,
|
||||
STATE_STARTED,
|
||||
STATE_STOPPED,
|
||||
)
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..docker.addon import DockerAddon
|
||||
from ..exceptions import HostAppArmorError
|
||||
from ..utils import create_token
|
||||
from ..docker.stats import DockerStats
|
||||
from ..exceptions import (
|
||||
AddonsError,
|
||||
AddonsNotSupportedError,
|
||||
DockerAPIError,
|
||||
HostAppArmorError,
|
||||
JsonFileError,
|
||||
)
|
||||
from ..utils.apparmor import adjust_profile
|
||||
from ..utils.json import read_json_file, write_json_file
|
||||
from .utils import check_installed, remove_data
|
||||
from .validate import (
|
||||
MACHINE_ALL, RE_SERVICE, RE_VOLUME, SCHEMA_ADDON_SNAPSHOT,
|
||||
validate_options)
|
||||
MACHINE_ALL,
|
||||
RE_SERVICE,
|
||||
RE_VOLUME,
|
||||
SCHEMA_ADDON_SNAPSHOT,
|
||||
validate_options,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -48,26 +112,28 @@ RE_WEBUI = re.compile(
|
||||
class Addon(CoreSysAttributes):
|
||||
"""Hold data for add-on inside Hass.io."""
|
||||
|
||||
def __init__(self, coresys, slug):
|
||||
def __init__(self, coresys: CoreSys, slug: str):
|
||||
"""Initialize data holder."""
|
||||
self.coresys = coresys
|
||||
self.instance = DockerAddon(coresys, slug)
|
||||
self.coresys: CoreSys = coresys
|
||||
self.instance: DockerAddon = DockerAddon(coresys, slug)
|
||||
self._id: str = slug
|
||||
|
||||
self._id = slug
|
||||
|
||||
async def load(self):
|
||||
async def load(self) -> None:
|
||||
"""Async initialize of object."""
|
||||
if not self.is_installed:
|
||||
return
|
||||
await self.instance.attach()
|
||||
|
||||
# NOTE: Can't be removed after soon
|
||||
if ATTR_IMAGE not in self._data.user[self._id]:
|
||||
self._data.user[self._id][ATTR_IMAGE] = self.image_name
|
||||
self.save_data()
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.attach()
|
||||
|
||||
@property
|
||||
def slug(self):
|
||||
def ip_address(self) -> IPv4Address:
|
||||
"""Return IP of Add-on instance."""
|
||||
if not self.is_installed:
|
||||
return ip_address("0.0.0.0")
|
||||
return self.instance.ip_address
|
||||
|
||||
@property
|
||||
def slug(self) -> str:
|
||||
"""Return slug/id of add-on."""
|
||||
return self._id
|
||||
|
||||
@@ -82,30 +148,41 @@ class Addon(CoreSysAttributes):
|
||||
return self.sys_addons.data
|
||||
|
||||
@property
|
||||
def is_installed(self):
|
||||
def is_installed(self) -> bool:
|
||||
"""Return True if an add-on is installed."""
|
||||
return self._id in self._data.system
|
||||
|
||||
@property
|
||||
def is_detached(self):
|
||||
def is_detached(self) -> bool:
|
||||
"""Return True if add-on is detached."""
|
||||
return self._id not in self._data.cache
|
||||
|
||||
@property
|
||||
def available(self):
|
||||
def available(self) -> bool:
|
||||
"""Return True if this add-on is available on this platform."""
|
||||
if self.is_detached:
|
||||
addon_data = self._data.system.get(self._id)
|
||||
else:
|
||||
addon_data = self._data.cache.get(self._id)
|
||||
|
||||
# Architecture
|
||||
if not self.sys_arch.is_supported(self.supported_arch):
|
||||
if not self.sys_arch.is_supported(addon_data[ATTR_ARCH]):
|
||||
return False
|
||||
|
||||
# Machine / Hardware
|
||||
if self.sys_machine not in self.supported_machine:
|
||||
machine = addon_data.get(ATTR_MACHINE) or MACHINE_ALL
|
||||
if self.sys_machine not in machine:
|
||||
return False
|
||||
|
||||
# Home Assistant
|
||||
version = addon_data.get(ATTR_HOMEASSISTANT) or self.sys_homeassistant.version
|
||||
if StrictVersion(self.sys_homeassistant.version) < StrictVersion(version):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@property
|
||||
def version_installed(self):
|
||||
def version_installed(self) -> Optional[str]:
|
||||
"""Return installed version."""
|
||||
return self._data.user.get(self._id, {}).get(ATTR_VERSION)
|
||||
|
||||
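The reworked `available` property above gates an add-on on the running Home Assistant version with `StrictVersion`. A compact standalone sketch of that comparison follows; the names are illustrative, and `distutils` is used only because the diff uses it (it is deprecated in newer Python releases).

```python
# Sketch of the Home Assistant version gate added to Addon.available.
# If the add-on declares no minimum version, the installed version is used,
# so the check passes. Names here are illustrative, not the real attributes.
from distutils.version import StrictVersion
from typing import Optional


def ha_version_ok(installed: str, required: Optional[str]) -> bool:
    """Return True if the installed Home Assistant satisfies the add-on."""
    required = required or installed
    return StrictVersion(installed) >= StrictVersion(required)


print(ha_version_ok("0.91.1", "0.92.0"))  # False -> add-on reported unavailable
print(ha_version_ok("0.92.1", None))      # True  -> no minimum declared
```
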
@@ -128,7 +205,10 @@ class Addon(CoreSysAttributes):
|
||||
def _set_update(self, image: str, version: str) -> None:
|
||||
"""Update version of add-on."""
|
||||
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
|
||||
self._data.user[self._id][ATTR_VERSION] = version
|
||||
self._data.user[self._id].update({
|
||||
ATTR_VERSION: version,
|
||||
ATTR_IMAGE: image,
|
||||
})
|
||||
self.save_data()
|
||||
|
||||
def _restore_data(self, user: Dict[str, Any], system: Dict[str, Any], image: str) -> None:
|
||||
@@ -205,6 +285,20 @@ class Addon(CoreSysAttributes):
|
||||
return self._data.user[self._id].get(ATTR_ACCESS_TOKEN)
|
||||
return None
|
||||
|
||||
@property
|
||||
def ingress_token(self):
|
||||
"""Return access token for Hass.io API."""
|
||||
if self.is_installed:
|
||||
return self._data.user[self._id].get(ATTR_INGRESS_TOKEN)
|
||||
return None
|
||||
|
||||
@property
|
||||
def ingress_entry(self):
|
||||
"""Return ingress external URL."""
|
||||
if self.is_installed and self.with_ingress:
|
||||
return f"/api/hassio_ingress/{self.ingress_token}"
|
||||
return None
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
"""Return description of add-on."""
|
||||
@@ -229,7 +323,7 @@ class Addon(CoreSysAttributes):
|
||||
return self._mesh[ATTR_REPOSITORY]
|
||||
|
||||
@property
|
||||
def last_version(self):
|
||||
def latest_version(self):
|
||||
"""Return version of add-on."""
|
||||
if self._id in self._data.cache:
|
||||
return self._data.cache[self._id][ATTR_VERSION]
|
||||
@@ -271,10 +365,15 @@ class Addon(CoreSysAttributes):
|
||||
"""Return list of discoverable components/platforms."""
|
||||
return self._mesh.get(ATTR_DISCOVERY, [])
|
||||
|
||||
@property
|
||||
def ports_description(self):
|
||||
"""Return descriptions of ports."""
|
||||
return self._mesh.get(ATTR_PORTS_DESCRIPTION)
|
||||
|
||||
@property
|
||||
def ports(self):
|
||||
"""Return ports of add-on."""
|
||||
if self.host_network or ATTR_PORTS not in self._mesh:
|
||||
if ATTR_PORTS not in self._mesh:
|
||||
return None
|
||||
|
||||
if not self.is_installed or \
|
||||
@@ -287,13 +386,26 @@ class Addon(CoreSysAttributes):
|
||||
"""Set custom ports of add-on."""
|
||||
if value is None:
|
||||
self._data.user[self._id].pop(ATTR_NETWORK, None)
|
||||
else:
|
||||
new_ports = {}
|
||||
for container_port, host_port in value.items():
|
||||
if container_port in self._mesh.get(ATTR_PORTS, {}):
|
||||
new_ports[container_port] = host_port
|
||||
return
|
||||
|
||||
self._data.user[self._id][ATTR_NETWORK] = new_ports
|
||||
# Secure map ports to value
|
||||
new_ports = {}
|
||||
for container_port, host_port in value.items():
|
||||
if container_port in self._mesh.get(ATTR_PORTS, {}):
|
||||
new_ports[container_port] = host_port
|
||||
|
||||
self._data.user[self._id][ATTR_NETWORK] = new_ports
|
||||
|
||||
@property
|
||||
def ingress_url(self):
|
||||
"""Return URL to ingress url."""
|
||||
if not self.is_installed or not self.with_ingress:
|
||||
return None
|
||||
|
||||
webui = f"/api/hassio_ingress/{self.ingress_token}/"
|
||||
if ATTR_INGRESS_ENTRY in self._mesh:
|
||||
return f"{webui}{self._mesh[ATTR_INGRESS_ENTRY]}"
|
||||
return webui
|
||||
|
||||
@property
|
||||
def webui(self):
|
||||
@@ -326,6 +438,17 @@ class Addon(CoreSysAttributes):
|
||||
|
||||
return f"{proto}://[HOST]:{port}{s_suffix}"
|
||||
|
||||
@property
|
||||
def ingress_port(self):
|
||||
"""Return Ingress port."""
|
||||
if not self.is_installed or not self.with_ingress:
|
||||
return None
|
||||
|
||||
port = self._mesh[ATTR_INGRESS_PORT]
|
||||
if port == 0:
|
||||
return self.sys_ingress.get_dynamic_port(self.slug)
|
||||
return port
|
||||
|
||||
@property
|
||||
def host_network(self):
|
||||
"""Return True if add-on run on host network."""
|
||||
@@ -410,6 +533,11 @@ class Addon(CoreSysAttributes):
|
||||
"""Return True if the add-on access use stdin input."""
|
||||
return self._mesh[ATTR_STDIN]
|
||||
|
||||
@property
|
||||
def with_ingress(self):
|
||||
"""Return True if the add-on access support ingress."""
|
||||
return self._mesh[ATTR_INGRESS]
|
||||
|
||||
@property
|
||||
def with_gpio(self):
|
||||
"""Return True if the add-on access to GPIO interface."""
|
||||
@@ -440,6 +568,11 @@ class Addon(CoreSysAttributes):
|
||||
"""Return True if the add-on access to audio."""
|
||||
return self._mesh[ATTR_AUDIO]
|
||||
|
||||
@property
|
||||
def homeassistant_version(self) -> Optional[str]:
|
||||
"""Return min Home Assistant version they needed by Add-on."""
|
||||
return self._mesh.get(ATTR_HOMEASSISTANT)
|
||||
|
||||
@property
|
||||
def audio_output(self):
|
||||
"""Return ALSA config for output or None."""
|
||||
@@ -511,19 +644,20 @@ class Addon(CoreSysAttributes):
|
||||
def image(self):
|
||||
"""Return image name of add-on."""
|
||||
if self.is_installed:
|
||||
# NOTE: cleanup
|
||||
if ATTR_IMAGE in self._data.user[self._id]:
|
||||
return self._data.user[self._id][ATTR_IMAGE]
|
||||
return self.image_name
|
||||
return self._data.user[self._id].get(ATTR_IMAGE)
|
||||
return self.image_next
|
||||
|
||||
@property
|
||||
def image_name(self):
|
||||
def image_next(self):
|
||||
"""Return image name for install/update."""
|
||||
if self.is_detached:
|
||||
addon_data = self._data.system.get(self._id)
|
||||
else:
|
||||
addon_data = self._data.cache.get(self._id)
|
||||
return self._get_image(addon_data)
|
||||
|
||||
def _get_image(self, addon_data) -> str:
|
||||
"""Generate image name from data."""
|
||||
# Repository with Dockerhub images
|
||||
if ATTR_IMAGE in addon_data:
|
||||
arch = self.sys_arch.match(addon_data[ATTR_ARCH])
|
||||
@@ -616,8 +750,8 @@ class Addon(CoreSysAttributes):
|
||||
except vol.Invalid as ex:
|
||||
_LOGGER.error("Add-on %s have wrong options: %s", self._id,
|
||||
humanize_error(options, ex))
|
||||
except (OSError, json.JSONDecodeError) as err:
|
||||
_LOGGER.error("Add-on %s can't write options: %s", self._id, err)
|
||||
except JsonFileError:
|
||||
_LOGGER.error("Add-on %s can't write options", self._id)
|
||||
else:
|
||||
return True
|
||||
|
||||
@@ -644,7 +778,7 @@ class Addon(CoreSysAttributes):
|
||||
|
||||
return True
|
||||
|
||||
async def _install_apparmor(self):
|
||||
async def _install_apparmor(self) -> None:
|
||||
"""Install or Update AppArmor profile for Add-on."""
|
||||
exists_local = self.sys_host.apparmor.exists(self.slug)
|
||||
exists_addon = self.path_apparmor.exists()
|
||||
@@ -666,7 +800,7 @@ class Addon(CoreSysAttributes):
|
||||
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
|
||||
|
||||
@property
|
||||
def schema(self):
|
||||
def schema(self) -> vol.Schema:
|
||||
"""Create a schema for add-on options."""
|
||||
raw_schema = self._mesh[ATTR_SCHEMA]
|
||||
|
||||
@@ -674,7 +808,7 @@ class Addon(CoreSysAttributes):
|
||||
return vol.Schema(dict)
|
||||
return vol.Schema(vol.All(dict, validate_options(raw_schema)))
|
||||
|
||||
def test_update_schema(self):
|
||||
def test_update_schema(self) -> bool:
|
||||
"""Check if the existing configuration is valid after update."""
|
||||
if not self.is_installed or self.is_detached:
|
||||
return True
|
||||
@@ -704,17 +838,17 @@ class Addon(CoreSysAttributes):
|
||||
return False
|
||||
return True
|
||||
|
||||
async def install(self):
|
||||
async def install(self) -> None:
|
||||
"""Install an add-on."""
|
||||
if not self.available:
|
||||
_LOGGER.error(
|
||||
"Add-on %s not supported on %s with %s architecture",
|
||||
self._id, self.sys_machine, self.sys_arch.supported)
|
||||
return False
|
||||
raise AddonsNotSupportedError()
|
||||
|
||||
if self.is_installed:
|
||||
_LOGGER.error("Add-on %s is already installed", self._id)
|
||||
return False
|
||||
_LOGGER.warning("Add-on %s is already installed", self._id)
|
||||
return
|
||||
|
||||
if not self.path_data.is_dir():
|
||||
_LOGGER.info(
|
||||
@@ -724,18 +858,20 @@ class Addon(CoreSysAttributes):
|
||||
# Setup/Fix AppArmor profile
|
||||
await self._install_apparmor()
|
||||
|
||||
if not await self.instance.install(
|
||||
self.last_version, self.image_name):
|
||||
return False
|
||||
|
||||
self._set_install(self.image_name, self.last_version)
|
||||
return True
|
||||
try:
|
||||
await self.instance.install(self.latest_version, self.image_next)
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
else:
|
||||
self._set_install(self.image_next, self.latest_version)
|
||||
|
||||
@check_installed
|
||||
async def uninstall(self):
|
||||
async def uninstall(self) -> None:
|
||||
"""Remove an add-on."""
|
||||
if not await self.instance.remove():
|
||||
return False
|
||||
try:
|
||||
await self.instance.remove()
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
if self.path_data.is_dir():
|
||||
_LOGGER.info(
|
||||
@@ -752,13 +888,11 @@ class Addon(CoreSysAttributes):
|
||||
with suppress(HostAppArmorError):
|
||||
await self.sys_host.apparmor.remove_profile(self.slug)
|
||||
|
||||
# Remove discovery messages
|
||||
# Cleanup internal data
|
||||
self.remove_discovery()
|
||||
|
||||
self._set_uninstall()
|
||||
return True
|
||||
|
||||
async def state(self):
|
||||
async def state(self) -> str:
|
||||
"""Return running state of add-on."""
|
||||
if not self.is_installed:
|
||||
return STATE_NONE
|
||||
@@ -768,47 +902,58 @@ class Addon(CoreSysAttributes):
|
||||
return STATE_STOPPED
|
||||
|
||||
@check_installed
|
||||
async def start(self):
|
||||
async def start(self) -> None:
|
||||
"""Set options and start add-on."""
|
||||
if await self.instance.is_running():
|
||||
_LOGGER.warning("%s already running!", self.slug)
|
||||
return
|
||||
|
||||
# Access Token
|
||||
self._data.user[self._id][ATTR_ACCESS_TOKEN] = create_token()
|
||||
self._data.user[self._id][ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
|
||||
self.save_data()
|
||||
|
||||
# Options
|
||||
if not self.write_options():
|
||||
return False
|
||||
raise AddonsError()
|
||||
|
||||
# Sound
|
||||
if self.with_audio and not self.write_asound():
|
||||
return False
|
||||
raise AddonsError()
|
||||
|
||||
return await self.instance.run()
|
||||
try:
|
||||
await self.instance.run()
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
@check_installed
|
||||
def stop(self):
|
||||
"""Stop add-on.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.stop()
|
||||
async def stop(self) -> None:
|
||||
"""Stop add-on."""
|
||||
try:
|
||||
return await self.instance.stop()
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
@check_installed
|
||||
async def update(self):
|
||||
async def update(self) -> None:
|
||||
"""Update add-on."""
|
||||
last_state = await self.state()
|
||||
|
||||
if self.last_version == self.version_installed:
|
||||
if self.latest_version == self.version_installed:
|
||||
_LOGGER.warning("No update available for add-on %s", self._id)
|
||||
return False
|
||||
return
|
||||
|
||||
if not await self.instance.update(
|
||||
self.last_version, self.image_name):
|
||||
return False
|
||||
self._set_update(self.image_name, self.last_version)
|
||||
# Check if still available; maybe something has changed
|
||||
if not self.available:
|
||||
_LOGGER.error(
|
||||
"Add-on %s not supported on %s with %s architecture",
|
||||
self._id, self.sys_machine, self.sys_arch.supported)
|
||||
raise AddonsNotSupportedError()
|
||||
|
||||
# Update instance
|
||||
last_state = await self.state()
|
||||
try:
|
||||
await self.instance.update(self.latest_version, self.image_next)
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
self._set_update(self.image_next, self.latest_version)
|
||||
|
||||
# Setup/Fix AppArmor profile
|
||||
await self._install_apparmor()
|
||||
@@ -816,16 +961,16 @@ class Addon(CoreSysAttributes):
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await self.start()
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
async def restart(self):
|
||||
async def restart(self) -> None:
|
||||
"""Restart add-on."""
|
||||
await self.stop()
|
||||
return await self.start()
|
||||
with suppress(AddonsError):
|
||||
await self.stop()
|
||||
await self.start()
|
||||
|
||||
@check_installed
|
||||
def logs(self):
|
||||
def logs(self) -> Awaitable[bytes]:
|
||||
"""Return add-ons log output.
|
||||
|
||||
Return a coroutine.
|
||||
@@ -833,33 +978,37 @@ class Addon(CoreSysAttributes):
|
||||
return self.instance.logs()
|
||||
|
||||
@check_installed
|
||||
def stats(self):
|
||||
"""Return stats of container.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.stats()
|
||||
async def stats(self) -> DockerStats:
|
||||
"""Return stats of container."""
|
||||
try:
|
||||
return await self.instance.stats()
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
@check_installed
|
||||
async def rebuild(self):
|
||||
async def rebuild(self) -> None:
|
||||
"""Perform a rebuild of local build add-on."""
|
||||
last_state = await self.state()
|
||||
|
||||
if not self.need_build:
|
||||
_LOGGER.error("Can't rebuild a none local build add-on!")
|
||||
return False
|
||||
_LOGGER.error("Can't rebuild a image based add-on")
|
||||
raise AddonsNotSupportedError()
|
||||
if self.version_installed != self.latest_version:
|
||||
_LOGGER.error("Version changed, use Update instead Rebuild")
|
||||
raise AddonsError()
|
||||
|
||||
# remove docker container but not addon config
|
||||
if not await self.instance.remove():
|
||||
return False
|
||||
|
||||
if not await self.instance.install(self.version_installed):
|
||||
return False
|
||||
try:
|
||||
await self.instance.remove()
|
||||
await self.instance.install(self.version_installed)
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
else:
|
||||
self._set_update(self.image, self.version_installed)
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await self.start()
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
async def write_stdin(self, data):
|
||||
@@ -869,18 +1018,23 @@ class Addon(CoreSysAttributes):
|
||||
"""
|
||||
if not self.with_stdin:
|
||||
_LOGGER.error("Add-on don't support write to stdin!")
|
||||
return False
|
||||
raise AddonsNotSupportedError()
|
||||
|
||||
return await self.instance.write_stdin(data)
|
||||
try:
|
||||
return await self.instance.write_stdin(data)
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
@check_installed
|
||||
async def snapshot(self, tar_file):
|
||||
async def snapshot(self, tar_file: tarfile.TarFile) -> None:
|
||||
"""Snapshot state of an add-on."""
|
||||
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
|
||||
# store local image
|
||||
if self.need_build and not await \
|
||||
self.instance.export_image(Path(temp, 'image.tar')):
|
||||
return False
|
||||
if self.need_build:
|
||||
try:
|
||||
await self.instance.export_image(Path(temp, 'image.tar'))
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
data = {
|
||||
ATTR_USER: self._data.user.get(self._id, {}),
|
||||
@@ -892,9 +1046,9 @@ class Addon(CoreSysAttributes):
|
||||
# Store local configs/state
|
||||
try:
|
||||
write_json_file(Path(temp, 'addon.json'), data)
|
||||
except (OSError, json.JSONDecodeError) as err:
|
||||
_LOGGER.error("Can't save meta for %s: %s", self._id, err)
|
||||
return False
|
||||
except JsonFileError:
|
||||
_LOGGER.error("Can't save meta for %s", self._id)
|
||||
raise AddonsError() from None
|
||||
|
||||
# Store AppArmor Profile
|
||||
if self.sys_host.apparmor.exists(self.slug):
|
||||
@@ -903,7 +1057,7 @@ class Addon(CoreSysAttributes):
|
||||
self.sys_host.apparmor.backup_profile(self.slug, profile)
|
||||
except HostAppArmorError:
|
||||
_LOGGER.error("Can't backup AppArmor profile")
|
||||
return False
|
||||
raise AddonsError() from None
|
||||
|
||||
# write into tarfile
|
||||
def _write_tarfile():
|
||||
@@ -917,12 +1071,11 @@ class Addon(CoreSysAttributes):
|
||||
await self.sys_run_in_executor(_write_tarfile)
|
||||
except (tarfile.TarError, OSError) as err:
|
||||
_LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
|
||||
return False
|
||||
raise AddonsError() from None
|
||||
|
||||
_LOGGER.info("Finish snapshot for addon %s", self._id)
|
||||
return True
|
||||
|
||||
async def restore(self, tar_file):
|
||||
async def restore(self, tar_file: tarfile.TarFile) -> None:
|
||||
"""Restore state of an add-on."""
|
||||
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
|
||||
# extract snapshot
|
||||
@@ -935,13 +1088,13 @@ class Addon(CoreSysAttributes):
|
||||
await self.sys_run_in_executor(_extract_tarfile)
|
||||
except tarfile.TarError as err:
|
||||
_LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
|
||||
return False
|
||||
raise AddonsError() from None
|
||||
|
||||
# Read snapshot data
|
||||
try:
|
||||
data = read_json_file(Path(temp, 'addon.json'))
|
||||
except (OSError, json.JSONDecodeError) as err:
|
||||
_LOGGER.error("Can't read addon.json: %s", err)
|
||||
except JsonFileError:
|
||||
raise AddonsError() from None
|
||||
|
||||
# Validate
|
||||
try:
|
||||
@@ -949,25 +1102,33 @@ class Addon(CoreSysAttributes):
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error("Can't validate %s, snapshot data: %s",
|
||||
self._id, humanize_error(data, err))
|
||||
return False
|
||||
raise AddonsError() from None
|
||||
|
||||
# Restore data or reload add-on
|
||||
# Restore local add-on information
|
||||
_LOGGER.info("Restore config for addon %s", self._id)
|
||||
self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM], self.image_name)
|
||||
restore_image = self._get_image(data[ATTR_SYSTEM])
|
||||
self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM], restore_image)
|
||||
|
||||
# Check version / restore image
|
||||
version = data[ATTR_VERSION]
|
||||
if not await self.instance.exists():
|
||||
_LOGGER.info("Restore image for addon %s", self._id)
|
||||
_LOGGER.info("Restore/Install image for addon %s", self._id)
|
||||
|
||||
image_file = Path(temp, 'image.tar')
|
||||
if image_file.is_file():
|
||||
await self.instance.import_image(image_file, version)
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.import_image(image_file, version)
|
||||
else:
|
||||
if await self.instance.install(version, self.image_name):
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.install(version, restore_image)
|
||||
await self.instance.cleanup()
|
||||
elif self.instance.version != version or self.legacy:
|
||||
_LOGGER.info("Restore/Update image for addon %s", self._id)
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.update(version, restore_image)
|
||||
else:
|
||||
await self.instance.stop()
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.stop()
|
||||
|
||||
# Restore data
|
||||
def _restore_data():
|
||||
@@ -981,7 +1142,7 @@ class Addon(CoreSysAttributes):
|
||||
await self.sys_run_in_executor(_restore_data)
|
||||
except shutil.Error as err:
|
||||
_LOGGER.error("Can't restore origin data: %s", err)
|
||||
return False
|
||||
raise AddonsError() from None
|
||||
|
||||
# Restore AppArmor
|
||||
profile_file = Path(temp, 'apparmor.txt')
|
||||
@@ -991,11 +1152,10 @@ class Addon(CoreSysAttributes):
|
||||
self.slug, profile_file)
|
||||
except HostAppArmorError:
|
||||
_LOGGER.error("Can't restore AppArmor profile")
|
||||
return False
|
||||
raise AddonsError() from None
|
||||
|
||||
# Run add-on
|
||||
if data[ATTR_STATE] == STATE_STARTED:
|
||||
return await self.start()
|
||||
|
||||
_LOGGER.info("Finish restore for add-on %s", self._id)
|
||||
return True
|
||||
|
@@ -1,19 +1,25 @@
|
||||
"""Init file for Hass.io add-on data."""
|
||||
import logging
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from .utils import extract_hash_from_path
|
||||
from .validate import (
|
||||
SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG)
|
||||
from ..const import (
|
||||
FILE_HASSIO_ADDONS, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
|
||||
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
|
||||
ATTR_LOCATON,
|
||||
ATTR_REPOSITORY,
|
||||
ATTR_SLUG,
|
||||
ATTR_SYSTEM,
|
||||
ATTR_USER,
|
||||
FILE_HASSIO_ADDONS,
|
||||
REPOSITORY_CORE,
|
||||
REPOSITORY_LOCAL,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import JsonFileError
|
||||
from ..utils.json import JsonConfig, read_json_file
|
||||
from .utils import extract_hash_from_path
|
||||
from .validate import SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -54,12 +60,10 @@ class AddonsData(JsonConfig, CoreSysAttributes):
|
||||
self._repositories = {}
|
||||
|
||||
# read core repository
|
||||
self._read_addons_folder(
|
||||
self.sys_config.path_addons_core, REPOSITORY_CORE)
|
||||
self._read_addons_folder(self.sys_config.path_addons_core, REPOSITORY_CORE)
|
||||
|
||||
# read local repository
|
||||
self._read_addons_folder(
|
||||
self.sys_config.path_addons_local, REPOSITORY_LOCAL)
|
||||
self._read_addons_folder(self.sys_config.path_addons_local, REPOSITORY_LOCAL)
|
||||
|
||||
# add built-in repositories information
|
||||
self._set_builtin_repositories()
|
||||
@@ -76,15 +80,12 @@ class AddonsData(JsonConfig, CoreSysAttributes):
|
||||
# exists repository json
|
||||
repository_file = Path(path, "repository.json")
|
||||
try:
|
||||
repository_info = SCHEMA_REPOSITORY_CONFIG(
|
||||
read_json_file(repository_file)
|
||||
repository_info = SCHEMA_REPOSITORY_CONFIG(read_json_file(repository_file))
|
||||
except JsonFileError:
|
||||
_LOGGER.warning(
|
||||
"Can't read repository information from %s", repository_file
|
||||
)
|
||||
|
||||
except (OSError, json.JSONDecodeError, UnicodeDecodeError):
|
||||
_LOGGER.warning("Can't read repository information from %s",
|
||||
repository_file)
|
||||
return
|
||||
|
||||
except vol.Invalid:
|
||||
_LOGGER.warning("Repository parse error %s", repository_file)
|
||||
return
|
||||
@@ -98,23 +99,21 @@ class AddonsData(JsonConfig, CoreSysAttributes):
|
||||
for addon in path.glob("**/config.json"):
|
||||
try:
|
||||
addon_config = read_json_file(addon)
|
||||
|
||||
except (OSError, json.JSONDecodeError, UnicodeDecodeError):
|
||||
_LOGGER.warning("Can't read %s", addon)
|
||||
except JsonFileError:
|
||||
_LOGGER.warning("Can't read %s from repository %s", addon, repository)
|
||||
continue
|
||||
|
||||
# validate
|
||||
try:
|
||||
addon_config = SCHEMA_ADDON_CONFIG(addon_config)
|
||||
|
||||
except vol.Invalid as ex:
|
||||
_LOGGER.warning("Can't read %s: %s", addon,
|
||||
humanize_error(addon_config, ex))
|
||||
_LOGGER.warning(
|
||||
"Can't read %s: %s", addon, humanize_error(addon_config, ex)
|
||||
)
|
||||
continue
|
||||
|
||||
# Generate slug
|
||||
addon_slug = "{}_{}".format(
|
||||
repository, addon_config[ATTR_SLUG])
|
||||
addon_slug = "{}_{}".format(repository, addon_config[ATTR_SLUG])
|
||||
|
||||
# store
|
||||
addon_config[ATTR_REPOSITORY] = repository
|
||||
@@ -126,14 +125,12 @@ class AddonsData(JsonConfig, CoreSysAttributes):
|
||||
try:
|
||||
builtin_file = Path(__file__).parent.joinpath("built-in.json")
|
||||
builtin_data = read_json_file(builtin_file)
|
||||
except (OSError, json.JSONDecodeError) as err:
|
||||
_LOGGER.warning("Can't read built-in json: %s", err)
|
||||
except JsonFileError:
|
||||
_LOGGER.warning("Can't read built-in json")
|
||||
return
|
||||
|
||||
# core repository
|
||||
self._repositories[REPOSITORY_CORE] = \
|
||||
builtin_data[REPOSITORY_CORE]
|
||||
self._repositories[REPOSITORY_CORE] = builtin_data[REPOSITORY_CORE]
|
||||
|
||||
# local repository
|
||||
self._repositories[REPOSITORY_LOCAL] = \
|
||||
builtin_data[REPOSITORY_LOCAL]
|
||||
self._repositories[REPOSITORY_LOCAL] = builtin_data[REPOSITORY_LOCAL]
|
||||
|
@@ -20,6 +20,7 @@ from ..const import (
|
||||
SECURITY_DISABLE,
|
||||
SECURITY_PROFILE,
|
||||
)
|
||||
from ..exceptions import AddonsNotSupportedError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .addon import Addon
|
||||
@@ -107,7 +108,7 @@ def check_installed(method):
|
||||
"""Return False if not installed or the function."""
|
||||
if not addon.is_installed:
|
||||
_LOGGER.error("Addon %s is not installed", addon.slug)
|
||||
return False
|
||||
raise AddonsNotSupportedError()
|
||||
return await method(addon, *args, **kwargs)
|
||||
|
||||
return wrap_check
|
||||
|
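For context, a self-contained sketch of the `check_installed` decorator after this change: a missing add-on now raises an exception instead of returning `False`. The exception class is stubbed here for illustration; in the Supervisor it comes from `hassio.exceptions`.

```python
# Sketch of the check_installed pattern shown in this hunk.
import logging
from functools import wraps

_LOGGER = logging.getLogger(__name__)


class AddonsNotSupportedError(Exception):
    """Stand-in for the Supervisor's exception type."""


def check_installed(method):
    """Wrap an Addon coroutine so it fails fast when the add-on is missing."""

    @wraps(method)
    async def wrap_check(addon, *args, **kwargs):
        if not addon.is_installed:
            _LOGGER.error("Addon %s is not installed", addon.slug)
            raise AddonsNotSupportedError()
        return await method(addon, *args, **kwargs)

    return wrap_check
```
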
@@ -1,29 +1,95 @@
|
||||
"""Validate add-ons options schema."""
|
||||
import logging
|
||||
import re
|
||||
import secrets
|
||||
import uuid
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ARCH_ALL, ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_ARGS,
|
||||
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_AUTH_API,
|
||||
ATTR_AUTO_UART, ATTR_AUTO_UPDATE, ATTR_BOOT, ATTR_BUILD_FROM,
|
||||
ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
|
||||
ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
|
||||
ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE,
|
||||
ATTR_MAINTAINER, ATTR_MAP, ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS,
|
||||
ATTR_PORTS, ATTR_PRIVILEGED, ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA,
|
||||
ATTR_SERVICES, ATTR_SLUG, ATTR_SQUASH, ATTR_STARTUP, ATTR_STATE,
|
||||
ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT, ATTR_TMPFS, ATTR_URL, ATTR_USER,
|
||||
ATTR_UUID, ATTR_VERSION, ATTR_WEBUI, BOOT_AUTO, BOOT_MANUAL,
|
||||
PRIVILEGED_ALL, ROLE_ALL, ROLE_DEFAULT, STARTUP_ALL, STARTUP_APPLICATION,
|
||||
STARTUP_SERVICES, STATE_STARTED, STATE_STOPPED)
|
||||
from ..services.validate import DISCOVERY_SERVICES
|
||||
ARCH_ALL,
|
||||
ATTR_ACCESS_TOKEN,
|
||||
ATTR_APPARMOR,
|
||||
ATTR_ARCH,
|
||||
ATTR_ARGS,
|
||||
ATTR_AUDIO,
|
||||
ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT,
|
||||
ATTR_AUTH_API,
|
||||
ATTR_AUTO_UART,
|
||||
ATTR_AUTO_UPDATE,
|
||||
ATTR_BOOT,
|
||||
ATTR_BUILD_FROM,
|
||||
ATTR_DESCRIPTON,
|
||||
ATTR_DEVICES,
|
||||
ATTR_DEVICETREE,
|
||||
ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API,
|
||||
ATTR_ENVIRONMENT,
|
||||
ATTR_FULL_ACCESS,
|
||||
ATTR_GPIO,
|
||||
ATTR_HASSIO_API,
|
||||
ATTR_HASSIO_ROLE,
|
||||
ATTR_HOMEASSISTANT,
|
||||
ATTR_HOMEASSISTANT_API,
|
||||
ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC,
|
||||
ATTR_HOST_NETWORK,
|
||||
ATTR_HOST_PID,
|
||||
ATTR_IMAGE,
|
||||
ATTR_INGRESS,
|
||||
ATTR_INGRESS_ENTRY,
|
||||
ATTR_INGRESS_PORT,
|
||||
ATTR_INGRESS_TOKEN,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_LEGACY,
|
||||
ATTR_LOCATON,
|
||||
ATTR_MACHINE,
|
||||
ATTR_MAINTAINER,
|
||||
ATTR_MAP,
|
||||
ATTR_NAME,
|
||||
ATTR_NETWORK,
|
||||
ATTR_OPTIONS,
|
||||
ATTR_PORTS,
|
||||
ATTR_PORTS_DESCRIPTION,
|
||||
ATTR_PRIVILEGED,
|
||||
ATTR_PROTECTED,
|
||||
ATTR_REPOSITORY,
|
||||
ATTR_SCHEMA,
|
||||
ATTR_SERVICES,
|
||||
ATTR_SLUG,
|
||||
ATTR_SQUASH,
|
||||
ATTR_STARTUP,
|
||||
ATTR_STATE,
|
||||
ATTR_STDIN,
|
||||
ATTR_SYSTEM,
|
||||
ATTR_TIMEOUT,
|
||||
ATTR_TMPFS,
|
||||
ATTR_URL,
|
||||
ATTR_USER,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
ATTR_WEBUI,
|
||||
BOOT_AUTO,
|
||||
BOOT_MANUAL,
|
||||
PRIVILEGED_ALL,
|
||||
ROLE_ALL,
|
||||
ROLE_DEFAULT,
|
||||
STARTUP_ALL,
|
||||
STARTUP_APPLICATION,
|
||||
STARTUP_SERVICES,
|
||||
STATE_STARTED,
|
||||
STATE_STOPPED,
|
||||
)
|
||||
from ..discovery.validate import valid_discovery_service
|
||||
from ..validate import (
|
||||
ALSA_DEVICE, DOCKER_PORTS, NETWORK_PORT, SHA256, UUID_MATCH)
|
||||
ALSA_DEVICE,
|
||||
DOCKER_PORTS,
|
||||
DOCKER_PORTS_DESCRIPTION,
|
||||
NETWORK_PORT,
|
||||
TOKEN,
|
||||
UUID_MATCH,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -79,16 +145,21 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
|
||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
||||
vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
|
||||
vol.Optional(ATTR_URL): vol.Url(),
|
||||
vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
|
||||
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
|
||||
vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
|
||||
vol.Optional(ATTR_URL): vol.Url(),
|
||||
vol.Required(ATTR_STARTUP):
|
||||
vol.All(_simple_startup, vol.In(STARTUP_ALL)),
|
||||
vol.Required(ATTR_BOOT):
|
||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
||||
vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
|
||||
vol.Optional(ATTR_WEBUI):
|
||||
vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
|
||||
vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(NETWORK_PORT, vol.Equal(0)),
|
||||
vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
|
||||
vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
|
||||
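To make the new ingress options concrete, here is a small standalone validation sketch using only the validators visible in this hunk. `NETWORK_PORT` is approximated, since its real definition lives elsewhere in `hassio/validate.py` and is not shown in this diff.

```python
# Standalone sketch of the new ingress options in SCHEMA_ADDON_CONFIG.
# NETWORK_PORT is approximated; the real validator is defined elsewhere.
import voluptuous as vol

NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))

SCHEMA_INGRESS_OPTIONS = vol.Schema(
    {
        vol.Optional("ingress", default=False): vol.Boolean(),
        # Port 0 means "assign a dynamic port" (see Addon.ingress_port).
        vol.Optional("ingress_port", default=8099): vol.Any(NETWORK_PORT, vol.Equal(0)),
        vol.Optional("ingress_entry"): vol.Coerce(str),
    },
    extra=vol.ALLOW_EXTRA,
)

print(SCHEMA_INGRESS_OPTIONS({"ingress": True, "ingress_port": 0}))
```
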
@@ -114,7 +185,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
|
||||
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
|
||||
vol.Optional(ATTR_DISCOVERY): [vol.In(DISCOVERY_SERVICES)],
|
||||
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
|
||||
vol.Required(ATTR_OPTIONS): dict,
|
||||
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
|
||||
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
|
||||
@@ -156,9 +227,10 @@ SCHEMA_BUILD_CONFIG = vol.Schema({
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_ADDON_USER = vol.Schema({
|
||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||
vol.Optional(ATTR_IMAGE): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_IMAGE): vol.Coerce(str),
|
||||
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
|
||||
vol.Optional(ATTR_ACCESS_TOKEN): SHA256,
|
||||
vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
|
||||
vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(str),
|
||||
vol.Optional(ATTR_OPTIONS, default=dict): dict,
|
||||
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_BOOT):
|
||||
|
@@ -1,23 +1,25 @@
|
||||
"""Init file for Hass.io RESTful API."""
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from aiohttp import web
|
||||
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from .addons import APIAddons
|
||||
from .auth import APIAuth
|
||||
from .discovery import APIDiscovery
|
||||
from .homeassistant import APIHomeAssistant
|
||||
from .hardware import APIHardware
|
||||
from .host import APIHost
|
||||
from .hassos import APIHassOS
|
||||
from .homeassistant import APIHomeAssistant
|
||||
from .host import APIHost
|
||||
from .info import APIInfo
|
||||
from .ingress import APIIngress
|
||||
from .proxy import APIProxy
|
||||
from .supervisor import APISupervisor
|
||||
from .snapshots import APISnapshots
|
||||
from .services import APIServices
|
||||
from .security import SecurityMiddleware
|
||||
from ..coresys import CoreSysAttributes
|
||||
from .services import APIServices
|
||||
from .snapshots import APISnapshots
|
||||
from .supervisor import APISupervisor
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -25,18 +27,18 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class RestAPI(CoreSysAttributes):
|
||||
"""Handle RESTful API for Hass.io."""
|
||||
|
||||
def __init__(self, coresys):
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize Docker base wrapper."""
|
||||
self.coresys = coresys
|
||||
self.security = SecurityMiddleware(coresys)
|
||||
self.webapp = web.Application(
|
||||
self.coresys: CoreSys = coresys
|
||||
self.security: SecurityMiddleware = SecurityMiddleware(coresys)
|
||||
self.webapp: web.Application = web.Application(
|
||||
middlewares=[self.security.token_validation])
|
||||
|
||||
# service stuff
|
||||
self._runner = web.AppRunner(self.webapp)
|
||||
self._site = None
|
||||
self._runner: web.AppRunner = web.AppRunner(self.webapp)
|
||||
self._site: Optional[web.TCPSite] = None
|
||||
|
||||
async def load(self):
|
||||
async def load(self) -> None:
|
||||
"""Register REST API Calls."""
|
||||
self._register_supervisor()
|
||||
self._register_host()
|
||||
@@ -46,13 +48,14 @@ class RestAPI(CoreSysAttributes):
|
||||
self._register_proxy()
|
||||
self._register_panel()
|
||||
self._register_addons()
|
||||
self._register_ingress()
|
||||
self._register_snapshots()
|
||||
self._register_discovery()
|
||||
self._register_services()
|
||||
self._register_info()
|
||||
self._register_auth()
|
||||
|
||||
def _register_host(self):
|
||||
def _register_host(self) -> None:
|
||||
"""Register hostcontrol functions."""
|
||||
api_host = APIHost()
|
||||
api_host.coresys = self.coresys
|
||||
@@ -72,7 +75,7 @@ class RestAPI(CoreSysAttributes):
|
||||
api_host.service_reload),
|
||||
])
|
||||
|
||||
def _register_hassos(self):
|
||||
def _register_hassos(self) -> None:
|
||||
"""Register HassOS functions."""
|
||||
api_hassos = APIHassOS()
|
||||
api_hassos.coresys = self.coresys
|
||||
@@ -84,7 +87,7 @@ class RestAPI(CoreSysAttributes):
|
||||
web.post('/hassos/config/sync', api_hassos.config_sync),
|
||||
])
|
||||
|
||||
def _register_hardware(self):
|
||||
def _register_hardware(self) -> None:
|
||||
"""Register hardware functions."""
|
||||
api_hardware = APIHardware()
|
||||
api_hardware.coresys = self.coresys
|
||||
@@ -94,7 +97,7 @@ class RestAPI(CoreSysAttributes):
|
||||
web.get('/hardware/audio', api_hardware.audio),
|
||||
])
|
||||
|
||||
def _register_info(self):
|
||||
def _register_info(self) -> None:
|
||||
"""Register info functions."""
|
||||
api_info = APIInfo()
|
||||
api_info.coresys = self.coresys
|
||||
@@ -103,7 +106,7 @@ class RestAPI(CoreSysAttributes):
|
||||
web.get('/info', api_info.info),
|
||||
])
|
||||
|
||||
def _register_auth(self):
|
||||
def _register_auth(self) -> None:
|
||||
"""Register auth functions."""
|
||||
api_auth = APIAuth()
|
||||
api_auth.coresys = self.coresys
|
||||
@@ -112,7 +115,7 @@ class RestAPI(CoreSysAttributes):
|
||||
web.post('/auth', api_auth.auth),
|
||||
])
|
||||
|
||||
def _register_supervisor(self):
|
||||
def _register_supervisor(self) -> None:
|
||||
"""Register Supervisor functions."""
|
||||
api_supervisor = APISupervisor()
|
||||
api_supervisor.coresys = self.coresys
|
||||
@@ -127,7 +130,7 @@ class RestAPI(CoreSysAttributes):
|
||||
web.post('/supervisor/options', api_supervisor.options),
|
||||
])
|
||||
|
||||
def _register_homeassistant(self):
|
||||
def _register_homeassistant(self) -> None:
|
||||
"""Register Home Assistant functions."""
|
||||
api_hass = APIHomeAssistant()
|
||||
api_hass.coresys = self.coresys
|
||||
@@ -142,9 +145,10 @@ class RestAPI(CoreSysAttributes):
|
||||
web.post('/homeassistant/stop', api_hass.stop),
|
||||
web.post('/homeassistant/start', api_hass.start),
|
||||
web.post('/homeassistant/check', api_hass.check),
|
||||
web.post('/homeassistant/rebuild', api_hass.rebuild),
|
||||
])
|
||||
|
||||
def _register_proxy(self):
|
||||
def _register_proxy(self) -> None:
|
||||
"""Register Home Assistant API Proxy."""
|
||||
api_proxy = APIProxy()
|
||||
api_proxy.coresys = self.coresys
|
||||
@@ -158,7 +162,7 @@ class RestAPI(CoreSysAttributes):
|
||||
web.get('/homeassistant/api/', api_proxy.api),
|
||||
])
|
||||
|
||||
def _register_addons(self):
|
||||
def _register_addons(self) -> None:
|
||||
"""Register Add-on functions."""
|
||||
api_addons = APIAddons()
|
||||
api_addons.coresys = self.coresys
|
||||
@@ -184,7 +188,17 @@ class RestAPI(CoreSysAttributes):
|
||||
web.get('/addons/{addon}/stats', api_addons.stats),
|
||||
])
|
||||
|
||||
def _register_snapshots(self):
|
||||
def _register_ingress(self) -> None:
|
||||
"""Register Ingress functions."""
|
||||
api_ingress = APIIngress()
|
||||
api_ingress.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.post('/ingress/session', api_ingress.create_session),
|
||||
web.view('/ingress/{token}/{path:.*}', api_ingress.handler),
|
||||
])
|
||||
|
||||
def _register_snapshots(self) -> None:
|
||||
"""Register snapshots functions."""
|
||||
api_snapshots = APISnapshots()
|
||||
api_snapshots.coresys = self.coresys
|
||||
@@ -204,7 +218,7 @@ class RestAPI(CoreSysAttributes):
|
||||
web.get('/snapshots/{snapshot}/download', api_snapshots.download),
|
||||
])
|
||||
|
||||
def _register_services(self):
|
||||
def _register_services(self) -> None:
|
||||
"""Register services functions."""
|
||||
api_services = APIServices()
|
||||
api_services.coresys = self.coresys
|
||||
@@ -216,7 +230,7 @@ class RestAPI(CoreSysAttributes):
|
||||
web.delete('/services/{service}', api_services.del_service),
|
||||
])
|
||||
|
||||
def _register_discovery(self):
|
||||
def _register_discovery(self) -> None:
|
||||
"""Register discovery functions."""
|
||||
api_discovery = APIDiscovery()
|
||||
api_discovery.coresys = self.coresys
|
||||
@@ -228,7 +242,7 @@ class RestAPI(CoreSysAttributes):
|
||||
web.post('/discovery', api_discovery.set_discovery),
|
||||
])
|
||||
|
||||
def _register_panel(self):
|
||||
def _register_panel(self) -> None:
|
||||
"""Register panel for Home Assistant."""
|
||||
panel_dir = Path(__file__).parent.joinpath("panel")
|
||||
|
||||
@@ -256,7 +270,7 @@ class RestAPI(CoreSysAttributes):
|
||||
# This route is for HA > 0.70
|
||||
self.webapp.add_routes([web.static('/app', panel_dir)])
|
||||
|
||||
async def start(self):
|
||||
async def start(self) -> None:
|
||||
"""Run RESTful API webserver."""
|
||||
await self._runner.setup()
|
||||
self._site = web.TCPSite(
|
||||
@@ -270,7 +284,7 @@ class RestAPI(CoreSysAttributes):
|
||||
else:
|
||||
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
|
||||
|
||||
async def stop(self):
|
||||
async def stop(self) -> None:
|
||||
"""Stop RESTful API webserver."""
|
||||
if not self._site:
|
||||
return
|
||||
|
@@ -1,31 +1,91 @@
|
||||
"""Init file for Hass.io Home Assistant RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Awaitable, Dict, List
|
||||
|
||||
from aiohttp import web
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
from ..addons.addon import Addon
|
||||
from ..addons.utils import rating_security
|
||||
from ..const import (
|
||||
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
|
||||
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
|
||||
ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG,
|
||||
ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
|
||||
ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
|
||||
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
|
||||
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
|
||||
ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
|
||||
ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
|
||||
ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
|
||||
ATTR_DISCOVERY, ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API,
|
||||
ATTR_FULL_ACCESS, ATTR_PROTECTED, ATTR_RATING, ATTR_HOST_PID,
|
||||
ATTR_HASSIO_ROLE, ATTR_MACHINE, ATTR_AVAILABLE, ATTR_AUTH_API,
|
||||
ATTR_ADDONS,
|
||||
ATTR_APPARMOR,
|
||||
ATTR_ARCH,
|
||||
ATTR_AUDIO,
|
||||
ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT,
|
||||
ATTR_AUTH_API,
|
||||
ATTR_AUTO_UPDATE,
|
||||
ATTR_AVAILABLE,
|
||||
ATTR_BLK_READ,
|
||||
ATTR_BLK_WRITE,
|
||||
ATTR_BOOT,
|
||||
ATTR_BUILD,
|
||||
ATTR_CHANGELOG,
|
||||
ATTR_CPU_PERCENT,
|
||||
ATTR_DESCRIPTON,
|
||||
ATTR_DETACHED,
|
||||
ATTR_DEVICES,
|
||||
ATTR_DEVICETREE,
|
||||
ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API,
|
||||
ATTR_FULL_ACCESS,
|
||||
ATTR_GPIO,
|
||||
ATTR_HASSIO_API,
|
||||
ATTR_HASSIO_ROLE,
|
||||
ATTR_HOMEASSISTANT,
|
||||
ATTR_HOMEASSISTANT_API,
|
||||
ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC,
|
||||
ATTR_HOST_NETWORK,
|
||||
ATTR_HOST_PID,
|
||||
ATTR_ICON,
|
||||
ATTR_INGRESS,
|
||||
ATTR_INGRESS_ENTRY,
|
||||
ATTR_INGRESS_PORT,
|
||||
ATTR_INGRESS_URL,
|
||||
ATTR_INSTALLED,
|
||||
ATTR_IP_ADDRESS,
|
||||
ATTR_KERNEL_MODULES,
|
||||
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT, REQUEST_FROM)
|
||||
ATTR_LAST_VERSION,
|
||||
ATTR_LOGO,
|
||||
ATTR_LONG_DESCRIPTION,
|
||||
ATTR_MACHINE,
|
||||
ATTR_MAINTAINER,
|
||||
ATTR_MEMORY_LIMIT,
|
||||
ATTR_MEMORY_USAGE,
|
||||
ATTR_NAME,
|
||||
ATTR_NETWORK,
|
||||
ATTR_NETWORK_DESCRIPTION,
|
||||
ATTR_NETWORK_RX,
|
||||
ATTR_NETWORK_TX,
|
||||
ATTR_OPTIONS,
|
||||
ATTR_PRIVILEGED,
|
||||
ATTR_PROTECTED,
|
||||
ATTR_RATING,
|
||||
ATTR_REPOSITORIES,
|
||||
ATTR_REPOSITORY,
|
||||
ATTR_SERVICES,
|
||||
ATTR_SLUG,
|
||||
ATTR_SOURCE,
|
||||
ATTR_STATE,
|
||||
ATTR_STDIN,
|
||||
ATTR_URL,
|
||||
ATTR_VERSION,
|
||||
ATTR_WEBUI,
|
||||
BOOT_AUTO,
|
||||
BOOT_MANUAL,
|
||||
CONTENT_TYPE_BINARY,
|
||||
CONTENT_TYPE_PNG,
|
||||
CONTENT_TYPE_TEXT,
|
||||
REQUEST_FROM,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..validate import DOCKER_PORTS, ALSA_DEVICE
|
||||
from ..exceptions import APIError
|
||||
from ..validate import ALSA_DEVICE, DOCKER_PORTS
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -51,7 +111,7 @@ SCHEMA_SECURITY = vol.Schema({
|
||||
class APIAddons(CoreSysAttributes):
|
||||
"""Handle RESTful API for add-on functions."""
|
||||
|
||||
def _extract_addon(self, request, check_installed=True):
|
||||
def _extract_addon(self, request: web.Request, check_installed: bool = True) -> Addon:
|
||||
"""Return addon, throw an exception it it doesn't exist."""
|
||||
addon_slug = request.match_info.get('addon')
|
||||
|
||||
@@ -69,7 +129,7 @@ class APIAddons(CoreSysAttributes):
|
||||
return addon
|
||||
|
||||
@api_process
|
||||
async def list(self, request):
|
||||
async def list(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return all add-ons or repositories."""
|
||||
data_addons = []
|
||||
for addon in self.sys_addons.list_addons:
|
||||
@@ -77,7 +137,7 @@ class APIAddons(CoreSysAttributes):
|
||||
ATTR_NAME: addon.name,
|
||||
ATTR_SLUG: addon.slug,
|
||||
ATTR_DESCRIPTON: addon.description,
|
||||
ATTR_VERSION: addon.last_version,
|
||||
ATTR_VERSION: addon.latest_version,
|
||||
ATTR_INSTALLED: addon.version_installed,
|
||||
ATTR_AVAILABLE: addon.available,
|
||||
ATTR_DETACHED: addon.is_detached,
|
||||
@@ -104,13 +164,12 @@ class APIAddons(CoreSysAttributes):
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def reload(self, request):
|
||||
async def reload(self, request: web.Request) -> None:
|
||||
"""Reload all add-on data."""
|
||||
await asyncio.shield(self.sys_addons.reload())
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
async def info(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return add-on information."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
|
||||
@@ -122,7 +181,7 @@ class APIAddons(CoreSysAttributes):
|
||||
ATTR_VERSION: addon.version_installed,
|
||||
ATTR_AUTO_UPDATE: addon.auto_update,
|
||||
ATTR_REPOSITORY: addon.repository,
|
||||
ATTR_LAST_VERSION: addon.last_version,
|
||||
ATTR_LAST_VERSION: addon.latest_version,
|
||||
ATTR_STATE: await addon.state(),
|
||||
ATTR_PROTECTED: addon.protected,
|
||||
ATTR_RATING: rating_security(addon),
|
||||
@@ -130,11 +189,13 @@ class APIAddons(CoreSysAttributes):
|
||||
ATTR_OPTIONS: addon.options,
|
||||
ATTR_ARCH: addon.supported_arch,
|
||||
ATTR_MACHINE: addon.supported_machine,
|
||||
ATTR_HOMEASSISTANT: addon.homeassistant_version,
|
||||
ATTR_URL: addon.url,
|
||||
ATTR_DETACHED: addon.is_detached,
|
||||
ATTR_AVAILABLE: addon.available,
|
||||
ATTR_BUILD: addon.need_build,
|
||||
ATTR_NETWORK: addon.ports,
|
||||
ATTR_NETWORK_DESCRIPTION: addon.ports_description,
|
||||
ATTR_HOST_NETWORK: addon.host_network,
|
||||
ATTR_HOST_PID: addon.host_pid,
|
||||
ATTR_HOST_IPC: addon.host_ipc,
|
||||
@@ -161,17 +222,21 @@ class APIAddons(CoreSysAttributes):
|
||||
ATTR_AUDIO_OUTPUT: addon.audio_output,
|
||||
ATTR_SERVICES: _pretty_services(addon),
|
||||
ATTR_DISCOVERY: addon.discovery,
|
||||
ATTR_IP_ADDRESS: str(addon.ip_address),
|
||||
ATTR_INGRESS: addon.with_ingress,
|
||||
ATTR_INGRESS_ENTRY: addon.ingress_entry,
|
||||
ATTR_INGRESS_URL: addon.ingress_url,
|
||||
ATTR_INGRESS_PORT: addon.ingress_port,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
async def options(self, request: web.Request) -> None:
|
||||
"""Store user options for add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
addon_schema = SCHEMA_OPTIONS.extend({
|
||||
vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
|
||||
})
|
||||
|
||||
body = await api_validate(addon_schema, request)
|
||||
|
||||
if ATTR_OPTIONS in body:
|
||||
@@ -188,10 +253,9 @@ class APIAddons(CoreSysAttributes):
|
||||
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
|
||||
|
||||
addon.save_data()
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def security(self, request):
|
||||
async def security(self, request: web.Request) -> None:
|
||||
"""Store security options for add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
body = await api_validate(SCHEMA_SECURITY, request)
|
||||
@@ -201,17 +265,13 @@ class APIAddons(CoreSysAttributes):
|
||||
addon.protected = body[ATTR_PROTECTED]
|
||||
|
||||
addon.save_data()
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def stats(self, request):
|
||||
async def stats(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return resource information."""
|
||||
addon = self._extract_addon(request)
|
||||
stats = await addon.stats()
|
||||
|
||||
if not stats:
|
||||
raise APIError("No stats available")
|
||||
|
||||
return {
|
||||
ATTR_CPU_PERCENT: stats.cpu_percent,
|
||||
ATTR_MEMORY_USAGE: stats.memory_usage,
|
||||
@@ -223,19 +283,19 @@ class APIAddons(CoreSysAttributes):
|
||||
}
|
||||
|
||||
@api_process
|
||||
def install(self, request):
|
||||
def install(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Install add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
return asyncio.shield(addon.install())
|
||||
|
||||
@api_process
|
||||
def uninstall(self, request):
|
||||
def uninstall(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Uninstall add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return asyncio.shield(addon.uninstall())
|
||||
|
||||
@api_process
|
||||
def start(self, request):
|
||||
def start(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Start add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
@@ -249,29 +309,29 @@ class APIAddons(CoreSysAttributes):
|
||||
return asyncio.shield(addon.start())
|
||||
|
||||
@api_process
|
||||
def stop(self, request):
|
||||
def stop(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Stop add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return asyncio.shield(addon.stop())
|
||||
|
||||
@api_process
|
||||
def update(self, request):
|
||||
def update(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Update add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
if addon.last_version == addon.version_installed:
|
||||
if addon.latest_version == addon.version_installed:
|
||||
raise APIError("No update available!")
|
||||
|
||||
return asyncio.shield(addon.update())
|
||||
|
||||
@api_process
|
||||
def restart(self, request):
|
||||
def restart(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Restart add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return asyncio.shield(addon.restart())
|
||||
|
||||
@api_process
|
||||
def rebuild(self, request):
|
||||
def rebuild(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Rebuild local build add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
if not addon.need_build:
|
||||
@@ -280,13 +340,13 @@ class APIAddons(CoreSysAttributes):
|
||||
return asyncio.shield(addon.rebuild())
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
def logs(self, request: web.Request) -> Awaitable[bytes]:
|
||||
"""Return logs from add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return addon.logs()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_PNG)
|
||||
async def icon(self, request):
|
||||
async def icon(self, request: web.Request) -> bytes:
|
||||
"""Return icon from add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
if not addon.with_icon:
|
||||
@@ -296,7 +356,7 @@ class APIAddons(CoreSysAttributes):
|
||||
return png.read()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_PNG)
|
||||
async def logo(self, request):
|
||||
async def logo(self, request: web.Request) -> bytes:
|
||||
"""Return logo from add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
if not addon.with_logo:
|
||||
@@ -306,7 +366,7 @@ class APIAddons(CoreSysAttributes):
|
||||
return png.read()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_TEXT)
|
||||
async def changelog(self, request):
|
||||
async def changelog(self, request: web.Request) -> str:
|
||||
"""Return changelog from add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
if not addon.with_changelog:
|
||||
@@ -316,17 +376,17 @@ class APIAddons(CoreSysAttributes):
|
||||
return changelog.read()
|
||||
|
||||
@api_process
|
||||
async def stdin(self, request):
|
||||
async def stdin(self, request: web.Request) -> None:
|
||||
"""Write to stdin of add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
if not addon.with_stdin:
|
||||
raise APIError("STDIN not supported by add-on")
|
||||
|
||||
data = await request.read()
|
||||
return await asyncio.shield(addon.write_stdin(data))
|
||||
await asyncio.shield(addon.write_stdin(data))
|
||||
|
||||
|
||||
def _pretty_devices(addon):
|
||||
def _pretty_devices(addon: Addon) -> List[str]:
|
||||
"""Return a simplified device list."""
|
||||
dev_list = addon.devices
|
||||
if not dev_list:
|
||||
@@ -334,7 +394,7 @@ def _pretty_devices(addon):
|
||||
return [row.split(':')[0] for row in dev_list]
|
||||
|
||||
|
||||
def _pretty_services(addon):
|
||||
def _pretty_services(addon: Addon) -> List[str]:
|
||||
"""Return a simplified services role list."""
|
||||
services = []
|
||||
for name, access in addon.services_role.items():
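The add-ons diff is truncated at this point. For orientation, here is a minimal sketch of what the two `_pretty_*` helpers boil down to; the device strings and service roles are made up, and the `_pretty_services` body shown is an assumed continuation, not the verbatim source:

```python
# _pretty_devices keeps only the host path of "host:container:permission" mappings.
dev_list = ["/dev/ttyUSB0:/dev/ttyUSB0:rwm", "/dev/mem:/dev/mem:r"]
pretty_devices = [row.split(":")[0] for row in dev_list]  # ['/dev/ttyUSB0', '/dev/mem']

# _pretty_services flattens {service: role} into "service:role" strings (assumption).
services_role = {"mqtt": "provide", "mysql": "want"}
pretty_services = [f"{name}:{access}" for name, access in services_role.items()]
# -> ['mqtt:provide', 'mysql:want']
```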

@@ -3,17 +3,24 @@ import voluptuous as vol

from .utils import api_process, api_validate
from ..const import (
ATTR_ADDON, ATTR_UUID, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE,
REQUEST_FROM)
ATTR_ADDON,
ATTR_UUID,
ATTR_CONFIG,
ATTR_DISCOVERY,
ATTR_SERVICE,
REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden
from ..validate import SERVICE_ALL
from ..discovery.validate import valid_discovery_service

SCHEMA_DISCOVERY = vol.Schema({
vol.Required(ATTR_SERVICE): SERVICE_ALL,
vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
})
SCHEMA_DISCOVERY = vol.Schema(
{
vol.Required(ATTR_SERVICE): valid_discovery_service,
vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
}
)
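Functionally, the schema change above swaps the broad SERVICE_ALL validator for valid_discovery_service, so the service name itself is checked against the known discovery services. A small self-contained sketch of that behaviour with voluptuous; the validator, the service names, and the example payload are stand-ins, not the Supervisor's actual implementation:

```python
import voluptuous as vol

KNOWN_SERVICES = {"mqtt", "deconz"}  # stand-in list of discoverable services


def valid_discovery_service(value: str) -> str:
    """Reject service names that are not known to the discovery layer."""
    if value not in KNOWN_SERVICES:
        raise vol.Invalid(f"Unknown discovery service {value}")
    return value


SCHEMA_DISCOVERY = vol.Schema(
    {
        vol.Required("service"): valid_discovery_service,
        vol.Optional("config"): vol.Maybe(dict),
    }
)

# A well-formed message passes, an unknown service raises vol.Invalid.
SCHEMA_DISCOVERY({"service": "mqtt", "config": {"host": "core-mosquitto"}})
```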
|
||||
|
||||
|
||||
class APIDiscovery(CoreSysAttributes):
|
||||
@@ -21,7 +28,7 @@ class APIDiscovery(CoreSysAttributes):
|
||||
|
||||
def _extract_message(self, request):
|
||||
"""Extract discovery message from URL."""
|
||||
message = self.sys_discovery.get(request.match_info.get('uuid'))
|
||||
message = self.sys_discovery.get(request.match_info.get("uuid"))
|
||||
if not message:
|
||||
raise APIError("Discovery message not found")
|
||||
return message
|
||||
@@ -38,12 +45,14 @@ class APIDiscovery(CoreSysAttributes):
|
||||
|
||||
discovery = []
|
||||
for message in self.sys_discovery.list_messages:
|
||||
discovery.append({
|
||||
ATTR_ADDON: message.addon,
|
||||
ATTR_SERVICE: message.service,
|
||||
ATTR_UUID: message.uuid,
|
||||
ATTR_CONFIG: message.config,
|
||||
})
|
||||
discovery.append(
|
||||
{
|
||||
ATTR_ADDON: message.addon,
|
||||
ATTR_SERVICE: message.service,
|
||||
ATTR_UUID: message.uuid,
|
||||
ATTR_CONFIG: message.config,
|
||||
}
|
||||
)
|
||||
|
||||
return {ATTR_DISCOVERY: discovery}
|
||||
|
||||
|
@@ -1,27 +1,31 @@
|
||||
"""Init file for Hass.io HassOS RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Awaitable, Dict
|
||||
|
||||
import voluptuous as vol
|
||||
from aiohttp import web
|
||||
|
||||
from .utils import api_process, api_validate
|
||||
from ..const import (
|
||||
ATTR_VERSION, ATTR_BOARD, ATTR_VERSION_LATEST, ATTR_VERSION_CLI,
|
||||
ATTR_VERSION_CLI_LATEST)
|
||||
ATTR_BOARD,
|
||||
ATTR_VERSION,
|
||||
ATTR_VERSION_CLI,
|
||||
ATTR_VERSION_CLI_LATEST,
|
||||
ATTR_VERSION_LATEST,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from .utils import api_process, api_validate
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
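Collapsing SCHEMA_VERSION to a single line does not change behaviour: vol.Coerce(str) still lets callers send the version as a number and receive a string. A quick illustration with placeholder values:

```python
import voluptuous as vol

SCHEMA_VERSION = vol.Schema({vol.Optional("version"): vol.Coerce(str)})

assert SCHEMA_VERSION({"version": 2.12}) == {"version": "2.12"}
assert SCHEMA_VERSION({}) == {}
```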
|
||||
|
||||
|
||||
class APIHassOS(CoreSysAttributes):
|
||||
"""Handle RESTful API for HassOS functions."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
async def info(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return HassOS information."""
|
||||
return {
|
||||
ATTR_VERSION: self.sys_hassos.version,
|
||||
@@ -32,7 +36,7 @@ class APIHassOS(CoreSysAttributes):
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
async def update(self, request: web.Request) -> None:
|
||||
"""Update HassOS."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)
|
||||
@@ -40,7 +44,7 @@ class APIHassOS(CoreSysAttributes):
|
||||
await asyncio.shield(self.sys_hassos.update(version))
|
||||
|
||||
@api_process
|
||||
async def update_cli(self, request):
|
||||
async def update_cli(self, request: web.Request) -> None:
|
||||
"""Update HassOS CLI."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)
|
||||
@@ -48,6 +52,6 @@ class APIHassOS(CoreSysAttributes):
|
||||
await asyncio.shield(self.sys_hassos.update_cli(version))
|
||||
|
||||
@api_process
|
||||
def config_sync(self, request):
|
||||
def config_sync(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Trigger config reload on HassOS."""
|
||||
return asyncio.shield(self.sys_hassos.config_sync())
|
||||
|
@@ -1,15 +1,35 @@
|
||||
"""Init file for Hass.io Home Assistant RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Coroutine, Dict, Any
|
||||
|
||||
import voluptuous as vol
|
||||
from aiohttp import web
|
||||
|
||||
from ..const import (
|
||||
ATTR_ARCH, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_BOOT, ATTR_CPU_PERCENT,
|
||||
ATTR_CUSTOM, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_MACHINE, ATTR_MEMORY_LIMIT,
|
||||
ATTR_MEMORY_USAGE, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_PASSWORD,
|
||||
ATTR_PORT, ATTR_REFRESH_TOKEN, ATTR_SSL, ATTR_VERSION, ATTR_WAIT_BOOT,
|
||||
ATTR_WATCHDOG, CONTENT_TYPE_BINARY)
|
||||
ATTR_ARCH,
|
||||
ATTR_BLK_READ,
|
||||
ATTR_BLK_WRITE,
|
||||
ATTR_BOOT,
|
||||
ATTR_CPU_PERCENT,
|
||||
ATTR_CUSTOM,
|
||||
ATTR_IMAGE,
|
||||
ATTR_LAST_VERSION,
|
||||
ATTR_MACHINE,
|
||||
ATTR_MEMORY_LIMIT,
|
||||
ATTR_MEMORY_USAGE,
|
||||
ATTR_NETWORK_RX,
|
||||
ATTR_NETWORK_TX,
|
||||
ATTR_PASSWORD,
|
||||
ATTR_PORT,
|
||||
ATTR_REFRESH_TOKEN,
|
||||
ATTR_SSL,
|
||||
ATTR_VERSION,
|
||||
ATTR_WAIT_BOOT,
|
||||
ATTR_WATCHDOG,
|
||||
ATTR_IP_ADDRESS,
|
||||
CONTENT_TYPE_BINARY,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import APIError
|
||||
from ..validate import DOCKER_IMAGE, NETWORK_PORT
|
||||
@@ -18,42 +38,34 @@ from .utils import api_process, api_process_raw, api_validate
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_OPTIONS = vol.Schema({
|
||||
vol.Optional(ATTR_BOOT):
|
||||
vol.Boolean(),
|
||||
vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
|
||||
vol.Maybe(vol.Coerce(str)),
|
||||
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
|
||||
vol.Any(None, DOCKER_IMAGE),
|
||||
vol.Optional(ATTR_PORT):
|
||||
NETWORK_PORT,
|
||||
vol.Optional(ATTR_PASSWORD):
|
||||
vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_SSL):
|
||||
vol.Boolean(),
|
||||
vol.Optional(ATTR_WATCHDOG):
|
||||
vol.Boolean(),
|
||||
vol.Optional(ATTR_WAIT_BOOT):
|
||||
vol.All(vol.Coerce(int), vol.Range(min=60)),
|
||||
vol.Optional(ATTR_REFRESH_TOKEN):
|
||||
vol.Maybe(vol.Coerce(str)),
|
||||
})
|
||||
SCHEMA_OPTIONS = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_BOOT): vol.Boolean(),
|
||||
vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Any(None, DOCKER_IMAGE),
|
||||
vol.Optional(ATTR_PORT): NETWORK_PORT,
|
||||
vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_SSL): vol.Boolean(),
|
||||
vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
|
||||
vol.Optional(ATTR_WAIT_BOOT): vol.All(vol.Coerce(int), vol.Range(min=60)),
|
||||
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
|
||||
}
|
||||
)
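A detail that is easy to miss in the reformatted SCHEMA_OPTIONS is vol.Inclusive: image and last_version share the "custom_hass" group, so a request must supply both or neither. A minimal sketch of that rule in isolation; the keys and values are illustrative, not taken from the diff:

```python
import voluptuous as vol

SCHEMA = vol.Schema(
    {
        vol.Inclusive("image", "custom_hass"): vol.Maybe(vol.Coerce(str)),
        vol.Inclusive("last_version", "custom_hass"): vol.Maybe(vol.Coerce(str)),
        vol.Optional("wait_boot"): vol.All(vol.Coerce(int), vol.Range(min=60)),
    }
)

SCHEMA({"image": "my/homeassistant", "last_version": "0.91.0"})  # both inclusive keys: ok
SCHEMA({"wait_boot": 120})                                       # neither inclusive key: ok
# SCHEMA({"image": "my/homeassistant"}) raises vol.MultipleInvalid: last_version is missing
```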
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
|
||||
|
||||
|
||||
class APIHomeAssistant(CoreSysAttributes):
|
||||
"""Handle RESTful API for Home Assistant functions."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
async def info(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return host information."""
|
||||
return {
|
||||
ATTR_VERSION: self.sys_homeassistant.version,
|
||||
ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
|
||||
ATTR_LAST_VERSION: self.sys_homeassistant.latest_version,
|
||||
ATTR_MACHINE: self.sys_homeassistant.machine,
|
||||
ATTR_IP_ADDRESS: str(self.sys_homeassistant.ip_address),
|
||||
ATTR_ARCH: self.sys_homeassistant.arch,
|
||||
ATTR_IMAGE: self.sys_homeassistant.image,
|
||||
ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
|
||||
@@ -65,13 +77,13 @@ class APIHomeAssistant(CoreSysAttributes):
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
async def options(self, request: web.Request) -> None:
|
||||
"""Set Home Assistant options."""
|
||||
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||
|
||||
if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
|
||||
self.sys_homeassistant.image = body[ATTR_IMAGE]
|
||||
self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]
|
||||
self.sys_homeassistant.latest_version = body[ATTR_LAST_VERSION]
|
||||
|
||||
if ATTR_BOOT in body:
|
||||
self.sys_homeassistant.boot = body[ATTR_BOOT]
|
||||
@@ -81,6 +93,7 @@ class APIHomeAssistant(CoreSysAttributes):
|
||||
|
||||
if ATTR_PASSWORD in body:
|
||||
self.sys_homeassistant.api_password = body[ATTR_PASSWORD]
|
||||
self.sys_homeassistant.refresh_token = None
|
||||
|
||||
if ATTR_SSL in body:
|
||||
self.sys_homeassistant.api_ssl = body[ATTR_SSL]
|
||||
@@ -97,7 +110,7 @@ class APIHomeAssistant(CoreSysAttributes):
|
||||
self.sys_homeassistant.save_data()
|
||||
|
||||
@api_process
|
||||
async def stats(self, request):
|
||||
async def stats(self, request: web.Request) -> Dict[Any, str]:
|
||||
"""Return resource information."""
|
||||
stats = await self.sys_homeassistant.stats()
|
||||
if not stats:
|
||||
@@ -114,35 +127,40 @@ class APIHomeAssistant(CoreSysAttributes):
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
async def update(self, request: web.Request) -> None:
|
||||
"""Update Home Assistant."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)
|
||||
version = body.get(ATTR_VERSION, self.sys_homeassistant.latest_version)
|
||||
|
||||
await asyncio.shield(self.sys_homeassistant.update(version))
|
||||
|
||||
@api_process
|
||||
def stop(self, request):
|
||||
def stop(self, request: web.Request) -> Coroutine:
|
||||
"""Stop Home Assistant."""
|
||||
return asyncio.shield(self.sys_homeassistant.stop())
|
||||
|
||||
@api_process
|
||||
def start(self, request):
|
||||
def start(self, request: web.Request) -> Coroutine:
|
||||
"""Start Home Assistant."""
|
||||
return asyncio.shield(self.sys_homeassistant.start())
|
||||
|
||||
@api_process
|
||||
def restart(self, request):
|
||||
def restart(self, request: web.Request) -> Coroutine:
|
||||
"""Restart Home Assistant."""
|
||||
return asyncio.shield(self.sys_homeassistant.restart())
|
||||
|
||||
@api_process
|
||||
def rebuild(self, request: web.Request) -> Coroutine:
|
||||
"""Rebuild Home Assistant."""
|
||||
return asyncio.shield(self.sys_homeassistant.rebuild())
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
def logs(self, request: web.Request) -> Coroutine:
|
||||
"""Return Home Assistant Docker logs."""
|
||||
return self.sys_homeassistant.logs()
|
||||
|
||||
@api_process
|
||||
async def check(self, request):
|
||||
async def check(self, request: web.Request) -> None:
|
||||
"""Check configuration of Home Assistant."""
|
||||
result = await self.sys_homeassistant.check_config()
|
||||
if not result.valid:
|
||||
|
@@ -1,9 +1,20 @@
"""Init file for Hass.io info RESTful API."""
import logging
from typing import Any, Dict

from ..const import (ATTR_ARCH, ATTR_CHANNEL, ATTR_HASSOS, ATTR_HOMEASSISTANT,
ATTR_HOSTNAME, ATTR_MACHINE, ATTR_SUPERVISOR,
ATTR_SUPPORTED_ARCH)
from aiohttp import web

from ..const import (
ATTR_ARCH,
ATTR_CHANNEL,
ATTR_HASSOS,
ATTR_HOMEASSISTANT,
ATTR_HOSTNAME,
ATTR_LOGGING,
ATTR_MACHINE,
ATTR_SUPERVISOR,
ATTR_SUPPORTED_ARCH,
)
from ..coresys import CoreSysAttributes
from .utils import api_process

@@ -14,7 +25,7 @@ class APIInfo(CoreSysAttributes):
"""Handle RESTful API for info functions."""

@api_process
async def info(self, request):
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Show system info."""
return {
ATTR_SUPERVISOR: self.sys_supervisor.version,
@@ -25,4 +36,5 @@ class APIInfo(CoreSysAttributes):
ATTR_ARCH: self.sys_arch.default,
ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
ATTR_CHANNEL: self.sys_updater.channel,
ATTR_LOGGING: self.sys_config.logging,
}
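With ATTR_LOGGING added, the /info endpoint now also reports the configured log level. A representative response dictionary built from this handler could look like the following; every value is made up, and the key names assume the conventional ATTR_* constants resolve to these strings:

```python
info_response = {
    "supervisor": "150",
    "homeassistant": "0.91.1",
    "hassos": "2.11",
    "hostname": "hassio",
    "machine": "raspberrypi3",
    "arch": "armv7",
    "supported_arch": ["armv7", "armhf"],
    "channel": "stable",
    "logging": "info",
}
```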

234	hassio/api/ingress.py (new file)
@@ -0,0 +1,234 @@
"""Hass.io Add-on ingress service."""
|
||||
import asyncio
|
||||
from ipaddress import ip_address
|
||||
import logging
|
||||
from typing import Any, Dict, Union
|
||||
|
||||
import aiohttp
|
||||
from aiohttp import hdrs, web
|
||||
from aiohttp.web_exceptions import (
|
||||
HTTPBadGateway,
|
||||
HTTPServiceUnavailable,
|
||||
HTTPUnauthorized,
|
||||
)
|
||||
from multidict import CIMultiDict, istr
|
||||
|
||||
from ..addons.addon import Addon
|
||||
from ..const import ATTR_SESSION, HEADER_TOKEN, REQUEST_FROM, COOKIE_INGRESS
|
||||
from ..coresys import CoreSysAttributes
|
||||
from .utils import api_process
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APIIngress(CoreSysAttributes):
|
||||
"""Ingress view to handle add-on webui routing."""
|
||||
|
||||
def _extract_addon(self, request: web.Request) -> Addon:
|
||||
"""Return addon, throw an exception it it doesn't exist."""
|
||||
token = request.match_info.get("token")
|
||||
|
||||
# Find correct add-on
|
||||
addon = self.sys_ingress.get(token)
|
||||
if not addon:
|
||||
_LOGGER.warning("Ingress for %s not available", token)
|
||||
raise HTTPServiceUnavailable()
|
||||
|
||||
return addon
|
||||
|
||||
def _check_ha_access(self, request: web.Request) -> None:
|
||||
if request[REQUEST_FROM] != self.sys_homeassistant:
|
||||
_LOGGER.warning("Ingress is only available behind Home Assistant")
|
||||
raise HTTPUnauthorized()
|
||||
|
||||
def _create_url(self, addon: Addon, path: str) -> str:
|
||||
"""Create URL to container."""
|
||||
return f"http://{addon.ip_address}:{addon.ingress_port}/{path}"
|
||||
|
||||
@api_process
|
||||
async def create_session(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Create a new session."""
|
||||
self._check_ha_access(request)
|
||||
|
||||
session = self.sys_ingress.create_session()
|
||||
return {ATTR_SESSION: session}
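create_session above hands back a token that the handler later checks against the COOKIE_INGRESS cookie via validate_session. The Supervisor's real session manager lives in sys_ingress; the following is only a toy model of that create/validate pair, with an assumed 15-minute lifetime:

```python
import secrets
from datetime import datetime, timedelta
from typing import Dict, Optional


class IngressSessionStore:
    """Toy stand-in for sys_ingress session handling (not the real implementation)."""

    def __init__(self, ttl: timedelta = timedelta(minutes=15)) -> None:
        self._sessions: Dict[str, datetime] = {}
        self._ttl = ttl

    def create_session(self) -> str:
        """Return a fresh random session token."""
        token = secrets.token_urlsafe(32)
        self._sessions[token] = datetime.utcnow() + self._ttl
        return token

    def validate_session(self, session: Optional[str]) -> bool:
        """Return True while the token exists and has not expired."""
        valid_until = self._sessions.get(session)
        return valid_until is not None and valid_until > datetime.utcnow()
```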
|
||||
|
||||
async def handler(
|
||||
self, request: web.Request
|
||||
) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
|
||||
"""Route data to Hass.io ingress service."""
|
||||
self._check_ha_access(request)
|
||||
|
||||
# Check Ingress Session
|
||||
session = request.cookies.get(COOKIE_INGRESS)
|
||||
if not self.sys_ingress.validate_session(session):
|
||||
_LOGGER.warning("No valid ingress session %s", session)
|
||||
raise HTTPUnauthorized()
|
||||
|
||||
# Process requests
|
||||
addon = self._extract_addon(request)
|
||||
path = request.match_info.get("path")
|
||||
try:
|
||||
# Websocket
|
||||
if _is_websocket(request):
|
||||
return await self._handle_websocket(request, addon, path)
|
||||
|
||||
# Request
|
||||
return await self._handle_request(request, addon, path)
|
||||
|
||||
except aiohttp.ClientError as err:
|
||||
_LOGGER.error("Ingress error: %s", err)
|
||||
|
||||
raise HTTPBadGateway() from None
|
||||
|
||||
async def _handle_websocket(
|
||||
self, request: web.Request, addon: Addon, path: str
|
||||
) -> web.WebSocketResponse:
|
||||
"""Ingress route for websocket."""
|
||||
if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
|
||||
req_protocols = [
|
||||
str(proto.strip())
|
||||
for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
|
||||
]
|
||||
else:
|
||||
req_protocols = ()
|
||||
|
||||
ws_server = web.WebSocketResponse(
|
||||
protocols=req_protocols, autoclose=False, autoping=False
|
||||
)
|
||||
await ws_server.prepare(request)
|
||||
|
||||
# Preparing
|
||||
url = self._create_url(addon, path)
|
||||
source_header = _init_header(request, addon)
|
||||
|
||||
# Support GET query
|
||||
if request.query_string:
|
||||
url = "{}?{}".format(url, request.query_string)
|
||||
|
||||
# Start proxy
|
||||
async with self.sys_websession.ws_connect(
|
||||
url,
|
||||
headers=source_header,
|
||||
protocols=req_protocols,
|
||||
autoclose=False,
|
||||
autoping=False,
|
||||
) as ws_client:
|
||||
# Proxy requests
|
||||
await asyncio.wait(
|
||||
[
|
||||
_websocket_forward(ws_server, ws_client),
|
||||
_websocket_forward(ws_client, ws_server),
|
||||
],
|
||||
return_when=asyncio.FIRST_COMPLETED,
|
||||
)
|
||||
|
||||
return ws_server
|
||||
|
||||
async def _handle_request(
|
||||
self, request: web.Request, addon: Addon, path: str
|
||||
) -> Union[web.Response, web.StreamResponse]:
|
||||
"""Ingress route for request."""
|
||||
url = self._create_url(addon, path)
|
||||
data = await request.read()
|
||||
source_header = _init_header(request, addon)
|
||||
|
||||
async with self.sys_websession.request(
|
||||
request.method, url, headers=source_header, params=request.query, data=data
|
||||
) as result:
|
||||
headers = _response_header(result)
|
||||
|
||||
# Simple request
|
||||
if (
|
||||
hdrs.CONTENT_LENGTH in result.headers
|
||||
and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
|
||||
):
|
||||
# Return Response
|
||||
body = await result.read()
|
||||
return web.Response(
|
||||
headers=headers,
|
||||
status=result.status,
|
||||
content_type=result.content_type,
|
||||
body=body,
|
||||
)
|
||||
|
||||
# Stream response
|
||||
response = web.StreamResponse(status=result.status, headers=headers)
|
||||
response.content_type = result.content_type
|
||||
|
||||
try:
|
||||
await response.prepare(request)
|
||||
async for data in result.content.iter_chunked(4096):
|
||||
await response.write(data)
|
||||
|
||||
except (aiohttp.ClientError, aiohttp.ClientPayloadError) as err:
|
||||
_LOGGER.error("Stream error with %s: %s", url, err)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
def _init_header(
|
||||
request: web.Request, addon: str
|
||||
) -> Union[CIMultiDict, Dict[str, str]]:
|
||||
"""Create initial header."""
|
||||
headers = {}
|
||||
|
||||
# filter flags
|
||||
for name, value in request.headers.items():
|
||||
if name in (hdrs.CONTENT_LENGTH, hdrs.CONTENT_ENCODING, istr(HEADER_TOKEN)):
|
||||
continue
|
||||
headers[name] = value
|
||||
|
||||
# Update X-Forwarded-For
|
||||
forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
|
||||
connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
|
||||
headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"
|
||||
|
||||
return headers
|
||||
|
||||
|
||||
def _response_header(response: aiohttp.ClientResponse) -> Dict[str, str]:
|
||||
"""Create response header."""
|
||||
headers = {}
|
||||
|
||||
for name, value in response.headers.items():
|
||||
if name in (
|
||||
hdrs.TRANSFER_ENCODING,
|
||||
hdrs.CONTENT_LENGTH,
|
||||
hdrs.CONTENT_TYPE,
|
||||
hdrs.CONTENT_ENCODING,
|
||||
):
|
||||
continue
|
||||
headers[name] = value
|
||||
|
||||
return headers
|
||||
|
||||
|
||||
def _is_websocket(request: web.Request) -> bool:
|
||||
"""Return True if request is a websocket."""
|
||||
headers = request.headers
|
||||
|
||||
if (
|
||||
headers.get(hdrs.CONNECTION) == "Upgrade"
|
||||
and headers.get(hdrs.UPGRADE) == "websocket"
|
||||
):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
async def _websocket_forward(ws_from, ws_to):
|
||||
"""Handle websocket message directly."""
|
||||
try:
|
||||
async for msg in ws_from:
|
||||
if msg.type == aiohttp.WSMsgType.TEXT:
|
||||
await ws_to.send_str(msg.data)
|
||||
elif msg.type == aiohttp.WSMsgType.BINARY:
|
||||
await ws_to.send_bytes(msg.data)
|
||||
elif msg.type == aiohttp.WSMsgType.PING:
|
||||
await ws_to.ping()
|
||||
elif msg.type == aiohttp.WSMsgType.PONG:
|
||||
await ws_to.pong()
|
||||
elif ws_to.closed:
|
||||
await ws_to.close(code=ws_to.close_code, message=msg.extra)
|
||||
except RuntimeError:
|
||||
_LOGGER.warning("Ingress Websocket runtime error")
|
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
1
hassio/api/panel/chunk.0d97375a72c194aae1ac.js
Normal file
1
hassio/api/panel/chunk.0d97375a72c194aae1ac.js
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.0d97375a72c194aae1ac.js.gz
Normal file
BIN
hassio/api/panel/chunk.0d97375a72c194aae1ac.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.352e12b12df29c09b455.js
Normal file
1
hassio/api/panel/chunk.352e12b12df29c09b455.js
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.352e12b12df29c09b455.js.gz
Normal file
BIN
hassio/api/panel/chunk.352e12b12df29c09b455.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.3a63ad36bccf4ea567fa.js
Normal file
1
hassio/api/panel/chunk.3a63ad36bccf4ea567fa.js
Normal file
@@ -0,0 +1 @@
|
||||
(window.webpackJsonp=window.webpackJsonp||[]).push([[7],{102:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(121),i=t.n(e),o=t(123),u=t.n(o),a=i.a,c=u.a}}]);
|
BIN
hassio/api/panel/chunk.3a63ad36bccf4ea567fa.js.gz
Normal file
BIN
hassio/api/panel/chunk.3a63ad36bccf4ea567fa.js.gz
Normal file
Binary file not shown.
3
hassio/api/panel/chunk.510634470d399e194ace.js
Normal file
3
hassio/api/panel/chunk.510634470d399e194ace.js
Normal file
File diff suppressed because one or more lines are too long
10
hassio/api/panel/chunk.510634470d399e194ace.js.LICENSE
Normal file
10
hassio/api/panel/chunk.510634470d399e194ace.js.LICENSE
Normal file
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
BIN
hassio/api/panel/chunk.510634470d399e194ace.js.gz
Normal file
BIN
hassio/api/panel/chunk.510634470d399e194ace.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.510634470d399e194ace.js.map
Normal file
1
hassio/api/panel/chunk.510634470d399e194ace.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.510634470d399e194ace.js","sourceRoot":""}
|
3
hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js
Normal file
3
hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js
Normal file
File diff suppressed because one or more lines are too long
21
hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js.LICENSE
Normal file
21
hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js.LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
BIN
hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js.gz
Normal file
BIN
hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js.map
Normal file
1
hassio/api/panel/chunk.564a2f7b1c38ddaa4ce0.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.564a2f7b1c38ddaa4ce0.js","sourceRoot":""}
|
1
hassio/api/panel/chunk.5d31a1778f717ac8b063.js
Normal file
1
hassio/api/panel/chunk.5d31a1778f717ac8b063.js
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.5d31a1778f717ac8b063.js.gz
Normal file
BIN
hassio/api/panel/chunk.5d31a1778f717ac8b063.js.gz
Normal file
Binary file not shown.
3
hassio/api/panel/chunk.659084fef4e3b7b66a76.js
Normal file
3
hassio/api/panel/chunk.659084fef4e3b7b66a76.js
Normal file
File diff suppressed because one or more lines are too long
32
hassio/api/panel/chunk.659084fef4e3b7b66a76.js.LICENSE
Normal file
32
hassio/api/panel/chunk.659084fef4e3b7b66a76.js.LICENSE
Normal file
@@ -0,0 +1,32 @@
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @fileoverview
|
||||
* @suppress {checkPrototypalTypes}
|
||||
* @license Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt The complete set of authors may be found
|
||||
* at http://polymer.github.io/AUTHORS.txt The complete set of contributors may
|
||||
* be found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by
|
||||
* Google as part of the polymer project is also subject to an additional IP
|
||||
* rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
BIN
hassio/api/panel/chunk.659084fef4e3b7b66a76.js.gz
Normal file
BIN
hassio/api/panel/chunk.659084fef4e3b7b66a76.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.659084fef4e3b7b66a76.js.map
Normal file
1
hassio/api/panel/chunk.659084fef4e3b7b66a76.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.659084fef4e3b7b66a76.js","sourceRoot":""}
|
3
hassio/api/panel/chunk.6e9c87e51920a9c354e5.js
Normal file
3
hassio/api/panel/chunk.6e9c87e51920a9c354e5.js
Normal file
File diff suppressed because one or more lines are too long
31
hassio/api/panel/chunk.6e9c87e51920a9c354e5.js.LICENSE
Normal file
31
hassio/api/panel/chunk.6e9c87e51920a9c354e5.js.LICENSE
Normal file
@@ -0,0 +1,31 @@
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
BIN
hassio/api/panel/chunk.6e9c87e51920a9c354e5.js.gz
Normal file
BIN
hassio/api/panel/chunk.6e9c87e51920a9c354e5.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.6e9c87e51920a9c354e5.js.map
Normal file
1
hassio/api/panel/chunk.6e9c87e51920a9c354e5.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.6e9c87e51920a9c354e5.js","sourceRoot":""}
|
1
hassio/api/panel/chunk.739b67c99ab56cdbd75d.js
Normal file
1
hassio/api/panel/chunk.739b67c99ab56cdbd75d.js
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.739b67c99ab56cdbd75d.js.gz
Normal file
BIN
hassio/api/panel/chunk.739b67c99ab56cdbd75d.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
|
||||
(window.webpackJsonp=window.webpackJsonp||[]).push([[4],{100:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(89),i=t.n(e),o=t(91),u=t.n(o),a=i.a,c=u.a}}]);
|
||||
//# sourceMappingURL=chunk.9e3883f96f68b3ce89f5.js.map
|
Binary file not shown.
@@ -1 +0,0 @@
|
||||
{"version":3,"sources":["webpack:///../src/resources/load_markdown.js"],"names":["__webpack_require__","r","__webpack_exports__","d","marked","filterXSS","marked__WEBPACK_IMPORTED_MODULE_0__","marked__WEBPACK_IMPORTED_MODULE_0___default","n","xss__WEBPACK_IMPORTED_MODULE_1__","xss__WEBPACK_IMPORTED_MODULE_1___default","marked_","filterXSS_"],"mappings":"0FAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,2BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,8BAAAG,IAAA,IAAAC,EAAAN,EAAA,IAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,IAAAU,EAAAV,EAAAQ,EAAAC,GAGaL,EAASO,IACTN,EAAYO","file":"chunk.9e3883f96f68b3ce89f5.js","sourcesContent":["import marked_ from \"marked\";\nimport filterXSS_ from \"xss\";\n\nexport const marked = marked_;\nexport const filterXSS = filterXSS_;\n"],"sourceRoot":""}
|
1
hassio/api/panel/chunk.a571dfa106202cc57af6.js
Normal file
1
hassio/api/panel/chunk.a571dfa106202cc57af6.js
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.a571dfa106202cc57af6.js.gz
Normal file
BIN
hassio/api/panel/chunk.a571dfa106202cc57af6.js.gz
Normal file
Binary file not shown.
3
hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js
Normal file
3
hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js
Normal file
File diff suppressed because one or more lines are too long
180
hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js.LICENSE
Normal file
180
hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js.LICENSE
Normal file
@@ -0,0 +1,180 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2019 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright 2018 Google Inc. All Rights Reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
/*! *****************************************************************************
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
this file except in compliance with the License. You may obtain a copy of the
|
||||
License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
|
||||
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
|
||||
MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
|
||||
See the Apache Version 2.0 License for specific language governing permissions
|
||||
and limitations under the License.
|
||||
***************************************************************************** */
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2016 Google Inc.
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2018 Google Inc.
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2018 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2014 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
BIN
hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js.gz
Normal file
BIN
hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js.map
Normal file
1
hassio/api/panel/chunk.a7e5fb452cd1b3a5faef.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.a7e5fb452cd1b3a5faef.js","sourceRoot":""}
|
1
hassio/api/panel/chunk.b3340b3df270d20af4a1.js
Normal file
1
hassio/api/panel/chunk.b3340b3df270d20af4a1.js
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.b3340b3df270d20af4a1.js.gz
Normal file
BIN
hassio/api/panel/chunk.b3340b3df270d20af4a1.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,820 +0,0 @@
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/

/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2014 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
3
hassio/api/panel/chunk.f15d7f41c0d302cbbc7a.js
Normal file
File diff suppressed because one or more lines are too long
10
hassio/api/panel/chunk.f15d7f41c0d302cbbc7a.js.LICENSE
Normal file
@@ -0,0 +1,10 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN
hassio/api/panel/chunk.f15d7f41c0d302cbbc7a.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.f15d7f41c0d302cbbc7a.js.map
Normal file
@@ -0,0 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.f15d7f41c0d302cbbc7a.js","sourceRoot":""}
File diff suppressed because one or more lines are too long
@@ -1,471 +0,0 @@
/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +1 @@
!function(e){function n(n){for(var t,o,i=n[0],u=n[1],c=0,f=[];c<i.length;c++)o=i[c],r[o]&&f.push(r[o][0]),r[o]=0;for(t in u)Object.prototype.hasOwnProperty.call(u,t)&&(e[t]=u[t]);for(a&&a(n);f.length;)f.shift()()}var t={},r={1:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var i=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=i);var u,c=document.getElementsByTagName("head")[0],a=document.createElement("script");a.charset="utf-8",a.timeout=120,o.nc&&a.setAttribute("nonce",o.nc),a.src=function(e){return o.p+"chunk."+{0:"f32f3c841cc3e1d081f7",2:"8c049a124b9397e54c16",3:"d0eb7b86b775838caf5e",4:"9e3883f96f68b3ce89f5",5:"0cb8b788b03dcc48da14",6:"c1ac97370d72bce0a835",7:"0853908528652fbc5d4f"}[e]+".js"}(e),u=function(n){a.onerror=a.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),i=n&&n.target&&n.target.src,u=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");u.type=o,u.request=i,t[1](u)}r[e]=void 0}};var f=setTimeout(function(){u({type:"timeout",target:a})},12e4);a.onerror=a.onload=u,c.appendChild(a)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],u=i.push.bind(i);i.push=n,i=i.slice();for(var c=0;c<i.length;c++)n(i[c]);var a=u;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(2)]).then(t.bind(null,2)),Promise.all([t.e(0),t.e(6),t.e(3)]).then(t.bind(null,1))})}]);
//# sourceMappingURL=entrypoint.js.map
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],c=0,u=[];c<a.length;c++)o=a[c],r[o]&&u.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(f&&f(n);u.length;)u.shift()()}var t={},r={4:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,c=document.createElement("script");c.charset="utf-8",c.timeout=120,o.nc&&c.setAttribute("nonce",o.nc),c.src=function(e){return o.p+"chunk."+{0:"564a2f7b1c38ddaa4ce0",1:"659084fef4e3b7b66a76",2:"510634470d399e194ace",3:"f15d7f41c0d302cbbc7a",5:"5d31a1778f717ac8b063",6:"0d97375a72c194aae1ac",7:"3a63ad36bccf4ea567fa",8:"a571dfa106202cc57af6",9:"a7e5fb452cd1b3a5faef",10:"b3340b3df270d20af4a1",11:"6e9c87e51920a9c354e5",12:"352e12b12df29c09b455",13:"739b67c99ab56cdbd75d"}[e]+".js"}(e),i=function(n){c.onerror=c.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src,i=new Error("Loading chunk "+e+" failed.\n("+o+": "+a+")");i.type=o,i.request=a,t[1](i)}r[e]=void 0}};var f=setTimeout(function(){i({type:"timeout",target:c})},12e4);c.onerror=c.onload=i,document.head.appendChild(c)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=window.webpackJsonp=window.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var c=0;c<a.length;c++)n(a[c]);var f=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(1),t.e(5)]).then(t.bind(null,2)),Promise.all([t.e(1),t.e(9),t.e(6)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
Binary file not shown.
@@ -35,7 +35,7 @@ class APIProxy(CoreSysAttributes):
|
||||
elif not addon.access_homeassistant_api:
|
||||
_LOGGER.warning("Not permitted API access: %s", addon.slug)
|
||||
else:
|
||||
_LOGGER.info("%s access from %s", request.path, addon.slug)
|
||||
_LOGGER.debug("%s access from %s", request.path, addon.slug)
|
||||
return
|
||||
|
||||
raise HTTPUnauthorized()
|
||||
|
@@ -6,12 +6,19 @@ from aiohttp.web import middleware
|
||||
from aiohttp.web_exceptions import HTTPUnauthorized, HTTPForbidden
|
||||
|
||||
from ..const import (
|
||||
HEADER_TOKEN, REQUEST_FROM, ROLE_ADMIN, ROLE_DEFAULT, ROLE_HOMEASSISTANT,
|
||||
ROLE_MANAGER, ROLE_BACKUP)
|
||||
HEADER_TOKEN,
|
||||
REQUEST_FROM,
|
||||
ROLE_ADMIN,
|
||||
ROLE_DEFAULT,
|
||||
ROLE_HOMEASSISTANT,
|
||||
ROLE_MANAGER,
|
||||
ROLE_BACKUP,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# fmt: off
|
||||
|
||||
# Block Anytime
|
||||
BLACKLIST = re.compile(
|
||||
@@ -65,7 +72,7 @@ ADDONS_ROLE_ACCESS = {
|
||||
r"|/hardware/.+"
|
||||
r"|/hassos/.+"
|
||||
r"|/supervisor/.+"
|
||||
r"|/addons(?:/[^/]+/(?!security).+)?"
|
||||
r"|/addons(?:/[^/]+/(?!security).+|/reload)?"
|
||||
r"|/snapshots.*"
|
||||
r")$"
|
||||
),
|
||||
@@ -74,6 +81,8 @@ ADDONS_ROLE_ACCESS = {
|
||||
),
|
||||
}
|
||||
|
||||
# fmt: off
|
||||
|
||||
|
||||
class SecurityMiddleware(CoreSysAttributes):
|
||||
"""Security middleware functions."""
|
||||
@@ -104,9 +113,7 @@ class SecurityMiddleware(CoreSysAttributes):
|
||||
raise HTTPUnauthorized()
|
||||
|
||||
# Home-Assistant
|
||||
# UUID check need removed with 131
|
||||
if hassio_token in (self.sys_homeassistant.uuid,
|
||||
self.sys_homeassistant.hassio_token):
|
||||
if hassio_token == self.sys_homeassistant.hassio_token:
|
||||
_LOGGER.debug("%s access from Home Assistant", request.path)
|
||||
request_from = self.sys_homeassistant
|
||||
|
||||
|
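Two notable changes in the security middleware above: Home Assistant requests are now authenticated only by `hassio_token` (the old UUID fallback is dropped), and the manager-role pattern additionally whitelists `/addons/reload` while still blocking per-add-on `security` endpoints. A standalone sketch of that pattern fragment (the `^(?: ... )$` wrapper is assumed here just to make the snippet runnable; the real expression combines many more alternatives):

```python
import re

# Assumed minimal wrapper around the pattern fragment shown in the diff above.
ADDONS_MANAGER_ACCESS = re.compile(
    r"^(?:"
    r"/addons(?:/[^/]+/(?!security).+|/reload)?"
    r")$"
)

assert ADDONS_MANAGER_ACCESS.match("/addons/reload")                  # newly allowed
assert ADDONS_MANAGER_ACCESS.match("/addons/core_ssh/restart")        # still allowed
assert not ADDONS_MANAGER_ACCESS.match("/addons/core_ssh/security")   # still blocked
```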
@@ -1,34 +1,59 @@
|
||||
"""Init file for Hass.io Supervisor RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Awaitable, Dict
|
||||
|
||||
from aiohttp import web
|
||||
import voluptuous as vol
|
||||
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
from ..const import (
|
||||
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_ARCH,
|
||||
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
|
||||
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
|
||||
ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
|
||||
ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
|
||||
ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
|
||||
ATTR_ADDONS,
|
||||
ATTR_ADDONS_REPOSITORIES,
|
||||
ATTR_ARCH,
|
||||
ATTR_BLK_READ,
|
||||
ATTR_BLK_WRITE,
|
||||
ATTR_CHANNEL,
|
||||
ATTR_CPU_PERCENT,
|
||||
ATTR_DESCRIPTON,
|
||||
ATTR_ICON,
|
||||
ATTR_INSTALLED,
|
||||
ATTR_IP_ADDRESS,
|
||||
ATTR_LAST_VERSION,
|
||||
ATTR_LOGGING,
|
||||
ATTR_LOGO,
|
||||
ATTR_MEMORY_LIMIT,
|
||||
ATTR_MEMORY_USAGE,
|
||||
ATTR_NAME,
|
||||
ATTR_NETWORK_RX,
|
||||
ATTR_NETWORK_TX,
|
||||
ATTR_REPOSITORY,
|
||||
ATTR_SLUG,
|
||||
ATTR_STATE,
|
||||
ATTR_TIMEZONE,
|
||||
ATTR_VERSION,
|
||||
ATTR_WAIT_BOOT,
|
||||
CONTENT_TYPE_BINARY,
|
||||
HASSIO_VERSION,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..validate import WAIT_BOOT, REPOSITORIES, CHANNELS
|
||||
from ..exceptions import APIError
|
||||
from ..utils.validate import validate_timezone
|
||||
from ..validate import CHANNELS, LOG_LEVEL, REPOSITORIES, WAIT_BOOT
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_OPTIONS = vol.Schema({
|
||||
vol.Optional(ATTR_CHANNEL): CHANNELS,
|
||||
vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
|
||||
vol.Optional(ATTR_TIMEZONE): validate_timezone,
|
||||
vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
|
||||
})
|
||||
SCHEMA_OPTIONS = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_CHANNEL): CHANNELS,
|
||||
vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
|
||||
vol.Optional(ATTR_TIMEZONE): validate_timezone,
|
||||
vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
|
||||
vol.Optional(ATTR_LOGGING): LOG_LEVEL,
|
||||
}
|
||||
)
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
|
||||
|
||||
|
||||
class APISupervisor(CoreSysAttributes):
|
||||
@@ -40,36 +65,40 @@ class APISupervisor(CoreSysAttributes):
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
async def info(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return host information."""
|
||||
list_addons = []
|
||||
for addon in self.sys_addons.list_addons:
|
||||
if addon.is_installed:
|
||||
list_addons.append({
|
||||
ATTR_NAME: addon.name,
|
||||
ATTR_SLUG: addon.slug,
|
||||
ATTR_DESCRIPTON: addon.description,
|
||||
ATTR_STATE: await addon.state(),
|
||||
ATTR_VERSION: addon.last_version,
|
||||
ATTR_INSTALLED: addon.version_installed,
|
||||
ATTR_REPOSITORY: addon.repository,
|
||||
ATTR_ICON: addon.with_icon,
|
||||
ATTR_LOGO: addon.with_logo,
|
||||
})
|
||||
list_addons.append(
|
||||
{
|
||||
ATTR_NAME: addon.name,
|
||||
ATTR_SLUG: addon.slug,
|
||||
ATTR_DESCRIPTON: addon.description,
|
||||
ATTR_STATE: await addon.state(),
|
||||
ATTR_VERSION: addon.latest_version,
|
||||
ATTR_INSTALLED: addon.version_installed,
|
||||
ATTR_REPOSITORY: addon.repository,
|
||||
ATTR_ICON: addon.with_icon,
|
||||
ATTR_LOGO: addon.with_logo,
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
ATTR_VERSION: HASSIO_VERSION,
|
||||
ATTR_LAST_VERSION: self.sys_updater.version_hassio,
|
||||
ATTR_CHANNEL: self.sys_updater.channel,
|
||||
ATTR_ARCH: self.sys_supervisor.arch,
|
||||
ATTR_IP_ADDRESS: str(self.sys_supervisor.ip_address),
|
||||
ATTR_WAIT_BOOT: self.sys_config.wait_boot,
|
||||
ATTR_TIMEZONE: self.sys_config.timezone,
|
||||
ATTR_LOGGING: self.sys_config.logging,
|
||||
ATTR_ADDONS: list_addons,
|
||||
ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
async def options(self, request: web.Request) -> None:
|
||||
"""Set Supervisor options."""
|
||||
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||
|
||||
@@ -82,20 +111,20 @@ class APISupervisor(CoreSysAttributes):
|
||||
if ATTR_WAIT_BOOT in body:
|
||||
self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
|
||||
|
||||
if ATTR_LOGGING in body:
|
||||
self.sys_config.logging = body[ATTR_LOGGING]
|
||||
|
||||
if ATTR_ADDONS_REPOSITORIES in body:
|
||||
new = set(body[ATTR_ADDONS_REPOSITORIES])
|
||||
await asyncio.shield(self.sys_addons.load_repositories(new))
|
||||
|
||||
self.sys_updater.save_data()
|
||||
self.sys_config.save_data()
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def stats(self, request):
|
||||
async def stats(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return resource information."""
|
||||
stats = await self.sys_supervisor.stats()
|
||||
if not stats:
|
||||
raise APIError("No stats available")
|
||||
|
||||
return {
|
||||
ATTR_CPU_PERCENT: stats.cpu_percent,
|
||||
@@ -108,31 +137,21 @@ class APISupervisor(CoreSysAttributes):
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
async def update(self, request: web.Request) -> None:
|
||||
"""Update Supervisor OS."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)
|
||||
|
||||
if version == self.sys_supervisor.version:
|
||||
raise APIError("Version {} is already in use".format(version))
|
||||
|
||||
return await asyncio.shield(self.sys_supervisor.update(version))
|
||||
await asyncio.shield(self.sys_supervisor.update(version))
|
||||
|
||||
@api_process
|
||||
async def reload(self, request):
|
||||
def reload(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Reload add-ons, configuration, etc."""
|
||||
tasks = [
|
||||
self.sys_updater.reload(),
|
||||
]
|
||||
results, _ = await asyncio.shield(asyncio.wait(tasks))
|
||||
|
||||
for result in results:
|
||||
if result.exception() is not None:
|
||||
raise APIError("Some reload task fails!")
|
||||
|
||||
return True
|
||||
return asyncio.shield(self.sys_updater.reload())
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
def logs(self, request: web.Request) -> Awaitable[bytes]:
|
||||
"""Return supervisor Docker logs."""
|
||||
return self.sys_supervisor.logs()
|
||||
|
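With this change the Supervisor options endpoint accepts a `logging` level next to `channel`, `timezone` and `wait_boot`, and `info()` now also reports `ip_address` and `logging`. A minimal sketch of the new validation, assuming `LOG_LEVEL` (defined in `hassio/validate.py`, not shown in this diff) is simply a `vol.In` over the five level names:

```python
import voluptuous as vol

# Assumption: LOG_LEVEL accepts the standard Python log level names.
LOG_LEVEL = vol.In(["debug", "info", "warning", "error", "critical"])

SCHEMA_OPTIONS = vol.Schema({vol.Optional("logging"): LOG_LEVEL})

print(SCHEMA_OPTIONS({"logging": "debug"}))   # {'logging': 'debug'}

try:
    SCHEMA_OPTIONS({"logging": "verbose"})    # rejected by the validator
except vol.Invalid as err:
    print(err)
```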
@@ -3,22 +3,27 @@
|
||||
"armhf"
|
||||
],
|
||||
"raspberrypi2": [
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"raspberrypi3": [
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"raspberrypi3-64": [
|
||||
"aarch64",
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"tinker": [
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"odroid-c2": [
|
||||
"aarch64"
|
||||
],
|
||||
"odroid-xu": [
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"orangepi-prime": [
|
||||
|
@@ -1,11 +1,10 @@
|
||||
"""Handle Arch for underlay maschine/platforms."""
|
||||
import json
|
||||
import logging
|
||||
from typing import List
|
||||
from pathlib import Path
|
||||
|
||||
from .coresys import CoreSysAttributes, CoreSys
|
||||
from .exceptions import HassioArchNotFound
|
||||
from .exceptions import HassioArchNotFound, JsonFileError
|
||||
from .utils.json import read_json_file
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -38,10 +37,9 @@ class CpuArch(CoreSysAttributes):
|
||||
async def load(self) -> None:
|
||||
"""Load data and initialize default arch."""
|
||||
try:
|
||||
arch_file = Path(__file__).parent.joinpath("arch.json")
|
||||
arch_data = read_json_file(arch_file)
|
||||
except (OSError, json.JSONDecodeError) as err:
|
||||
_LOGGER.warning("Can't read arch json: %s", err)
|
||||
arch_data = read_json_file(Path(__file__).parent.joinpath("arch.json"))
|
||||
except JsonFileError:
|
||||
_LOGGER.warning("Can't read arch json")
|
||||
return
|
||||
|
||||
# Evaluate current CPU/Platform
|
||||
|
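`CpuArch.load()` now relies on `read_json_file()` raising a single `JsonFileError` instead of catching `OSError`/`JSONDecodeError` itself. A rough sketch of that contract; the wrapping behaviour of `read_json_file` (which lives in `hassio/utils/json.py`) is inferred from this call site, not shown in the diff:

```python
import json
from pathlib import Path


class JsonFileError(Exception):
    """Raised when a JSON file cannot be read or parsed."""


def read_json_file(path: Path) -> dict:
    """Read a JSON file, normalizing all failures into JsonFileError."""
    try:
        return json.loads(path.read_text())
    except (OSError, json.JSONDecodeError) as err:
        raise JsonFileError() from err


try:
    arch_data = read_json_file(Path("arch.json"))
except JsonFileError:
    arch_data = {}  # mirror the early return in CpuArch.load()
```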
@@ -19,6 +19,7 @@ from .discovery import Discovery
|
||||
from .hassos import HassOS
|
||||
from .homeassistant import HomeAssistant
|
||||
from .host import HostManager
|
||||
from .ingress import Ingress
|
||||
from .services import ServiceManager
|
||||
from .snapshots import SnapshotManager
|
||||
from .supervisor import Supervisor
|
||||
@@ -49,6 +50,7 @@ async def initialize_coresys():
|
||||
coresys.addons = AddonManager(coresys)
|
||||
coresys.snapshots = SnapshotManager(coresys)
|
||||
coresys.host = HostManager(coresys)
|
||||
coresys.ingress = Ingress(coresys)
|
||||
coresys.tasks = Tasks(coresys)
|
||||
coresys.services = ServiceManager(coresys)
|
||||
coresys.discovery = Discovery(coresys)
|
||||
@@ -71,8 +73,9 @@ def initialize_system_data(coresys):
|
||||
|
||||
# Home Assistant configuration folder
|
||||
if not config.path_homeassistant.is_dir():
|
||||
_LOGGER.info("Create Home Assistant configuration folder %s",
|
||||
config.path_homeassistant)
|
||||
_LOGGER.info(
|
||||
"Create Home Assistant configuration folder %s", config.path_homeassistant
|
||||
)
|
||||
config.path_homeassistant.mkdir()
|
||||
|
||||
# hassio ssl folder
|
||||
@@ -82,18 +85,19 @@ def initialize_system_data(coresys):
|
||||
|
||||
# hassio addon data folder
|
||||
if not config.path_addons_data.is_dir():
|
||||
_LOGGER.info("Create Hass.io Add-on data folder %s",
|
||||
config.path_addons_data)
|
||||
_LOGGER.info("Create Hass.io Add-on data folder %s", config.path_addons_data)
|
||||
config.path_addons_data.mkdir(parents=True)
|
||||
|
||||
if not config.path_addons_local.is_dir():
|
||||
_LOGGER.info("Create Hass.io Add-on local repository folder %s",
|
||||
config.path_addons_local)
|
||||
_LOGGER.info(
|
||||
"Create Hass.io Add-on local repository folder %s", config.path_addons_local
|
||||
)
|
||||
config.path_addons_local.mkdir(parents=True)
|
||||
|
||||
if not config.path_addons_git.is_dir():
|
||||
_LOGGER.info("Create Hass.io Add-on git repositories folder %s",
|
||||
config.path_addons_git)
|
||||
_LOGGER.info(
|
||||
"Create Hass.io Add-on git repositories folder %s", config.path_addons_git
|
||||
)
|
||||
config.path_addons_git.mkdir(parents=True)
|
||||
|
||||
# hassio tmp folder
|
||||
@@ -154,7 +158,8 @@ def initialize_logging():
|
||||
"ERROR": "red",
|
||||
"CRITICAL": "red",
|
||||
},
|
||||
))
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def check_environment():
|
||||
@@ -188,19 +193,16 @@ def check_environment():
|
||||
def reg_signal(loop):
|
||||
"""Register SIGTERM and SIGKILL to stop system."""
|
||||
try:
|
||||
loop.add_signal_handler(signal.SIGTERM,
|
||||
lambda: loop.call_soon(loop.stop))
|
||||
loop.add_signal_handler(signal.SIGTERM, lambda: loop.call_soon(loop.stop))
|
||||
except (ValueError, RuntimeError):
|
||||
_LOGGER.warning("Could not bind to SIGTERM")
|
||||
|
||||
try:
|
||||
loop.add_signal_handler(signal.SIGHUP,
|
||||
lambda: loop.call_soon(loop.stop))
|
||||
loop.add_signal_handler(signal.SIGHUP, lambda: loop.call_soon(loop.stop))
|
||||
except (ValueError, RuntimeError):
|
||||
_LOGGER.warning("Could not bind to SIGHUP")
|
||||
|
||||
try:
|
||||
loop.add_signal_handler(signal.SIGINT,
|
||||
lambda: loop.call_soon(loop.stop))
|
||||
loop.add_signal_handler(signal.SIGINT, lambda: loop.call_soon(loop.stop))
|
||||
except (ValueError, RuntimeError):
|
||||
_LOGGER.warning("Could not bind to SIGINT")
|
||||
|
@@ -8,15 +8,21 @@ from pathlib import Path, PurePath
|
||||
import pytz
|
||||
|
||||
from .const import (
|
||||
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
|
||||
ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
|
||||
FILE_HASSIO_CONFIG,
|
||||
HASSIO_DATA,
|
||||
ATTR_TIMEZONE,
|
||||
ATTR_ADDONS_CUSTOM_LIST,
|
||||
ATTR_LAST_BOOT,
|
||||
ATTR_WAIT_BOOT,
|
||||
ATTR_LOGGING,
|
||||
)
|
||||
from .utils.dt import parse_datetime
|
||||
from .utils.json import JsonConfig
|
||||
from .validate import SCHEMA_HASSIO_CONFIG
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
HOMEASSISTANT_CONFIG = PurePath('homeassistant')
|
||||
HOMEASSISTANT_CONFIG = PurePath("homeassistant")
|
||||
|
||||
HASSIO_SSL = PurePath("ssl")
|
||||
|
||||
@@ -45,7 +51,7 @@ class CoreConfig(JsonConfig):
|
||||
@property
|
||||
def timezone(self):
|
||||
"""Return system timezone."""
|
||||
config_file = Path(self.path_homeassistant, 'configuration.yaml')
|
||||
config_file = Path(self.path_homeassistant, "configuration.yaml")
|
||||
try:
|
||||
assert config_file.exists()
|
||||
configuration = config_file.read_text()
|
||||
@@ -53,7 +59,7 @@ class CoreConfig(JsonConfig):
|
||||
data = RE_TIMEZONE.search(configuration)
|
||||
assert data
|
||||
|
||||
timezone = data.group('timezone')
|
||||
timezone = data.group("timezone")
|
||||
pytz.timezone(timezone)
|
||||
except (pytz.exceptions.UnknownTimeZoneError, OSError, AssertionError):
|
||||
_LOGGER.debug("Can't parse Home Assistant timezone")
|
||||
@@ -67,15 +73,25 @@ class CoreConfig(JsonConfig):
|
||||
self._data[ATTR_TIMEZONE] = value
|
||||
|
||||
@property
|
||||
def wait_boot(self):
|
||||
def wait_boot(self) -> int:
|
||||
"""Return wait time for auto boot stages."""
|
||||
return self._data[ATTR_WAIT_BOOT]
|
||||
|
||||
@wait_boot.setter
|
||||
def wait_boot(self, value):
|
||||
def wait_boot(self, value: int):
|
||||
"""Set wait boot time."""
|
||||
self._data[ATTR_WAIT_BOOT] = value
|
||||
|
||||
@property
|
||||
def logging(self) -> str:
|
||||
"""Return log level of system."""
|
||||
return self._data[ATTR_LOGGING]
|
||||
|
||||
@logging.setter
|
||||
def logging(self, value: str):
|
||||
"""Set system log level."""
|
||||
self._data[ATTR_LOGGING] = value
|
||||
|
||||
@property
|
||||
def last_boot(self):
|
||||
"""Return last boot datetime."""
|
||||
@@ -99,7 +115,7 @@ class CoreConfig(JsonConfig):
|
||||
@property
|
||||
def path_extern_hassio(self):
|
||||
"""Return Hass.io data path external for Docker."""
|
||||
return PurePath(os.environ['SUPERVISOR_SHARE'])
|
||||
return PurePath(os.environ["SUPERVISOR_SHARE"])
|
||||
|
||||
@property
|
||||
def path_extern_homeassistant(self):
|
||||
|
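The new `logging` property/setter pair persists the chosen level in the Supervisor's JSON config under the `logging` key, following the same pattern as `wait_boot`. Illustrative sketch only; the default value here is assumed (in the real code it comes from `SCHEMA_HASSIO_CONFIG`):

```python
class ExampleConfig:
    """Stand-in for CoreConfig, persisting values in a plain dict."""

    def __init__(self):
        self._data = {"logging": "info"}  # assumed default

    @property
    def logging(self) -> str:
        """Return log level of system."""
        return self._data["logging"]

    @logging.setter
    def logging(self, value: str):
        """Set system log level."""
        self._data["logging"] = value


config = ExampleConfig()
config.logging = "debug"
assert config.logging == "debug"
```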
@@ -2,14 +2,17 @@
|
||||
from pathlib import Path
|
||||
from ipaddress import ip_network
|
||||
|
||||
HASSIO_VERSION = "145"
|
||||
|
||||
HASSIO_VERSION = "158"
|
||||
|
||||
URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
|
||||
URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
|
||||
URL_HASSIO_APPARMOR = "https://s3.amazonaws.com/hassio-version/apparmor.txt"
|
||||
|
||||
URL_HASSOS_OTA = ("https://github.com/home-assistant/hassos/releases/download/"
|
||||
"{version}/hassos_{board}-{version}.raucb")
|
||||
URL_HASSOS_OTA = (
|
||||
"https://github.com/home-assistant/hassos/releases/download/"
|
||||
"{version}/hassos_{board}-{version}.raucb"
|
||||
)
|
||||
|
||||
HASSIO_DATA = Path("/data")
|
||||
|
||||
@@ -20,6 +23,7 @@ FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
|
||||
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
|
||||
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
|
||||
FILE_HASSIO_DISCOVERY = Path(HASSIO_DATA, "discovery.json")
|
||||
FILE_HASSIO_INGRESS = Path(HASSIO_DATA, "ingress.json")
|
||||
|
||||
SOCKET_DOCKER = Path("/var/run/docker.sock")
|
||||
|
||||
@@ -49,8 +53,9 @@ CONTENT_TYPE_JSON = "application/json"
|
||||
CONTENT_TYPE_TEXT = "text/plain"
|
||||
CONTENT_TYPE_TAR = "application/tar"
|
||||
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
|
||||
HEADER_HA_ACCESS = "x-ha-access"
|
||||
HEADER_TOKEN = "x-hassio-key"
|
||||
HEADER_HA_ACCESS = "X-Ha-Access"
|
||||
HEADER_TOKEN = "X-Hassio-Key"
|
||||
COOKIE_INGRESS = "ingress_session"
|
||||
|
||||
ENV_TOKEN = "HASSIO_TOKEN"
|
||||
ENV_TIME = "TZ"
|
||||
@@ -62,6 +67,7 @@ ATTR_WAIT_BOOT = "wait_boot"
|
||||
ATTR_DEPLOYMENT = "deployment"
|
||||
ATTR_WATCHDOG = "watchdog"
|
||||
ATTR_CHANGELOG = "changelog"
|
||||
ATTR_LOGGING = "logging"
|
||||
ATTR_DATE = "date"
|
||||
ATTR_ARCH = "arch"
|
||||
ATTR_LONG_DESCRIPTION = "long_description"
|
||||
@@ -87,6 +93,7 @@ ATTR_DESCRIPTON = "description"
|
||||
ATTR_STARTUP = "startup"
|
||||
ATTR_BOOT = "boot"
|
||||
ATTR_PORTS = "ports"
|
||||
ATTR_PORTS_DESCRIPTION = "ports_description"
|
||||
ATTR_PORT = "port"
|
||||
ATTR_SSL = "ssl"
|
||||
ATTR_MAP = "map"
|
||||
@@ -117,6 +124,7 @@ ATTR_HOST_PID = "host_pid"
|
||||
ATTR_HOST_IPC = "host_ipc"
|
||||
ATTR_HOST_DBUS = "host_dbus"
|
||||
ATTR_NETWORK = "network"
|
||||
ATTR_NETWORK_DESCRIPTION = "network_description"
|
||||
ATTR_TMPFS = "tmpfs"
|
||||
ATTR_PRIVILEGED = "privileged"
|
||||
ATTR_USER = "user"
|
||||
@@ -157,7 +165,6 @@ ATTR_ADDON = "addon"
|
||||
ATTR_AVAILABLE = "available"
|
||||
ATTR_HOST = "host"
|
||||
ATTR_USERNAME = "username"
|
||||
ATTR_PROTOCOL = "protocol"
|
||||
ATTR_DISCOVERY = "discovery"
|
||||
ATTR_CONFIG = "config"
|
||||
ATTR_SERVICES = "services"
|
||||
@@ -186,8 +193,14 @@ ATTR_SUPERVISOR = "supervisor"
|
||||
ATTR_AUTH_API = "auth_api"
|
||||
ATTR_KERNEL_MODULES = "kernel_modules"
|
||||
ATTR_SUPPORTED_ARCH = "supported_arch"
|
||||
ATTR_INGRESS = "ingress"
|
||||
ATTR_INGRESS_PORT = "ingress_port"
|
||||
ATTR_INGRESS_ENTRY = "ingress_entry"
|
||||
ATTR_INGRESS_TOKEN = "ingress_token"
|
||||
ATTR_INGRESS_URL = "ingress_url"
|
||||
ATTR_IP_ADDRESS = "ip_address"
|
||||
ATTR_SESSION = "session"
|
||||
|
||||
SERVICE_MQTT = "mqtt"
|
||||
PROVIDE_SERVICE = "provide"
|
||||
NEED_SERVICE = "need"
|
||||
WANT_SERVICE = "want"
|
||||
@@ -199,8 +212,11 @@ STARTUP_APPLICATION = "application"
|
||||
STARTUP_ONCE = "once"
|
||||
|
||||
STARTUP_ALL = [
|
||||
STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
|
||||
STARTUP_APPLICATION
|
||||
STARTUP_ONCE,
|
||||
STARTUP_INITIALIZE,
|
||||
STARTUP_SYSTEM,
|
||||
STARTUP_SERVICES,
|
||||
STARTUP_APPLICATION,
|
||||
]
|
||||
|
||||
BOOT_AUTO = "auto"
|
||||
@@ -281,13 +297,7 @@ ROLE_BACKUP = "backup"
|
||||
ROLE_MANAGER = "manager"
|
||||
ROLE_ADMIN = "admin"
|
||||
|
||||
ROLE_ALL = [
|
||||
ROLE_DEFAULT,
|
||||
ROLE_HOMEASSISTANT,
|
||||
ROLE_BACKUP,
|
||||
ROLE_MANAGER,
|
||||
ROLE_ADMIN,
|
||||
]
|
||||
ROLE_ALL = [ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_BACKUP, ROLE_MANAGER, ROLE_ADMIN]
|
||||
|
||||
CHAN_ID = "chan_id"
|
||||
CHAN_TYPE = "chan_type"
|
||||
|
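`HEADER_TOKEN` and `HEADER_HA_ACCESS` switch to canonical casing (`X-Hassio-Key`, `X-Ha-Access`); since HTTP header names are case-insensitive, existing clients keep working. A hedged request sketch (the `http://hassio/...` host is the usual add-on-side endpoint and is assumed here, as is the use of `requests`):

```python
import requests  # any HTTP client works; requests is only for illustration

HEADER_TOKEN = "X-Hassio-Key"

response = requests.get(
    "http://hassio/supervisor/info",
    headers={HEADER_TOKEN: "<HASSIO_TOKEN>"},
    timeout=10,
)
print(response.json())
```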
@@ -6,8 +6,12 @@ import logging
|
||||
import async_timeout
|
||||
|
||||
from .coresys import CoreSysAttributes
|
||||
from .const import (STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION,
|
||||
STARTUP_INITIALIZE)
|
||||
from .const import (
|
||||
STARTUP_SYSTEM,
|
||||
STARTUP_SERVICES,
|
||||
STARTUP_APPLICATION,
|
||||
STARTUP_INITIALIZE,
|
||||
)
|
||||
from .exceptions import HassioError, HomeAssistantError
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -58,6 +62,9 @@ class HassIO(CoreSysAttributes):
|
||||
# Load discovery
|
||||
await self.sys_discovery.load()
|
||||
|
||||
# Load ingress
|
||||
await self.sys_ingress.load()
|
||||
|
||||
# start dns forwarding
|
||||
self.sys_create_task(self.sys_dns.start())
|
||||
|
||||
@@ -108,7 +115,7 @@ class HassIO(CoreSysAttributes):
|
||||
await self.sys_tasks.load()
|
||||
|
||||
# If landingpage / run upgrade in background
|
||||
if self.sys_homeassistant.version == 'landingpage':
|
||||
if self.sys_homeassistant.version == "landingpage":
|
||||
self.sys_create_task(self.sys_homeassistant.install())
|
||||
|
||||
_LOGGER.info("Hass.io is up and running")
|
||||
@@ -121,12 +128,15 @@ class HassIO(CoreSysAttributes):
|
||||
# process async stop tasks
|
||||
try:
|
||||
with async_timeout.timeout(10):
|
||||
await asyncio.wait([
|
||||
self.sys_api.stop(),
|
||||
self.sys_dns.stop(),
|
||||
self.sys_websession.close(),
|
||||
self.sys_websession_ssl.close()
|
||||
])
|
||||
await asyncio.wait(
|
||||
[
|
||||
self.sys_api.stop(),
|
||||
self.sys_dns.stop(),
|
||||
self.sys_websession.close(),
|
||||
self.sys_websession_ssl.close(),
|
||||
self.sys_ingress.unload(),
|
||||
]
|
||||
)
|
||||
except asyncio.TimeoutError:
|
||||
_LOGGER.warning("Force Shutdown!")
|
||||
|
||||
|
@@ -23,6 +23,7 @@ if TYPE_CHECKING:
|
||||
from .hassos import HassOS
|
||||
from .homeassistant import HomeAssistant
|
||||
from .host import HostManager
|
||||
from .ingress import Ingress
|
||||
from .services import ServiceManager
|
||||
from .snapshots import SnapshotManager
|
||||
from .supervisor import Supervisor
|
||||
@@ -63,6 +64,7 @@ class CoreSys:
|
||||
self._snapshots: SnapshotManager = None
|
||||
self._tasks: Tasks = None
|
||||
self._host: HostManager = None
|
||||
self._ingress: Ingress = None
|
||||
self._dbus: DBusManager = None
|
||||
self._hassos: HassOS = None
|
||||
self._services: ServiceManager = None
|
||||
@@ -293,6 +295,18 @@ class CoreSys:
|
||||
raise RuntimeError("HostManager already set!")
|
||||
self._host = value
|
||||
|
||||
@property
|
||||
def ingress(self) -> Ingress:
|
||||
"""Return Ingress object."""
|
||||
return self._ingress
|
||||
|
||||
@ingress.setter
|
||||
def ingress(self, value: Ingress):
|
||||
"""Set a Ingress object."""
|
||||
if self._ingress:
|
||||
raise RuntimeError("Ingress already set!")
|
||||
self._ingress = value
|
||||
|
||||
@property
|
||||
def hassos(self) -> HassOS:
|
||||
"""Return HassOS object."""
|
||||
@@ -441,6 +455,11 @@ class CoreSysAttributes:
|
||||
"""Return HostManager object."""
|
||||
return self.coresys.host
|
||||
|
||||
@property
|
||||
def sys_ingress(self) -> Ingress:
|
||||
"""Return Ingress object."""
|
||||
return self.coresys.ingress
|
||||
|
||||
@property
|
||||
def sys_hassos(self) -> HassOS:
|
||||
"""Return HassOS object."""
|
||||
|
@@ -1,35 +1,50 @@
|
||||
"""Handle discover message for Home Assistant."""
|
||||
import logging
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
from uuid import uuid4
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional, TYPE_CHECKING
|
||||
from uuid import uuid4, UUID
|
||||
|
||||
import attr
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from .const import FILE_HASSIO_DISCOVERY, ATTR_CONFIG, ATTR_DISCOVERY
|
||||
from .coresys import CoreSysAttributes
|
||||
from .exceptions import DiscoveryError, HomeAssistantAPIError
|
||||
from .validate import SCHEMA_DISCOVERY_CONFIG
|
||||
from .utils.json import JsonConfig
|
||||
from .services.validate import DISCOVERY_SERVICES
|
||||
from ..const import ATTR_CONFIG, ATTR_DISCOVERY, FILE_HASSIO_DISCOVERY
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..exceptions import DiscoveryError, HomeAssistantAPIError
|
||||
from ..utils.json import JsonConfig
|
||||
from .validate import SCHEMA_DISCOVERY_CONFIG, valid_discovery_config
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..addons.addon import Addon
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CMD_NEW = 'post'
|
||||
CMD_DEL = 'delete'
|
||||
CMD_NEW = "post"
|
||||
CMD_DEL = "delete"
|
||||
|
||||
|
||||
@attr.s
|
||||
class Message:
|
||||
"""Represent a single Discovery message."""
|
||||
|
||||
addon: str = attr.ib()
|
||||
service: str = attr.ib()
|
||||
config: Dict[str, Any] = attr.ib(cmp=False)
|
||||
uuid: UUID = attr.ib(factory=lambda: uuid4().hex, cmp=False)
|
||||
|
||||
|
||||
class Discovery(CoreSysAttributes, JsonConfig):
|
||||
"""Home Assistant Discovery handler."""
|
||||
|
||||
def __init__(self, coresys):
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize discovery handler."""
|
||||
super().__init__(FILE_HASSIO_DISCOVERY, SCHEMA_DISCOVERY_CONFIG)
|
||||
self.coresys = coresys
|
||||
self.message_obj = {}
|
||||
self.coresys: CoreSys = coresys
|
||||
self.message_obj: Dict[str, Message] = {}
|
||||
|
||||
async def load(self):
|
||||
async def load(self) -> None:
|
||||
"""Load exists discovery message into storage."""
|
||||
messages = {}
|
||||
for message in self._data[ATTR_DISCOVERY]:
|
||||
@@ -39,9 +54,9 @@ class Discovery(CoreSysAttributes, JsonConfig):
|
||||
_LOGGER.info("Load %d messages", len(messages))
|
||||
self.message_obj = messages
|
||||
|
||||
def save(self):
|
||||
def save(self) -> None:
|
||||
"""Write discovery message into data file."""
|
||||
messages = []
|
||||
messages: List[Dict[str, Any]] = []
|
||||
for message in self.list_messages:
|
||||
messages.append(attr.asdict(message))
|
||||
|
||||
@@ -49,22 +64,21 @@ class Discovery(CoreSysAttributes, JsonConfig):
|
||||
self._data[ATTR_DISCOVERY].extend(messages)
|
||||
self.save_data()
|
||||
|
||||
def get(self, uuid):
|
||||
def get(self, uuid: str) -> Optional[Message]:
|
||||
"""Return discovery message."""
|
||||
return self.message_obj.get(uuid)
|
||||
|
||||
@property
|
||||
def list_messages(self):
|
||||
def list_messages(self) -> List[Message]:
|
||||
"""Return list of available discovery messages."""
|
||||
return list(self.message_obj.values())
|
||||
|
||||
def send(self, addon, service, config):
|
||||
def send(self, addon: Addon, service: str, config: Dict[str, Any]) -> Message:
|
||||
"""Send a discovery message to Home Assistant."""
|
||||
try:
|
||||
config = DISCOVERY_SERVICES[service](config)
|
||||
config = valid_discovery_config(service, config)
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error(
|
||||
"Invalid discovery %s config", humanize_error(config, err))
|
||||
_LOGGER.error("Invalid discovery %s config", humanize_error(config, err))
|
||||
raise DiscoveryError() from None
|
||||
|
||||
# Create message
|
||||
@@ -77,24 +91,26 @@ class Discovery(CoreSysAttributes, JsonConfig):
|
||||
_LOGGER.info("Duplicate discovery message from %s", addon.slug)
|
||||
return old_message
|
||||
|
||||
_LOGGER.info("Send discovery to Home Assistant %s from %s",
|
||||
service, addon.slug)
|
||||
_LOGGER.info("Send discovery to Home Assistant %s from %s", service, addon.slug)
|
||||
self.message_obj[message.uuid] = message
|
||||
self.save()
|
||||
|
||||
self.sys_create_task(self._push_discovery(message, CMD_NEW))
|
||||
return message
|
||||
|
||||
def remove(self, message):
|
||||
def remove(self, message: Message) -> None:
|
||||
"""Remove a discovery message from Home Assistant."""
|
||||
self.message_obj.pop(message.uuid, None)
|
||||
self.save()
|
||||
|
||||
_LOGGER.info("Delete discovery to Home Assistant %s from %s",
|
||||
message.service, message.addon)
|
||||
_LOGGER.info(
|
||||
"Delete discovery to Home Assistant %s from %s",
|
||||
message.service,
|
||||
message.addon,
|
||||
)
|
||||
self.sys_create_task(self._push_discovery(message, CMD_DEL))
|
||||
|
||||
async def _push_discovery(self, message, command):
|
||||
async def _push_discovery(self, message: Message, command: str) -> None:
|
||||
"""Send a discovery request."""
|
||||
if not await self.sys_homeassistant.check_api_state():
|
||||
_LOGGER.info("Discovery %s mesage ignore", message.uuid)
|
||||
@@ -105,18 +121,12 @@ class Discovery(CoreSysAttributes, JsonConfig):
|
||||
|
||||
with suppress(HomeAssistantAPIError):
|
||||
async with self.sys_homeassistant.make_request(
|
||||
command, f"api/hassio_push/discovery/{message.uuid}",
|
||||
json=data, timeout=10):
|
||||
command,
|
||||
f"api/hassio_push/discovery/{message.uuid}",
|
||||
json=data,
|
||||
timeout=10,
|
||||
):
|
||||
_LOGGER.info("Discovery %s message send", message.uuid)
|
||||
return
|
||||
|
||||
_LOGGER.warning("Discovery %s message fail", message.uuid)
|
||||
|
||||
|
||||
@attr.s
|
||||
class Message:
|
||||
"""Represent a single Discovery message."""
|
||||
addon = attr.ib()
|
||||
service = attr.ib()
|
||||
config = attr.ib(cmp=False)
|
||||
uuid = attr.ib(factory=lambda: uuid4().hex, cmp=False)
|
10
hassio/discovery/const.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Discovery static data."""
|
||||
|
||||
ATTR_HOST = "host"
|
||||
ATTR_PASSWORD = "password"
|
||||
ATTR_PORT = "port"
|
||||
ATTR_PROTOCOL = "protocol"
|
||||
ATTR_SSL = "ssl"
|
||||
ATTR_USERNAME = "username"
|
||||
ATTR_API_KEY = "api_key"
|
||||
ATTR_SERIAL = "serial"
|
1
hassio/discovery/services/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Discovery service modules."""
|
16
hassio/discovery/services/deconz.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""Discovery service for MQTT."""
|
||||
import voluptuous as vol
|
||||
|
||||
from hassio.validate import NETWORK_PORT
|
||||
|
||||
from ..const import ATTR_HOST, ATTR_PORT, ATTR_API_KEY, ATTR_SERIAL
|
||||
|
||||
|
||||
SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_HOST): vol.Coerce(str),
|
||||
vol.Required(ATTR_PORT): NETWORK_PORT,
|
||||
vol.Required(ATTR_SERIAL): vol.Coerce(str),
|
||||
vol.Required(ATTR_API_KEY): vol.Coerce(str),
|
||||
}
|
||||
)
|
27
hassio/discovery/services/mqtt.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""Discovery service for MQTT."""
|
||||
import voluptuous as vol
|
||||
|
||||
from hassio.validate import NETWORK_PORT
|
||||
|
||||
from ..const import (
|
||||
ATTR_HOST,
|
||||
ATTR_PASSWORD,
|
||||
ATTR_PORT,
|
||||
ATTR_PROTOCOL,
|
||||
ATTR_SSL,
|
||||
ATTR_USERNAME,
|
||||
)
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_HOST): vol.Coerce(str),
|
||||
vol.Required(ATTR_PORT): NETWORK_PORT,
|
||||
vol.Optional(ATTR_USERNAME): vol.Coerce(str),
|
||||
vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
|
||||
vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_PROTOCOL, default="3.1.1"): vol.All(
|
||||
vol.Coerce(str), vol.In(["3.1", "3.1.1"])
|
||||
),
|
||||
}
|
||||
)
|
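Each discovery service now ships its own schema module. A standalone sketch of validating an MQTT payload against the schema above; `NETWORK_PORT` is approximated with a plain port-range validator here, the real one lives in `hassio/validate.py`:

```python
import voluptuous as vol

# Approximation of hassio.validate.NETWORK_PORT for a self-contained example.
NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))

SCHEMA = vol.Schema(
    {
        vol.Required("host"): vol.Coerce(str),
        vol.Required("port"): NETWORK_PORT,
        vol.Optional("username"): vol.Coerce(str),
        vol.Optional("password"): vol.Coerce(str),
        vol.Optional("ssl", default=False): vol.Boolean(),
        vol.Optional("protocol", default="3.1.1"): vol.All(
            vol.Coerce(str), vol.In(["3.1", "3.1.1"])
        ),
    }
)

config = SCHEMA({"host": "core-mosquitto", "port": 1883})
# Defaults filled in:
# {'host': 'core-mosquitto', 'port': 1883, 'ssl': False, 'protocol': '3.1.1'}
```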
47
hassio/discovery/validate.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""Validate services schema."""
|
||||
from pathlib import Path
|
||||
from importlib import import_module
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import ATTR_ADDON, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_UUID
|
||||
from ..utils.validate import schema_or
|
||||
from ..validate import UUID_MATCH
|
||||
|
||||
|
||||
def valid_discovery_service(service):
|
||||
"""Validate service name."""
|
||||
service_file = Path(__file__).parent.joinpath(f"services/{service}.py")
|
||||
if not service_file.exists():
|
||||
raise vol.Invalid(f"Service {service} not found")
|
||||
return service
|
||||
|
||||
|
||||
def valid_discovery_config(service, config):
|
||||
"""Validate service name."""
|
||||
try:
|
||||
service_mod = import_module(f".services.{service}", "hassio.discovery")
|
||||
except ImportError:
|
||||
raise vol.Invalid(f"Service {service} not found")
|
||||
|
||||
return service_mod.SCHEMA(config)
|
||||
|
||||
|
||||
SCHEMA_DISCOVERY = vol.Schema(
|
||||
[
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_UUID): UUID_MATCH,
|
||||
vol.Required(ATTR_ADDON): vol.Coerce(str),
|
||||
vol.Required(ATTR_SERVICE): valid_discovery_service,
|
||||
vol.Required(ATTR_CONFIG): vol.Maybe(dict),
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
SCHEMA_DISCOVERY_CONFIG = vol.Schema(
|
||||
{vol.Optional(ATTR_DISCOVERY, default=list): schema_or(SCHEMA_DISCOVERY)},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
@@ -1,12 +1,14 @@
|
||||
"""Init file for Hass.io Docker object."""
|
||||
from contextlib import suppress
|
||||
import logging
|
||||
from contextlib import suppress
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import attr
|
||||
import docker
|
||||
|
||||
from .network import DockerNetwork
|
||||
from ..const import SOCKET_DOCKER
|
||||
from ..exceptions import DockerAPIError
|
||||
from .network import DockerNetwork
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -14,8 +16,9 @@ _LOGGER = logging.getLogger(__name__)
|
||||
@attr.s(frozen=True)
|
||||
class CommandReturn:
|
||||
"""Return object from command run."""
|
||||
exit_code = attr.ib()
|
||||
output = attr.ib()
|
||||
|
||||
exit_code: int = attr.ib()
|
||||
output: bytes = attr.ib()
|
||||
|
||||
|
||||
class DockerAPI:
|
||||
@@ -26,75 +29,87 @@ class DockerAPI:
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize Docker base wrapper."""
|
||||
self.docker = docker.DockerClient(
|
||||
base_url="unix:/{}".format(str(SOCKET_DOCKER)),
|
||||
version='auto', timeout=900)
|
||||
self.network = DockerNetwork(self.docker)
|
||||
self.docker: docker.DockerClient = docker.DockerClient(
|
||||
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version="auto", timeout=900
|
||||
)
|
||||
self.network: DockerNetwork = DockerNetwork(self.docker)
|
||||
|
||||
@property
|
||||
def images(self):
|
||||
def images(self) -> docker.models.images.ImageCollection:
|
||||
"""Return API images."""
|
||||
return self.docker.images
|
||||
|
||||
@property
|
||||
def containers(self):
|
||||
def containers(self) -> docker.models.containers.ContainerCollection:
|
||||
"""Return API containers."""
|
||||
return self.docker.containers
|
||||
|
||||
@property
|
||||
def api(self):
|
||||
def api(self) -> docker.APIClient:
|
||||
"""Return API containers."""
|
||||
return self.docker.api
|
||||
|
||||
def run(self, image, **kwargs):
|
||||
def run(
|
||||
self, image: str, **kwargs: Dict[str, Any]
|
||||
) -> docker.models.containers.Container:
|
||||
""""Create a Docker container and run it.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
name = kwargs.get('name', image)
|
||||
network_mode = kwargs.get('network_mode')
|
||||
hostname = kwargs.get('hostname')
|
||||
name = kwargs.get("name", image)
|
||||
network_mode = kwargs.get("network_mode")
|
||||
hostname = kwargs.get("hostname")
|
||||
|
||||
# Setup network
|
||||
kwargs['dns_search'] = ["."]
|
||||
kwargs["dns_search"] = ["."]
|
||||
if network_mode:
|
||||
kwargs['dns'] = [str(self.network.supervisor)]
|
||||
kwargs['dns_opt'] = ["ndots:0"]
|
||||
kwargs["dns"] = [str(self.network.supervisor)]
|
||||
kwargs["dns_opt"] = ["ndots:0"]
|
||||
else:
|
||||
kwargs['network'] = None
|
||||
kwargs["network"] = None
|
||||
|
||||
# Create container
|
||||
try:
|
||||
container = self.docker.containers.create(
|
||||
image, use_config_proxy=False, **kwargs)
|
||||
image, use_config_proxy=False, **kwargs
|
||||
)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.error("Can't create container from %s: %s", name, err)
|
||||
return False
|
||||
raise DockerAPIError() from None
|
||||
|
||||
# attach network
|
||||
# Attach network
|
||||
if not network_mode:
|
||||
alias = [hostname] if hostname else None
|
||||
if self.network.attach_container(container, alias=alias):
|
||||
self.network.detach_default_bridge(container)
|
||||
else:
|
||||
try:
|
||||
self.network.attach_container(container, alias=alias)
|
||||
except DockerAPIError:
|
||||
_LOGGER.warning("Can't attach %s to hassio-net!", name)
|
||||
else:
|
||||
with suppress(DockerAPIError):
|
||||
self.network.detach_default_bridge(container)
|
||||
|
||||
# run container
|
||||
# Run container
|
||||
try:
|
||||
container.start()
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.error("Can't start %s: %s", name, err)
|
||||
return False
|
||||
raise DockerAPIError() from None
|
||||
|
||||
return True
|
||||
# Update metadata
|
||||
with suppress(docker.errors.DockerException):
|
||||
container.reload()
|
||||
|
||||
def run_command(self, image, command=None, **kwargs):
|
||||
return container
|
||||
|
||||
def run_command(
|
||||
self, image: str, command: Optional[str] = None, **kwargs: Dict[str, Any]
|
||||
) -> CommandReturn:
|
||||
"""Create a temporary container and run command.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
stdout = kwargs.get('stdout', True)
|
||||
stderr = kwargs.get('stderr', True)
|
||||
stdout = kwargs.get("stdout", True)
|
||||
stderr = kwargs.get("stderr", True)
|
||||
|
||||
_LOGGER.info("Run command '%s' on %s", command, image)
|
||||
try:
|
||||
@@ -112,11 +127,11 @@ class DockerAPI:
|
||||
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.error("Can't execute command: %s", err)
|
||||
return CommandReturn(None, b"")
|
||||
raise DockerAPIError() from None
|
||||
|
||||
finally:
|
||||
# cleanup container
|
||||
with suppress(docker.errors.DockerException):
|
||||
container.remove(force=True)
|
||||
|
||||
return CommandReturn(result.get('StatusCode'), output)
|
||||
return CommandReturn(result.get("StatusCode"), output)
|
||||
|
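`DockerAPI.run()` and `run_command()` now signal failure by raising `DockerAPIError` (and `run()` returns the started container after a metadata reload) instead of returning `False` or a dummy `CommandReturn`. A hedged caller-side sketch, assuming a reachable Docker socket and an image that is already available locally:

```python
from contextlib import suppress

from hassio.docker import DockerAPI
from hassio.exceptions import DockerAPIError

docker_api = DockerAPI()

with suppress(DockerAPIError):
    # Raises DockerAPIError on create/start problems instead of returning False.
    container = docker_api.run("homeassistant/amd64-base:latest", name="example")
    print(container.id)
```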
@@ -1,15 +1,35 @@
"""Init file for Hass.io add-on Docker object."""
from __future__ import annotations

from contextlib import suppress
from ipaddress import IPv4Address, ip_address
import logging
import os
from pathlib import Path
from typing import TYPE_CHECKING, Dict, List, Optional, Union, Awaitable

import docker
import requests

from .interface import DockerInterface
from ..addons.build import AddonBuild
from ..const import (MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE,
ENV_TOKEN, ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
from ..const import (
ENV_TIME,
ENV_TOKEN,
MAP_ADDONS,
MAP_BACKUP,
MAP_CONFIG,
MAP_SHARE,
MAP_SSL,
SECURITY_DISABLE,
SECURITY_PROFILE,
)
from ..coresys import CoreSys
from ..exceptions import DockerAPIError
from ..utils import process_lock
from .interface import DockerInterface

if TYPE_CHECKING:
from ..addons.addon import Addon

_LOGGER = logging.getLogger(__name__)

@@ -19,64 +39,77 @@ AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
class DockerAddon(DockerInterface):
"""Docker Hass.io wrapper for Home Assistant."""

def __init__(self, coresys, slug):
def __init__(self, coresys: CoreSys, slug: str):
"""Initialize Docker Home Assistant wrapper."""
super().__init__(coresys)
self._id = slug
self._id: str = slug

@property
def addon(self):
def addon(self) -> Addon:
"""Return add-on of Docker image."""
return self.sys_addons.get(self._id)

@property
def image(self):
def image(self) -> str:
"""Return name of Docker image."""
return self.addon.image

@property
def timeout(self):
def ip_address(self) -> IPv4Address:
"""Return IP address of this container."""
if self.addon.host_network:
return self.sys_docker.network.gateway

# Extract IP-Address
try:
return ip_address(
self._meta["NetworkSettings"]["Networks"]["hassio"]["IPAddress"])
except (KeyError, TypeError, ValueError):
return ip_address("0.0.0.0")

@property
def timeout(self) -> int:
"""Return timeout for Docker actions."""
return self.addon.timeout

@property
def version(self):
def version(self) -> str:
"""Return version of Docker image."""
if self.addon.legacy:
return self.addon.version_installed
return super().version

@property
def arch(self):
def arch(self) -> str:
"""Return arch of Docker image."""
if self.addon.legacy:
return self.sys_arch.default
return super().arch

@property
def name(self):
def name(self) -> str:
"""Return name of Docker container."""
return "addon_{}".format(self.addon.slug)
return f"addon_{self.addon.slug}"

@property
def ipc(self):
def ipc(self) -> Optional[str]:
"""Return the IPC namespace."""
if self.addon.host_ipc:
return 'host'
return "host"
return None

@property
def full_access(self):
def full_access(self) -> bool:
"""Return True if full access is enabled."""
return not self.addon.protected and self.addon.with_full_access

@property
def hostname(self):
def hostname(self) -> str:
"""Return slug/id of add-on."""
return self.addon.slug.replace('_', '-')
return self.addon.slug.replace("_", "-")

@property
def environment(self):
def environment(self) -> Dict[str, str]:
"""Return environment for Docker add-on."""
addon_env = self.addon.environment or {}
@@ -86,8 +119,7 @@ class DockerAddon(DockerInterface):
if isinstance(value, (int, str)):
addon_env[key] = value
else:
_LOGGER.warning(
"Can not set nested option %s as Docker env", key)
_LOGGER.warning("Can not set nested option %s as Docker env", key)

return {
**addon_env,
@@ -96,7 +128,7 @@ class DockerAddon(DockerInterface):
}

@property
def devices(self):
def devices(self) -> List[str]:
"""Return needed devices."""
devices = self.addon.devices or []

@@ -113,9 +145,9 @@ class DockerAddon(DockerInterface):
return devices or None

@property
def ports(self):
def ports(self) -> Optional[Dict[str, Union[str, int, None]]]:
"""Filter None from add-on ports."""
if not self.addon.ports:
if self.addon.host_network or not self.addon.ports:
return None

return {
@@ -125,7 +157,7 @@ class DockerAddon(DockerInterface):
}

@property
def security_opt(self):
def security_opt(self) -> List[str]:
"""Controlling security options."""
security = []

@@ -143,7 +175,7 @@ class DockerAddon(DockerInterface):
return security

@property
def tmpfs(self):
def tmpfs(self) -> Optional[Dict[str, str]]:
"""Return tmpfs for Docker add-on."""
options = self.addon.tmpfs
if options:
@@ -151,156 +183,148 @@ class DockerAddon(DockerInterface):
return None

@property
def network_mapping(self):
def network_mapping(self) -> Dict[str, str]:
"""Return hosts mapping."""
return {
'homeassistant': self.sys_docker.network.gateway,
'hassio': self.sys_docker.network.supervisor,
"homeassistant": self.sys_docker.network.gateway,
"hassio": self.sys_docker.network.supervisor,
}

@property
def network_mode(self):
def network_mode(self) -> Optional[str]:
"""Return network mode for add-on."""
if self.addon.host_network:
return 'host'
return "host"
return None

@property
def pid_mode(self):
def pid_mode(self) -> Optional[str]:
"""Return PID mode for add-on."""
if not self.addon.protected and self.addon.host_pid:
return 'host'
return "host"
return None

@property
def volumes(self):
def volumes(self) -> Dict[str, Dict[str, str]]:
"""Generate volumes for mappings."""
volumes = {
str(self.addon.path_extern_data): {
'bind': "/data",
'mode': 'rw'
}
}
volumes = {str(self.addon.path_extern_data): {"bind": "/data", "mode": "rw"}}

addon_mapping = self.addon.map_volumes

# setup config mappings
if MAP_CONFIG in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_homeassistant): {
'bind': "/config",
'mode': addon_mapping[MAP_CONFIG]
volumes.update(
{
str(self.sys_config.path_extern_homeassistant): {
"bind": "/config",
"mode": addon_mapping[MAP_CONFIG],
}
}
})
)

if MAP_SSL in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_ssl): {
'bind': "/ssl",
'mode': addon_mapping[MAP_SSL]
volumes.update(
{
str(self.sys_config.path_extern_ssl): {
"bind": "/ssl",
"mode": addon_mapping[MAP_SSL],
}
}
})
)

if MAP_ADDONS in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_addons_local): {
'bind': "/addons",
'mode': addon_mapping[MAP_ADDONS]
volumes.update(
{
str(self.sys_config.path_extern_addons_local): {
"bind": "/addons",
"mode": addon_mapping[MAP_ADDONS],
}
}
})
)

if MAP_BACKUP in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_backup): {
'bind': "/backup",
'mode': addon_mapping[MAP_BACKUP]
volumes.update(
{
str(self.sys_config.path_extern_backup): {
"bind": "/backup",
"mode": addon_mapping[MAP_BACKUP],
}
}
})
)

if MAP_SHARE in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_share): {
'bind': "/share",
'mode': addon_mapping[MAP_SHARE]
volumes.update(
{
str(self.sys_config.path_extern_share): {
"bind": "/share",
"mode": addon_mapping[MAP_SHARE],
}
}
})
)
# Init other hardware mappings

# GPIO support
if self.addon.with_gpio and self.sys_hardware.support_gpio:
for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
volumes.update({
gpio_path: {
'bind': gpio_path,
'mode': 'rw'
},
})
volumes.update({gpio_path: {"bind": gpio_path, "mode": "rw"}})

# DeviceTree support
if self.addon.with_devicetree:
volumes.update({
"/sys/firmware/devicetree/base": {
'bind': "/device-tree",
'mode': 'ro'
},
})
volumes.update(
{
"/sys/firmware/devicetree/base": {
"bind": "/device-tree",
"mode": "ro",
}
}
)

# Kernel Modules support
if self.addon.with_kernel_modules:
volumes.update({
"/lib/modules": {
'bind': "/lib/modules",
'mode': 'ro'
},
})
volumes.update({"/lib/modules": {"bind": "/lib/modules", "mode": "ro"}})

# Docker API support
if not self.addon.protected and self.addon.access_docker_api:
volumes.update({
"/var/run/docker.sock": {
'bind': "/var/run/docker.sock",
'mode': 'ro'
},
})
volumes.update(
{"/var/run/docker.sock": {"bind": "/var/run/docker.sock", "mode": "ro"}}
)

# Host D-Bus system
if self.addon.host_dbus:
volumes.update({
"/var/run/dbus": {
'bind': "/var/run/dbus",
'mode': 'rw'
}
})
volumes.update({"/var/run/dbus": {"bind": "/var/run/dbus", "mode": "rw"}})

# ALSA configuration
if self.addon.with_audio:
volumes.update({
str(self.addon.path_extern_asound): {
'bind': "/etc/asound.conf",
'mode': 'ro'
volumes.update(
{
str(self.addon.path_extern_asound): {
"bind": "/etc/asound.conf",
"mode": "ro",
}
}
})
)

return volumes

def _run(self):
def _run(self) -> None:
"""Run Docker image.

Need run inside executor.
"""
if self._is_running():
return True
return

# Security check
if not self.addon.protected:
_LOGGER.warning("%s run with disabled protected mode!",
self.addon.name)
_LOGGER.warning("%s run with disabled protected mode!", self.addon.name)

# cleanup
self._stop()
# Cleanup
with suppress(DockerAPIError):
self._stop()

ret = self.sys_docker.run(
# Create & Run container
docker_container = self.sys_docker.run(
self.image,
name=self.name,
hostname=self.hostname,
@@ -318,25 +342,23 @@ class DockerAddon(DockerInterface):
security_opt=self.security_opt,
environment=self.environment,
volumes=self.volumes,
tmpfs=self.tmpfs)
tmpfs=self.tmpfs,
)

if ret:
_LOGGER.info("Start Docker add-on %s with version %s", self.image,
self.version)
_LOGGER.info("Start Docker add-on %s with version %s", self.image, self.version)
self._meta = docker_container.attrs

return ret

def _install(self, tag, image=None):
def _install(self, tag: str, image: Optional[str] = None) -> None:
"""Pull Docker image or build it.

Need run inside executor.
"""
if self.addon.need_build:
return self._build(tag)
self._build(tag)
else:
super()._install(tag, image)

return super()._install(tag, image)

def _build(self, tag):
def _build(self, tag: str) -> None:
"""Build a Docker container.

Need run inside executor.
@@ -346,27 +368,27 @@ class DockerAddon(DockerInterface):
_LOGGER.info("Start build %s:%s", self.image, tag)
try:
image, log = self.sys_docker.images.build(
use_config_proxy=False, **build_env.get_docker_args(tag))
use_config_proxy=False, **build_env.get_docker_args(tag)
)

_LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
image.tag(self.image, tag='latest')
image.tag(self.image, tag="latest")

# Update meta data
self._meta = image.attrs

except docker.errors.DockerException as err:
_LOGGER.error("Can't build %s:%s: %s", self.image, tag, err)
return False
raise DockerAPIError() from None

_LOGGER.info("Build %s:%s done", self.image, tag)
return True

@process_lock
def export_image(self, path):
def export_image(self, tar_file: Path) -> Awaitable[None]:
"""Export current images into a tar file."""
return self.sys_run_in_executor(self._export_image, path)
return self.sys_run_in_executor(self._export_image, tar_file)

def _export_image(self, tar_file):
def _export_image(self, tar_file: Path) -> None:
"""Export current images into a tar file.

Need run inside executor.
@@ -375,7 +397,7 @@ class DockerAddon(DockerInterface):
image = self.sys_docker.api.get_image(self.image)
except docker.errors.DockerException as err:
_LOGGER.error("Can't fetch image %s: %s", self.image, err)
return False
raise DockerAPIError() from None

_LOGGER.info("Export image %s to %s", self.image, tar_file)
try:
@@ -384,17 +406,16 @@ class DockerAddon(DockerInterface):
write_tar.write(chunk)
except (OSError, requests.exceptions.ReadTimeout) as err:
_LOGGER.error("Can't write tar file %s: %s", tar_file, err)
return False
raise DockerAPIError() from None

_LOGGER.info("Export image %s done", self.image)
return True

@process_lock
def import_image(self, path, tag):
def import_image(self, tar_file: Path, tag: str) -> Awaitable[None]:
"""Import a tar file as image."""
return self.sys_run_in_executor(self._import_image, path, tag)
return self.sys_run_in_executor(self._import_image, tar_file, tag)

def _import_image(self, tar_file, tag):
def _import_image(self, tar_file: Path, tag: str) -> None:
"""Import a tar file as image.

Need run inside executor.
@@ -403,37 +424,38 @@ class DockerAddon(DockerInterface):
with tar_file.open("rb") as read_tar:
self.sys_docker.api.load_image(read_tar, quiet=True)

image = self.sys_docker.images.get(self.image)
image.tag(self.image, tag=tag)
docker_image = self.sys_docker.images.get(self.image)
docker_image.tag(self.image, tag=tag)
except (docker.errors.DockerException, OSError) as err:
_LOGGER.error("Can't import image %s: %s", self.image, err)
return False
raise DockerAPIError() from None

_LOGGER.info("Import image %s and tag %s", tar_file, tag)
self._meta = image.attrs
self._cleanup()
return True
self._meta = docker_image.attrs

with suppress(DockerAPIError):
self._cleanup()

@process_lock
def write_stdin(self, data):
def write_stdin(self, data: bytes) -> Awaitable[None]:
"""Write to add-on stdin."""
return self.sys_run_in_executor(self._write_stdin, data)

def _write_stdin(self, data):
def _write_stdin(self, data: bytes) -> None:
"""Write to add-on stdin.

Need run inside executor.
"""
if not self._is_running():
return False
raise DockerAPIError() from None

try:
# Load needed docker objects
container = self.sys_docker.containers.get(self.name)
socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
socket = container.attach_socket(params={"stdin": 1, "stream": 1})
except docker.errors.DockerException as err:
_LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
return False
raise DockerAPIError() from None

try:
# Write to stdin
@@ -442,6 +464,4 @@ class DockerAddon(DockerInterface):
socket.close()
except OSError as err:
_LOGGER.error("Can't write to %s stdin: %s", self.name, err)
return False

return True
raise DockerAPIError() from None
@@ -17,7 +17,7 @@ class DockerHassOSCli(DockerInterface, CoreSysAttributes):
"""Return name of HassOS CLI image."""
return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"

def _stop(self):
def _stop(self, remove_container=True):
"""Don't need stop."""
return True

@@ -33,5 +33,6 @@ class DockerHassOSCli(DockerInterface, CoreSysAttributes):

else:
self._meta = image.attrs
_LOGGER.info("Found HassOS CLI %s with version %s", self.image,
self.version)
_LOGGER.info(
"Found HassOS CLI %s with version %s", self.image, self.version
)
@@ -1,14 +1,18 @@
"""Init file for Hass.io Docker object."""
from contextlib import suppress
from ipaddress import IPv4Address
import logging
from typing import Awaitable

import docker

from .interface import DockerInterface
from ..const import ENV_TOKEN, ENV_TIME, LABEL_MACHINE
from ..const import ENV_TIME, ENV_TOKEN, LABEL_MACHINE
from ..exceptions import DockerAPIError
from .interface import CommandReturn, DockerInterface

_LOGGER = logging.getLogger(__name__)

HASS_DOCKER_NAME = 'homeassistant'
HASS_DOCKER_NAME = "homeassistant"


class DockerHomeAssistant(DockerInterface):
@@ -17,8 +21,8 @@ class DockerHomeAssistant(DockerInterface):
@property
def machine(self):
"""Return machine of Home Assistant Docker image."""
if self._meta and LABEL_MACHINE in self._meta['Config']['Labels']:
return self._meta['Config']['Labels'][LABEL_MACHINE]
if self._meta and LABEL_MACHINE in self._meta["Config"]["Labels"]:
return self._meta["Config"]["Labels"][LABEL_MACHINE]
return None

@property
@@ -39,18 +43,25 @@ class DockerHomeAssistant(DockerInterface):
devices.append(f"{device}:{device}:rwm")
return devices or None

def _run(self):
@property
def ip_address(self) -> IPv4Address:
"""Return IP address of this container."""
return self.sys_docker.network.gateway

def _run(self) -> None:
"""Run Docker image.

Need run inside executor.
"""
if self._is_running():
return False
return

# cleanup
self._stop()
# Cleanup
with suppress(DockerAPIError):
self._stop()

ret = self.sys_docker.run(
# Create & Run container
docker_container = self.sys_docker.run(
self.image,
name=self.name,
hostname=self.name,
@@ -58,29 +69,29 @@ class DockerHomeAssistant(DockerInterface):
privileged=True,
init=True,
devices=self.devices,
network_mode='host',
network_mode="host",
environment={
'HASSIO': self.sys_docker.network.supervisor,
"HASSIO": self.sys_docker.network.supervisor,
ENV_TIME: self.sys_timezone,
ENV_TOKEN: self.sys_homeassistant.hassio_token,
},
volumes={
str(self.sys_config.path_extern_homeassistant):
{'bind': '/config', 'mode': 'rw'},
str(self.sys_config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'ro'},
str(self.sys_config.path_extern_share):
{'bind': '/share', 'mode': 'rw'},
}
str(self.sys_config.path_extern_homeassistant): {
"bind": "/config",
"mode": "rw",
},
str(self.sys_config.path_extern_ssl): {"bind": "/ssl", "mode": "ro"},
str(self.sys_config.path_extern_share): {
"bind": "/share",
"mode": "rw",
},
},
)

if ret:
_LOGGER.info("Start homeassistant %s with version %s",
self.image, self.version)
_LOGGER.info("Start homeassistant %s with version %s", self.image, self.version)
self._meta = docker_container.attrs

return ret

def _execute_command(self, command):
def _execute_command(self, command: str) -> CommandReturn:
"""Create a temporary container and run command.

Need run inside executor.
@@ -94,31 +105,37 @@ class DockerHomeAssistant(DockerInterface):
detach=True,
stdout=True,
stderr=True,
environment={
ENV_TIME: self.sys_timezone,
},
environment={ENV_TIME: self.sys_timezone},
volumes={
str(self.sys_config.path_extern_homeassistant):
{'bind': '/config', 'mode': 'rw'},
str(self.sys_config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'ro'},
str(self.sys_config.path_extern_share):
{'bind': '/share', 'mode': 'ro'},
}
str(self.sys_config.path_extern_homeassistant): {
"bind": "/config",
"mode": "rw",
},
str(self.sys_config.path_extern_ssl): {"bind": "/ssl", "mode": "ro"},
str(self.sys_config.path_extern_share): {
"bind": "/share",
"mode": "ro",
},
},
)

def is_initialize(self):
def is_initialize(self) -> Awaitable[bool]:
"""Return True if Docker container exists."""
return self.sys_run_in_executor(self._is_initialize)

def _is_initialize(self):
def _is_initialize(self) -> bool:
"""Return True if docker container exists.

Need run inside executor.
"""
try:
self.sys_docker.containers.get(self.name)
docker_container = self.sys_docker.containers.get(self.name)
docker_image = self.sys_docker.images.get(self.image)
except docker.errors.DockerException:
return False

# we run on an old image, stop and start it
if docker_container.image.id != docker_image.id:
return False

return True
@@ -2,13 +2,16 @@
import asyncio
from contextlib import suppress
import logging
from typing import Any, Dict, Optional, Awaitable

import docker

from .stats import DockerStats
from ..const import LABEL_VERSION, LABEL_ARCH
from ..coresys import CoreSysAttributes
from ..const import LABEL_ARCH, LABEL_VERSION
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import DockerAPIError
from ..utils import process_lock
from .stats import DockerStats
from . import CommandReturn

_LOGGER = logging.getLogger(__name__)
@@ -16,60 +19,60 @@ _LOGGER = logging.getLogger(__name__)
class DockerInterface(CoreSysAttributes):
"""Docker Hass.io interface."""

def __init__(self, coresys):
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys = coresys
self._meta = None
self.lock = asyncio.Lock(loop=coresys.loop)
self.coresys: CoreSys = coresys
self._meta: Optional[Dict[str, Any]] = None
self.lock: asyncio.Lock = asyncio.Lock(loop=coresys.loop)

@property
def timeout(self):
def timeout(self) -> str:
"""Return timeout for Docker actions."""
return 30

@property
def name(self):
def name(self) -> Optional[str]:
"""Return name of Docker container."""
return None

@property
def meta_config(self):
def meta_config(self) -> Dict[str, Any]:
"""Return meta data of configuration for container/image."""
if not self._meta:
return {}
return self._meta.get('Config', {})
return self._meta.get("Config", {})

@property
def meta_labels(self):
def meta_labels(self) -> Dict[str, str]:
"""Return meta data of labels for container/image."""
return self.meta_config.get('Labels') or {}
return self.meta_config.get("Labels") or {}

@property
def image(self):
def image(self) -> Optional[str]:
"""Return name of Docker image."""
return self.meta_config.get('Image')
return self.meta_config.get("Image")

@property
def version(self):
def version(self) -> Optional[str]:
"""Return version of Docker image."""
return self.meta_labels.get(LABEL_VERSION)

@property
def arch(self):
def arch(self) -> Optional[str]:
"""Return arch of Docker image."""
return self.meta_labels.get(LABEL_ARCH)

@property
def in_progress(self):
def in_progress(self) -> bool:
"""Return True if a task is in progress."""
return self.lock.locked()

@process_lock
def install(self, tag, image=None):
def install(self, tag: str, image: Optional[str] = None):
"""Pull docker image."""
return self.sys_run_in_executor(self._install, tag, image)

def _install(self, tag, image=None):
def _install(self, tag: str, image: Optional[str] = None) -> None:
"""Pull Docker image.

Need run inside executor.
@@ -80,20 +83,19 @@ class DockerInterface(CoreSysAttributes):
_LOGGER.info("Pull image %s tag %s.", image, tag)
docker_image = self.sys_docker.images.pull(f"{image}:{tag}")

docker_image.tag(image, tag='latest')
self._meta = docker_image.attrs
_LOGGER.info("Tag image %s with version %s as latest", image, tag)
docker_image.tag(image, tag="latest")
except docker.errors.APIError as err:
_LOGGER.error("Can't install %s:%s -> %s.", image, tag, err)
return False
raise DockerAPIError() from None
else:
self._meta = docker_image.attrs

_LOGGER.info("Tag image %s with version %s as latest", image, tag)
return True

def exists(self):
def exists(self) -> Awaitable[bool]:
"""Return True if Docker image exists in local repository."""
return self.sys_run_in_executor(self._exists)

def _exists(self):
def _exists(self) -> bool:
"""Return True if Docker image exists in local repository.

Need run inside executor.
@@ -106,14 +108,14 @@ class DockerInterface(CoreSysAttributes):

return True

def is_running(self):
def is_running(self) -> Awaitable[bool]:
"""Return True if Docker is running.

Return a Future.
"""
return self.sys_run_in_executor(self._is_running)

def _is_running(self):
def _is_running(self) -> bool:
"""Return True if Docker is running.

Need run inside executor.
@@ -125,7 +127,7 @@ class DockerInterface(CoreSysAttributes):
return False

# container is not running
if docker_container.status != 'running':
if docker_container.status != "running":
return False

# we run on an old image, stop and start it
@@ -139,7 +141,7 @@ class DockerInterface(CoreSysAttributes):
"""Attach to running Docker container."""
return self.sys_run_in_executor(self._attach)

def _attach(self):
def _attach(self) -> None:
"""Attach to running docker container.

Need run inside executor.
@@ -147,22 +149,21 @@ class DockerInterface(CoreSysAttributes):
try:
if self.image:
self._meta = self.sys_docker.images.get(self.image).attrs
else:
self._meta = self.sys_docker.containers.get(self.name).attrs
self._meta = self.sys_docker.containers.get(self.name).attrs
except docker.errors.DockerException:
return False
pass

_LOGGER.info("Attach to image %s with version %s", self.image,
self.version)

return True
# Successfull?
if not self._meta:
raise DockerAPIError() from None
_LOGGER.info("Attach to %s with version %s", self.image, self.version)

@process_lock
def run(self):
def run(self) -> Awaitable[None]:
"""Run Docker image."""
return self.sys_run_in_executor(self._run)

def _run(self):
def _run(self) -> None:
"""Run Docker image.

Need run inside executor.
@@ -170,96 +171,117 @@ class DockerInterface(CoreSysAttributes):
raise NotImplementedError()

@process_lock
def stop(self):
def stop(self, remove_container=True) -> Awaitable[None]:
"""Stop/remove Docker container."""
return self.sys_run_in_executor(self._stop)
return self.sys_run_in_executor(self._stop, remove_container)

def _stop(self):
"""Stop/remove and remove docker container.
def _stop(self, remove_container=True) -> None:
"""Stop/remove Docker container.

Need run inside executor.
"""
try:
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return False
raise DockerAPIError() from None

if docker_container.status == 'running':
_LOGGER.info("Stop %s Docker application", self.image)
if docker_container.status == "running":
_LOGGER.info("Stop %s application", self.name)
with suppress(docker.errors.DockerException):
docker_container.stop(timeout=self.timeout)

with suppress(docker.errors.DockerException):
_LOGGER.info("Clean %s Docker application", self.image)
docker_container.remove(force=True)

return True
if remove_container:
with suppress(docker.errors.DockerException):
_LOGGER.info("Clean %s application", self.name)
docker_container.remove(force=True)

@process_lock
def remove(self):
def start(self) -> Awaitable[None]:
"""Start Docker container."""
return self.sys_run_in_executor(self._start)

def _start(self) -> None:
"""Start docker container.

Need run inside executor.
"""
try:
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
raise DockerAPIError() from None

_LOGGER.info("Start %s", self.image)
try:
docker_container.start()
except docker.errors.DockerException as err:
_LOGGER.error("Can't start %s: %s", self.image, err)
raise DockerAPIError() from None

@process_lock
def remove(self) -> Awaitable[None]:
"""Remove Docker images."""
return self.sys_run_in_executor(self._remove)

def _remove(self):
def _remove(self) -> None:
"""remove docker images.

Need run inside executor.
"""
# Cleanup container
self._stop()
with suppress(DockerAPIError):
self._stop()

_LOGGER.info("Remove Docker %s with latest and %s", self.image,
self.version)
_LOGGER.info("Remove image %s with latest and %s", self.image, self.version)

try:
with suppress(docker.errors.ImageNotFound):
self.sys_docker.images.remove(
image=f"{self.image}:latest", force=True)
self.sys_docker.images.remove(image=f"{self.image}:latest", force=True)

with suppress(docker.errors.ImageNotFound):
self.sys_docker.images.remove(
image=f"{self.image}:{self.version}", force=True)
image=f"{self.image}:{self.version}", force=True
)

except docker.errors.DockerException as err:
_LOGGER.warning("Can't remove image %s: %s", self.image, err)
return False
raise DockerAPIError() from None

self._meta = None
return True

@process_lock
def update(self, tag, image=None):
def update(self, tag: str, image: Optional[str] = None) -> Awaitable[None]:
"""Update a Docker image."""
return self.sys_run_in_executor(self._update, tag, image)

def _update(self, tag, image=None):
def _update(self, tag: str, image: Optional[str] = None) -> None:
"""Update a docker image.

Need run inside executor.
"""
image = image or self.image

_LOGGER.info("Update Docker %s:%s to %s:%s", self.image, self.version,
image, tag)
_LOGGER.info(
"Update image %s:%s to %s:%s", self.image, self.version, image, tag
)

# Update docker image
if not self._install(tag, image):
return False
self._install(tag, image)

# Stop container & cleanup
self._stop()
self._cleanup()
with suppress(DockerAPIError):
try:
self._stop()
finally:
self._cleanup()

return True

def logs(self):
def logs(self) -> Awaitable[bytes]:
"""Return Docker logs of container.

Return a Future.
"""
return self.sys_run_in_executor(self._logs)

def _logs(self):
def _logs(self) -> bytes:
"""Return Docker logs of container.

Need run inside executor.
@@ -275,11 +297,11 @@ class DockerInterface(CoreSysAttributes):
_LOGGER.warning("Can't grep logs from %s: %s", self.image, err)

@process_lock
def cleanup(self):
def cleanup(self) -> Awaitable[None]:
"""Check if old version exists and cleanup."""
return self.sys_run_in_executor(self._cleanup)

def _cleanup(self):
def _cleanup(self) -> None:
"""Check if old version exists and cleanup.

Need run inside executor.
@@ -288,35 +310,55 @@ class DockerInterface(CoreSysAttributes):
latest = self.sys_docker.images.get(self.image)
except docker.errors.DockerException:
_LOGGER.warning("Can't find %s for cleanup", self.image)
return False
raise DockerAPIError() from None

for image in self.sys_docker.images.list(name=self.image):
if latest.id == image.id:
continue

with suppress(docker.errors.DockerException):
_LOGGER.info("Cleanup Docker images: %s", image.tags)
_LOGGER.info("Cleanup images: %s", image.tags)
self.sys_docker.images.remove(image.id, force=True)

return True
@process_lock
def restart(self) -> Awaitable[None]:
"""Restart docker container."""
return self.sys_loop.run_in_executor(None, self._restart)

def _restart(self) -> None:
"""Restart docker container.

Need run inside executor.
"""
try:
container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
raise DockerAPIError() from None

_LOGGER.info("Restart %s", self.image)
try:
container.restart(timeout=self.timeout)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't restart %s: %s", self.image, err)
raise DockerAPIError() from None

@process_lock
def execute_command(self, command):
def execute_command(self, command: str) -> Awaitable[CommandReturn]:
"""Create a temporary container and run command."""
return self.sys_run_in_executor(self._execute_command, command)

def _execute_command(self, command):
def _execute_command(self, command: str) -> CommandReturn:
"""Create a temporary container and run command.

Need run inside executor.
"""
raise NotImplementedError()

def stats(self):
def stats(self) -> Awaitable[DockerStats]:
"""Read and return stats from container."""
return self.sys_run_in_executor(self._stats)

def _stats(self):
def _stats(self) -> DockerStats:
"""Create a temporary container and run command.

Need run inside executor.
@@ -324,11 +366,38 @@ class DockerInterface(CoreSysAttributes):
try:
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return None
raise DockerAPIError() from None

try:
stats = docker_container.stats(stream=False)
return DockerStats(stats)
except docker.errors.DockerException as err:
_LOGGER.error("Can't read stats from %s: %s", self.name, err)
return None
raise DockerAPIError() from None

def is_fails(self) -> Awaitable[bool]:
"""Return True if Docker is failing state.

Return a Future.
"""
return self.sys_run_in_executor(self._is_fails)

def _is_fails(self) -> bool:
"""Return True if Docker is failing state.

Need run inside executor.
"""
try:
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return False

# container is not running
if docker_container.status != "exited":
return False

# Check return value
if int(docker_container.attrs["State"]["ExitCode"]) != 0:
return True

return False
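The hunks above replace boolean success returns with a raised `DockerAPIError`, and callers that only need best-effort behavior wrap the call in `contextlib.suppress`. The following minimal sketch illustrates that calling pattern in isolation; `FakeInterface` and its method bodies are stand-ins invented for the example, not code from this repository.

```python
"""Minimal sketch of the error-handling pattern introduced above (assumed names)."""
from contextlib import suppress


class DockerAPIError(Exception):
    """Raised when a low-level Docker operation fails."""


class FakeInterface:
    """Stand-in for DockerInterface, only to show the calling convention."""

    def _stop(self, remove_container: bool = True) -> None:
        # Failures now raise instead of returning False.
        raise DockerAPIError()

    def _run(self) -> None:
        # Best-effort cleanup: a failed stop no longer aborts the run.
        with suppress(DockerAPIError):
            self._stop(remove_container=True)


if __name__ == "__main__":
    FakeInterface()._run()  # completes silently; the stop failure is suppressed
```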
@@ -1,9 +1,12 @@
"""Internal network manager for Hass.io."""
from ipaddress import IPv4Address
import logging
from typing import List, Optional

import docker

from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE
from ..const import DOCKER_NETWORK, DOCKER_NETWORK_MASK, DOCKER_NETWORK_RANGE
from ..exceptions import DockerAPIError

_LOGGER = logging.getLogger(__name__)

@@ -14,32 +17,32 @@ class DockerNetwork:
This class is not AsyncIO safe!
"""

def __init__(self, dock):
def __init__(self, docker_client: docker.DockerClient):
"""Initialize internal Hass.io network."""
self.docker = dock
self.network = self._get_network()
self.docker: docker.DockerClient = docker_client
self.network: docker.models.networks.Network = self._get_network()

@property
def name(self):
def name(self) -> str:
"""Return name of network."""
return DOCKER_NETWORK

@property
def containers(self):
def containers(self) -> List[docker.models.containers.Container]:
"""Return of connected containers from network."""
return self.network.containers

@property
def gateway(self):
def gateway(self) -> IPv4Address:
"""Return gateway of the network."""
return DOCKER_NETWORK_MASK[1]

@property
def supervisor(self):
def supervisor(self) -> IPv4Address:
"""Return supervisor of the network."""
return DOCKER_NETWORK_MASK[2]

def _get_network(self):
def _get_network(self) -> docker.models.networks.Network:
"""Get HassIO network."""
try:
return self.docker.networks.get(DOCKER_NETWORK)
@@ -49,18 +52,25 @@ class DockerNetwork:
ipam_pool = docker.types.IPAMPool(
subnet=str(DOCKER_NETWORK_MASK),
gateway=str(self.gateway),
iprange=str(DOCKER_NETWORK_RANGE)
iprange=str(DOCKER_NETWORK_RANGE),
)

ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

return self.docker.networks.create(
DOCKER_NETWORK, driver='bridge', ipam=ipam_config,
enable_ipv6=False, options={
"com.docker.network.bridge.name": DOCKER_NETWORK,
})
DOCKER_NETWORK,
driver="bridge",
ipam=ipam_config,
enable_ipv6=False,
options={"com.docker.network.bridge.name": DOCKER_NETWORK},
)

def attach_container(self, container, alias=None, ipv4=None):
def attach_container(
self,
container: docker.models.containers.Container,
alias: Optional[List[str]] = None,
ipv4: Optional[IPv4Address] = None,
) -> None:
"""Attach container to Hass.io network.

Need run inside executor.
@@ -71,23 +81,24 @@ class DockerNetwork:
self.network.connect(container, aliases=alias, ipv4_address=ipv4)
except docker.errors.APIError as err:
_LOGGER.error("Can't link container to hassio-net: %s", err)
return False
raise DockerAPIError() from None

self.network.reload()
return True

def detach_default_bridge(self, container):
def detach_default_bridge(
self, container: docker.models.containers.Container
) -> None:
"""Detach default Docker bridge.

Need run inside executor.
"""
try:
default_network = self.docker.networks.get('bridge')
default_network = self.docker.networks.get("bridge")
default_network.disconnect(container)

except docker.errors.NotFound:
return

except docker.errors.APIError as err:
_LOGGER.warning(
"Can't disconnect container from default: %s", err)
_LOGGER.warning("Can't disconnect container from default: %s", err)
raise DockerAPIError() from None
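For reference, a short sketch of how the docker-py IPAM objects used by `_get_network()` fit together; the subnet, gateway, and IP range values below are illustrative placeholders, not the constants defined in this repository.

```python
"""Illustrative sketch of building an IPAM config with docker-py (example values only)."""
import docker

# Example address plan; the real values come from the project's constants.
ipam_pool = docker.types.IPAMPool(
    subnet="172.30.32.0/23",
    gateway="172.30.32.1",
    iprange="172.30.33.0/24",
)
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

# A client would pass this when creating the bridge network, e.g.:
# docker.from_env().networks.create(
#     "hassio", driver="bridge", ipam=ipam_config, enable_ipv6=False
# )
print(ipam_config)
```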