Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-12 18:49:20 +00:00
Compare commits
81 Commits
f4e9dd0f1c 7f074142bf b6df37628d 7867eded50 311abb8a90 21303f4b05
da3270af67 35aae69f23 118a2e1951 9053341581 27532a8a00 7fdfa630b5
3974d5859f aa1c765c4b e78385e7ea 9d59b56c94 9d72dcabfc a0b5d0b67e
2b5520405f ca376b3fcd 11e3c0c547 9da136e037 9b3e59d876 7a592795b5
5b92137699 7520cdfeb4 0ada791e3a 73afced4dc 633a2e93bf 07c4058a8c
b6f3938b14 57534fac96 4a03e72983 ddb29ea9b1 95179c30f7 f49970ce2c
790818d1aa 62f675e613 f33434fb01 254d6aee32 a5ecd597ed 0fab3e940a
60fbebc16b ec366d8112 b8818788c9 e23f6f6998 05b58d76b9 644d13e3fa
9de71472d4 bf28227b91 4c1ee49068 6e7cf5e4c9 11f8c97347 a1461fd518
fa5c2e37d3 1f091b20ad d3b4a03851 fb12fee59b 7a87d2334a 9591e71138
cecad526a2 53dab4ee45 8abbba46c7 0f01ac1b59 aa8ab593c0 84f791220e
cee2c5469f 6e75964a8b 5ab5036504 000a3c1f7e 8ea123eb94 571c42ef7d
8443da0b9f 7dbbcf24c8 468cb0c36b 78e093df96 ec4d7dab21 d00ee0adea
55d5ee4ed4 0e51d74265 916f3caedd
29 .github/ISSUE_TEMPLATE.md vendored Normal file
@@ -0,0 +1,29 @@
<!-- READ THIS FIRST:
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
- Do not report issues for components here, please refer to https://github.com/home-assistant/home-assistant/issues
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
- Provide as many details as possible. Paste logs, configuration samples and code into the backticks. Do not delete any text from this template!
- If you have a problem with an add-on, open an issue on its repository.
-->

**Home Assistant release with the issue:**
<!--
- Frontend -> Developer tools -> Info
- Or use this command: hass --version
-->

**Operating environment (HassOS/Generic):**
<!--
Please provide details about your environment.
-->

**Supervisor logs:**
<!--
- Frontend -> Hass.io -> System
- Or use this command: hassio su logs
-->


**Description of problem:**

1 .gitmodules vendored
@@ -1,3 +1,4 @@
 [submodule "home-assistant-polymer"]
   path = home-assistant-polymer
   url = https://github.com/home-assistant/home-assistant-polymer
+  branch = dev
32 API.md
@@ -314,9 +314,10 @@ Load host configs from a USB stick.
     "CARD_ID": {
         "name": "xy",
         "type": "microphone",
-        "devices": {
-            "DEV_ID": "type of device"
-        }
+        "devices": [
+            "chan_id": "channel ID",
+            "chan_type": "type of device"
+        ]
     }
   }
}
@@ -345,6 +346,7 @@ Load host configs from a USB stick.
{
    "version": "INSTALL_VERSION",
    "last_version": "LAST_VERSION",
    "arch": "arch",
+   "machine": "Image machine type",
    "image": "str",
    "custom": "bool -> if custom image",
@@ -352,7 +354,7 @@ Load host configs from a USB stick.
    "port": 8123,
    "ssl": "bool",
    "watchdog": "bool",
-   "startup_time": 600
+   "wait_boot": 600
}
```
@@ -386,7 +388,7 @@ Output is the raw Docker log.
    "password": "",
    "refresh_token": "",
    "watchdog": "bool",
-   "startup_time": 600
+   "wait_boot": 600
}
```
@@ -415,7 +417,7 @@ Proxy to real websocket instance.

### RESTful for API addons

-If a add-on will call itself, you can use `/addons/self/...`.
+If an add-on will call itself, you can use `/addons/self/...`.

- GET `/addons`

@@ -488,12 +490,14 @@ Get all available addons.
    "hassio_api": "bool",
    "hassio_role": "default|homeassistant|manager|admin",
    "homeassistant_api": "bool",
+   "auth_api": "bool",
    "full_access": "bool",
    "protected": "bool",
    "rating": "1-6",
    "stdin": "bool",
    "webui": "null|http(s)://[HOST]:port/xy/zx",
    "gpio": "bool",
+   "kernel_modules": "bool",
    "devicetree": "bool",
    "docker_api": "bool",
    "audio": "bool",
@@ -663,14 +667,28 @@ return:

### Misc

-- GET `/version`
+- GET `/info`
```json
{
    "supervisor": "version",
    "homeassistant": "version",
    "hassos": "null|version",
    "hostname": "name",
    "machine": "type",
    "arch": "arch",
    "supported_arch": ["arch1", "arch2"],
    "channel": "stable|beta|dev"
}
```

### Auth / SSO API

You can use the Home Assistant user system. The Supervisor handles this
auth system.

You can call POST `/auth`.

We support:
- JSON: `{ "user|name": "...", "password": "..." }`
- application/x-www-form-urlencoded: `user|name=...&password=...`
- BasicAuth
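A hedged client sketch of the three request styles above; the `http://hassio` base URL and the `X-HASSIO-KEY` header are assumptions about how an add-on reaches the Supervisor, not part of this document:

```python
import os

import requests  # third-party HTTP client; assumed available in the add-on

SUPERVISOR = "http://hassio"  # assumed Supervisor hostname on the add-on network
HEADERS = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}

# 1. JSON body
resp = requests.post(f"{SUPERVISOR}/auth", headers=HEADERS,
                     json={"username": "jane", "password": "secret"})

# 2. application/x-www-form-urlencoded body
resp = requests.post(f"{SUPERVISOR}/auth", headers=HEADERS,
                     data={"username": "jane", "password": "secret"})

# 3. BasicAuth header
resp = requests.post(f"{SUPERVISOR}/auth", headers=HEADERS,
                     auth=("jane", "secret"))

print(resp.status_code)  # 200 on valid credentials
```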
11 Dockerfile
@@ -3,6 +3,9 @@ FROM $BUILD_FROM

# Install base
RUN apk add --no-cache \
    openssl \
    libffi \
    musl \
    git \
    socat \
    glib \
@@ -12,8 +15,12 @@ RUN apk add --no-cache \

# Install requirements
COPY requirements.txt /usr/src/
RUN apk add --no-cache --virtual .build-dependencies \
-    make \
-    g++ \
+    make \
+    g++ \
+    openssl-dev \
+    libffi-dev \
+    musl-dev \
+    && export MAKEFLAGS="-j$(nproc)" \
    && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
    && apk del .build-dependencies \
    && rm -f /usr/src/requirements.txt
16 README.md
@@ -10,9 +10,19 @@ and updating software.

![Screenshot of the Hass.io panel](misc/hassio.png?raw=true)

- [Hass.io Addons](https://github.com/home-assistant/hassio-addons)
- [Hass.io Build](https://github.com/home-assistant/hassio-build)

## Installation

Installation instructions can be found at <https://home-assistant.io/hassio>.

## Development

The development of the supervisor is a bit tricky. Not difficult, but tricky.

- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-build/tree/master/builder
- Go into a HassOS device or VM and pull your supervisor.
- Set developer mode in updater.json.
- Tag it as `homeassistant/xy-hassio-supervisor:latest`.
- Restart the service: `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`.
- Test your changes.

For small bugfixes or improvements, make a PR. For significant changes, open an RFC first.
hassio/__main__.py
@@ -9,7 +9,7 @@ from hassio import bootstrap
_LOGGER = logging.getLogger(__name__)


-def attempt_use_uvloop():
+def initialize_event_loop():
    """Attempt to use uvloop."""
    try:
        import uvloop
@@ -17,13 +17,17 @@ def attempt_use_uvloop():
    except ImportError:
        pass

+    return asyncio.get_event_loop()
+

# pylint: disable=invalid-name
if __name__ == "__main__":
    bootstrap.initialize_logging()
-    attempt_use_uvloop()
-    loop = asyncio.get_event_loop()
+
+    # Init async event loop
+    loop = initialize_event_loop()

    # Check if all information are available to setup Hass.io
    if not bootstrap.check_environment():
        sys.exit(1)
@@ -32,7 +36,7 @@ if __name__ == "__main__":
    loop.set_default_executor(executor)

    _LOGGER.info("Initialize Hass.io setup")
-    coresys = bootstrap.initialize_coresys(loop)
+    coresys = loop.run_until_complete(bootstrap.initialize_coresys())

    bootstrap.migrate_system_env(coresys)
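For reference, a minimal runnable sketch of the refactored helper as the hunks above suggest it; the `set_event_loop_policy` call is an assumption based on standard uvloop usage, since the diff elides the line between `import uvloop` and `except ImportError`:

```python
import asyncio


def initialize_event_loop():
    """Attempt to use uvloop, falling back to the stock asyncio loop."""
    try:
        import uvloop

        # Assumed from standard uvloop usage; this line is elided in the hunk.
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    except ImportError:
        pass

    return asyncio.get_event_loop()
```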
hassio/addons/addon.py
@@ -1,8 +1,8 @@
"""Init file for Hass.io add-ons."""
from contextlib import suppress
from copy import deepcopy
-import logging
import json
+import logging
from pathlib import Path, PurePath
import re
import shutil
@@ -12,30 +12,30 @@ from tempfile import TemporaryDirectory
import voluptuous as vol
from voluptuous.humanize import humanize_error

-from .validate import (
-    validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME, RE_SERVICE,
-    MACHINE_ALL)
-from .utils import check_installed, remove_data
from ..const import (
-    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
-    ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
-    ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
-    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP, ATTR_UUID,
-    STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
-    ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
-    ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
-    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
-    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES,
-    ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API, ATTR_FULL_ACCESS,
-    ATTR_PROTECTED, ATTR_ACCESS_TOKEN, ATTR_HOST_PID, ATTR_HASSIO_ROLE,
-    ATTR_MACHINE,
-    SECURITY_PROFILE, SECURITY_DISABLE, SECURITY_DEFAULT)
+    ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_AUDIO, ATTR_AUDIO_INPUT,
+    ATTR_AUDIO_OUTPUT, ATTR_AUTH_API, ATTR_AUTO_UART, ATTR_AUTO_UPDATE,
+    ATTR_BOOT, ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
+    ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
+    ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
+    ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
+    ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE, ATTR_MAP,
+    ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS, ATTR_PORTS, ATTR_PRIVILEGED,
+    ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA, ATTR_SERVICES, ATTR_SLUG,
+    ATTR_STARTUP, ATTR_STATE, ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT,
+    ATTR_TMPFS, ATTR_URL, ATTR_USER, ATTR_UUID, ATTR_VERSION, ATTR_WEBUI,
+    SECURITY_DEFAULT, SECURITY_DISABLE, SECURITY_PROFILE, STATE_NONE,
+    STATE_STARTED, STATE_STOPPED)
from ..coresys import CoreSysAttributes
from ..docker.addon import DockerAddon
-from ..utils import create_token
-from ..utils.json import write_json_file, read_json_file
-from ..utils.apparmor import adjust_profile
from ..exceptions import HostAppArmorError
+from ..utils import create_token
+from ..utils.apparmor import adjust_profile
+from ..utils.json import read_json_file, write_json_file
+from .utils import check_installed, remove_data
+from .validate import (
+    MACHINE_ALL, RE_SERVICE, RE_VOLUME, SCHEMA_ADDON_SNAPSHOT,
+    validate_options)

_LOGGER = logging.getLogger(__name__)

@@ -56,8 +56,14 @@ class Addon(CoreSysAttributes):

    async def load(self):
        """Async initialize of object."""
-        if self.is_installed:
-            await self.instance.attach()
+        if not self.is_installed:
+            return
+        await self.instance.attach()
+
+        # NOTE: Can't be removed after soon
+        if ATTR_IMAGE not in self._data.user[self._id]:
+            self._data.user[self._id][ATTR_IMAGE] = self.image_name
+            self.save_data()

    @property
    def slug(self):
@@ -87,10 +93,14 @@ class Addon(CoreSysAttributes):
    @property
    def available(self):
        """Return True if this add-on is available on this platform."""
-        if self.sys_arch not in self.supported_arch:
+        # Architecture
+        if not self.sys_arch.is_supported(self.supported_arch):
            return False

+        # Machine / Hardware
+        if self.sys_machine not in self.supported_machine:
+            return False
+
        return True

    @property
@@ -104,26 +114,27 @@ class Addon(CoreSysAttributes):
        self._data.user[self._id] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: version,
+            ATTR_IMAGE: self.image_name,
        }
-        self._data.save_data()
+        self.save_data()

    def _set_uninstall(self):
        """Set add-on as uninstalled."""
        self._data.system.pop(self._id, None)
        self._data.user.pop(self._id, None)
-        self._data.save_data()
+        self.save_data()

    def _set_update(self, version):
        """Update version of add-on."""
        self._data.system[self._id] = deepcopy(self._data.cache[self._id])
        self._data.user[self._id][ATTR_VERSION] = version
-        self._data.save_data()
+        self.save_data()

    def _restore_data(self, user, system):
        """Restore data to add-on."""
        self._data.user[self._id] = deepcopy(user)
        self._data.system[self._id] = deepcopy(system)
-        self._data.save_data()
+        self.save_data()

    @property
    def options(self):
@@ -401,6 +412,11 @@ class Addon(CoreSysAttributes):
        """Return True if the add-on access to GPIO interface."""
        return self._mesh[ATTR_GPIO]

+    @property
+    def with_kernel_modules(self):
+        """Return True if the add-on access to kernel modules."""
+        return self._mesh[ATTR_KERNEL_MODULES]
+
    @property
    def with_full_access(self):
        """Return True if the add-on want full access to hardware."""
@@ -411,6 +427,11 @@ class Addon(CoreSysAttributes):
        """Return True if the add-on read access to devicetree."""
        return self._mesh[ATTR_DEVICETREE]

+    @property
+    def access_auth_api(self):
+        """Return True if the add-on access to login/auth backend."""
+        return self._mesh[ATTR_AUTH_API]
+
    @property
    def with_audio(self):
        """Return True if the add-on access to audio."""
@@ -486,21 +507,36 @@ class Addon(CoreSysAttributes):
    @property
    def image(self):
        """Return image name of add-on."""
-        addon_data = self._mesh
+        if self.is_installed:
+            # NOTE: cleanup
+            if ATTR_IMAGE in self._data.user[self._id]:
+                return self._data.user[self._id][ATTR_IMAGE]
+        return self.image_name
+
+    @property
+    def image_name(self):
+        """Return image name for install/update."""
+        if self.is_detached:
+            addon_data = self._data.system.get(self._id)
+        else:
+            addon_data = self._data.cache.get(self._id)

        # Repository with Dockerhub images
        if ATTR_IMAGE in addon_data:
-            return addon_data[ATTR_IMAGE].format(arch=self.sys_arch)
+            arch = self.sys_arch.match(addon_data[ATTR_ARCH])
+            return addon_data[ATTR_IMAGE].format(arch=arch)

        # local build
-        return "{}/{}-addon-{}".format(
-            addon_data[ATTR_REPOSITORY], self.sys_arch,
-            addon_data[ATTR_SLUG])
+        return (f"{addon_data[ATTR_REPOSITORY]}/"
+                f"{self.sys_arch.default}-"
+                f"addon-{addon_data[ATTR_SLUG]}")

    @property
    def need_build(self):
        """Return True if this add-on need a local build."""
-        return ATTR_IMAGE not in self._mesh
+        if self.is_detached:
+            return ATTR_IMAGE not in self._data.system.get(self._id)
+        return ATTR_IMAGE not in self._data.cache.get(self._id)

    @property
    def map_volumes(self):
@@ -669,7 +705,8 @@ class Addon(CoreSysAttributes):
        """Install an add-on."""
        if not self.available:
            _LOGGER.error(
-                "Add-on %s not supported on %s", self._id, self.sys_arch)
+                "Add-on %s not supported on %s with %s architecture",
+                self._id, self.sys_machine, self.sys_arch.supported)
            return False

        if self.is_installed:
@@ -684,7 +721,8 @@ class Addon(CoreSysAttributes):
        # Setup/Fix AppArmor profile
        await self._install_apparmor()

-        if not await self.instance.install(self.last_version):
+        if not await self.instance.install(
+                self.last_version, self.image_name):
            return False

        self._set_install(self.last_version)
@@ -735,7 +773,7 @@ class Addon(CoreSysAttributes):

        # Access Token
        self._data.user[self._id][ATTR_ACCESS_TOKEN] = create_token()
-        self._data.save_data()
+        self.save_data()

        # Options
        if not self.write_options():
@@ -764,7 +802,8 @@ class Addon(CoreSysAttributes):
            _LOGGER.warning("No update available for add-on %s", self._id)
            return False

-        if not await self.instance.update(self.last_version):
+        if not await self.instance.update(
+                self.last_version, self.image_name):
            return False
        self._set_update(self.last_version)
hassio/addons/build.py
@@ -1,45 +1,50 @@
"""Hass.io add-on build environment."""
+from __future__ import annotations
+
from pathlib import Path
+from typing import TYPE_CHECKING, Dict

-from .validate import SCHEMA_BUILD_CONFIG, BASE_IMAGE
-from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON
-from ..coresys import CoreSysAttributes
+from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
+from ..coresys import CoreSys, CoreSysAttributes
from ..utils.json import JsonConfig
+from .validate import SCHEMA_BUILD_CONFIG
+
+if TYPE_CHECKING:
+    from .addon import Addon


class AddonBuild(JsonConfig, CoreSysAttributes):
    """Handle build options for add-ons."""

-    def __init__(self, coresys, slug):
+    def __init__(self, coresys: CoreSys, slug: str) -> None:
        """Initialize Hass.io add-on builder."""
-        self.coresys = coresys
-        self._id = slug
+        self.coresys: CoreSys = coresys
+        self._id: str = slug

        super().__init__(
            Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)

    def save_data(self):
        """Ignore save function."""
        pass

    @property
-    def addon(self):
+    def addon(self) -> Addon:
        """Return add-on of build data."""
        return self.sys_addons.get(self._id)

    @property
-    def base_image(self):
+    def base_image(self) -> str:
        """Base images for this add-on."""
        return self._data[ATTR_BUILD_FROM].get(
-            self.sys_arch, BASE_IMAGE[self.sys_arch])
+            self.sys_arch.default,
+            f"homeassistant/{self.sys_arch.default}-base:latest")

    @property
-    def squash(self):
+    def squash(self) -> bool:
        """Return True or False if squash is active."""
        return self._data[ATTR_SQUASH]

    @property
-    def additional_args(self):
+    def additional_args(self) -> Dict[str, str]:
        """Return additional Docker build arguments."""
        return self._data[ATTR_ARGS]
@@ -53,7 +58,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
            'squash': self.squash,
            'labels': {
                'io.hass.version': version,
-                'io.hass.arch': self.sys_arch,
+                'io.hass.arch': self.sys_arch.default,
                'io.hass.type': META_ADDON,
                'io.hass.name': self._fix_label('name'),
                'io.hass.description': self._fix_label('description'),
@@ -61,7 +66,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
            'buildargs': {
                'BUILD_FROM': self.base_image,
                'BUILD_VERSION': version,
-                'BUILD_ARCH': self.sys_arch,
+                'BUILD_ARCH': self.sys_arch.default,
                **self.additional_args,
            }
        }
@@ -71,7 +76,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):

        return args

-    def _fix_label(self, label_name):
+    def _fix_label(self, label_name: str) -> str:
        """Remove characters they are not supported."""
        label = getattr(self.addon, label_name, "")
        return label.replace("'", "")
hassio/addons/data.py
@@ -99,29 +99,32 @@ class AddonsData(JsonConfig, CoreSysAttributes):
            try:
                addon_config = read_json_file(addon)
-
-                # validate
-                addon_config = SCHEMA_ADDON_CONFIG(addon_config)
-
-                # Generate slug
-                addon_slug = "{}_{}".format(
-                    repository, addon_config[ATTR_SLUG])
-
-                # store
-                addon_config[ATTR_REPOSITORY] = repository
-                addon_config[ATTR_LOCATON] = str(addon.parent)
-                self._cache[addon_slug] = addon_config
-
-            except (OSError, json.JSONDecodeError):
+            except (OSError, json.JSONDecodeError, UnicodeDecodeError):
                _LOGGER.warning("Can't read %s", addon)
                continue

+            # validate
+            try:
+                addon_config = SCHEMA_ADDON_CONFIG(addon_config)
+            except vol.Invalid as ex:
+                _LOGGER.warning("Can't read %s: %s", addon,
+                                humanize_error(addon_config, ex))
+                continue
+
+            # Generate slug
+            addon_slug = "{}_{}".format(
+                repository, addon_config[ATTR_SLUG])
+
+            # store
+            addon_config[ATTR_REPOSITORY] = repository
+            addon_config[ATTR_LOCATON] = str(addon.parent)
+            self._cache[addon_slug] = addon_config

    def _set_builtin_repositories(self):
        """Add local built-in repository into dataset."""
        try:
-            builtin_file = Path(__file__).parent.joinpath('built-in.json')
+            builtin_file = Path(__file__).parent.joinpath("built-in.json")
            builtin_data = read_json_file(builtin_file)
        except (OSError, json.JSONDecodeError) as err:
            _LOGGER.warning("Can't read built-in json: %s", err)
hassio/addons/utils.py
@@ -1,24 +1,38 @@
"""Util add-ons functions."""
+from __future__ import annotations
+
import asyncio
import hashlib
import logging
from pathlib import Path
import re
+from typing import TYPE_CHECKING

from ..const import (
-    SECURITY_DISABLE, SECURITY_PROFILE, PRIVILEGED_NET_ADMIN,
-    PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_RAWIO, PRIVILEGED_SYS_PTRACE,
-    PRIVILEGED_DAC_READ_SEARCH, ROLE_ADMIN, ROLE_MANAGER)
+    PRIVILEGED_DAC_READ_SEARCH,
+    PRIVILEGED_NET_ADMIN,
+    PRIVILEGED_SYS_ADMIN,
+    PRIVILEGED_SYS_MODULE,
+    PRIVILEGED_SYS_PTRACE,
+    PRIVILEGED_SYS_RAWIO,
+    ROLE_ADMIN,
+    ROLE_MANAGER,
+    SECURITY_DISABLE,
+    SECURITY_PROFILE,
+)
+
+if TYPE_CHECKING:
+    from .addon import Addon

RE_SHA1 = re.compile(r"[a-f0-9]{8}")

_LOGGER = logging.getLogger(__name__)


-def rating_security(addon):
-    """Return 1-5 for security rating.
+def rating_security(addon: Addon) -> int:
+    """Return 1-6 for security rating.

    1 = not secure
-    5 = high secure
+    6 = high secure
    """
    rating = 5
@@ -28,11 +42,22 @@ def rating_security(addon):
    elif addon.apparmor == SECURITY_PROFILE:
        rating += 1

+    # Home Assistant Login
+    if addon.access_auth_api:
+        rating += 1
+
    # Privileged options
-    if any(privilege in addon.privileged
-           for privilege in (PRIVILEGED_NET_ADMIN, PRIVILEGED_SYS_ADMIN,
-                             PRIVILEGED_SYS_RAWIO, PRIVILEGED_SYS_PTRACE,
-                             PRIVILEGED_DAC_READ_SEARCH)):
+    if any(
+            privilege in addon.privileged
+            for privilege in (
+                PRIVILEGED_NET_ADMIN,
+                PRIVILEGED_SYS_ADMIN,
+                PRIVILEGED_SYS_RAWIO,
+                PRIVILEGED_SYS_PTRACE,
+                PRIVILEGED_SYS_MODULE,
+                PRIVILEGED_DAC_READ_SEARCH,
+            )
+    ):
        rating += -1

    # API Hass.io role
@@ -60,23 +85,24 @@ def rating_security(addon):
    return max(min(6, rating), 1)


-def get_hash_from_repository(name):
+def get_hash_from_repository(name: str) -> str:
    """Generate a hash from repository."""
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


-def extract_hash_from_path(path):
+def extract_hash_from_path(path: Path) -> str:
    """Extract repo id from path."""
-    repo_dir = path.parts[-1]
+    repository_dir = path.parts[-1]

-    if not RE_SHA1.match(repo_dir):
-        return get_hash_from_repository(repo_dir)
-    return repo_dir
+    if not RE_SHA1.match(repository_dir):
+        return get_hash_from_repository(repository_dir)
+    return repository_dir


def check_installed(method):
    """Wrap function with check if add-on is installed."""

    async def wrap_check(addon, *args, **kwargs):
        """Return False if not installed or the function."""
        if not addon.is_installed:
@@ -87,18 +113,18 @@ def check_installed(method):
    return wrap_check


-async def remove_data(folder):
+async def remove_data(folder: Path) -> None:
    """Remove folder and reset privileged."""
    try:
        proc = await asyncio.create_subprocess_exec(
-            "rm", "-rf", str(folder),
-            stdout=asyncio.subprocess.DEVNULL
+            "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
        )

        _, error_msg = await proc.communicate()
    except OSError as err:
        error_msg = str(err)
-    else:
-        if proc.returncode == 0:
-            return

+    if proc.returncode == 0:
+        return
    _LOGGER.error("Can't remove Add-on Data: %s", error_msg)
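A quick usage sketch of the hash helpers above: `get_hash_from_repository` reduces a repository name to the first eight hex digits of its SHA-1, which is exactly the shape `RE_SHA1` tests for in `extract_hash_from_path`. The example URL is illustrative, not taken from the diff:

```python
import hashlib


def get_hash_from_repository(name: str) -> str:
    """Generate a hash from repository (same logic as the diff above)."""
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


# Prints an 8-character hex id derived from the lowercased URL.
print(get_hash_from_repository("https://github.com/hassio-addons/repository"))
```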
hassio/addons/validate.py
@@ -6,32 +6,29 @@ import uuid

import voluptuous as vol

from ..const import (
-    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
-    ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE,
-    STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE,
-    BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER,
-    ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
-    ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
-    ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
-    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK, ATTR_UUID,
-    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
-    ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
-    ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
-    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY,
-    ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API, ATTR_PROTECTED,
-    ATTR_FULL_ACCESS, ATTR_ACCESS_TOKEN, ATTR_HOST_PID, ATTR_HASSIO_ROLE,
-    ATTR_MACHINE,
-    PRIVILEGED_NET_ADMIN, PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_RAWIO,
-    PRIVILEGED_IPC_LOCK, PRIVILEGED_SYS_TIME, PRIVILEGED_SYS_NICE,
-    PRIVILEGED_SYS_RESOURCE, PRIVILEGED_SYS_PTRACE, PRIVILEGED_DAC_READ_SEARCH,
-    ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_MANAGER, ROLE_ADMIN)
-from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_DEVICE, UUID_MATCH
+    ARCH_ALL, ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_ARGS,
+    ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_AUTH_API,
+    ATTR_AUTO_UART, ATTR_AUTO_UPDATE, ATTR_BOOT, ATTR_BUILD_FROM,
+    ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
+    ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
+    ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
+    ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
+    ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE,
+    ATTR_MAINTAINER, ATTR_MAP, ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS,
+    ATTR_PORTS, ATTR_PRIVILEGED, ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA,
+    ATTR_SERVICES, ATTR_SLUG, ATTR_SQUASH, ATTR_STARTUP, ATTR_STATE,
+    ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT, ATTR_TMPFS, ATTR_URL, ATTR_USER,
+    ATTR_UUID, ATTR_VERSION, ATTR_WEBUI, BOOT_AUTO, BOOT_MANUAL,
+    PRIVILEGED_ALL, ROLE_ALL, ROLE_DEFAULT, STARTUP_ALL, STARTUP_APPLICATION,
+    STARTUP_SERVICES, STATE_STARTED, STATE_STOPPED)
+from ..services.validate import DISCOVERY_SERVICES
+from ..validate import (
+    ALSA_DEVICE, DOCKER_PORTS, NETWORK_PORT, SHA256, UUID_MATCH)

_LOGGER = logging.getLogger(__name__)


-RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$")
+RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")

V_STR = 'str'
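The RE_VOLUME change in the hunk above removes a stray colon from the optional access-mode group: the old pattern only accepted `config::ro`, never `config:ro`. A quick check (illustrative, not part of the diff):

```python
import re

OLD = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$")
NEW = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")

print(bool(OLD.match("config:ro")))  # False: old pattern required 'config::ro'
print(bool(NEW.match("config:ro")))  # True: fixed pattern accepts 'ro'
print(bool(NEW.match("share:rw")))   # True: 'rw' worked in both versions
```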
@@ -52,49 +49,20 @@ RE_SCHEMA_ELEMENT = re.compile(
    r")\??$"
)

+RE_DOCKER_IMAGE = re.compile(
+    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
+RE_DOCKER_IMAGE_BUILD = re.compile(
+    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")
+
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)

-ARCH_ALL = [
-    ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
-]
-
MACHINE_ALL = [
-    'intel-nuc', 'qemux86', 'qemux86-64', 'qemuarm', 'qemuarm-64',
-    'raspberrypi', 'raspberrypi2', 'raspberrypi3', 'raspberrypi3-64',
-    'odroid-c2', 'odroid-xu',
+    'intel-nuc', 'odroid-c2', 'odroid-xu', 'orangepi-prime', 'qemux86',
+    'qemux86-64', 'qemuarm', 'qemuarm-64', 'raspberrypi', 'raspberrypi2',
+    'raspberrypi3', 'raspberrypi3-64', 'tinker',
]

-STARTUP_ALL = [
-    STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
-    STARTUP_APPLICATION
-]
-
-PRIVILEGED_ALL = [
-    PRIVILEGED_NET_ADMIN,
-    PRIVILEGED_SYS_ADMIN,
-    PRIVILEGED_SYS_RAWIO,
-    PRIVILEGED_IPC_LOCK,
-    PRIVILEGED_SYS_TIME,
-    PRIVILEGED_SYS_NICE,
-    PRIVILEGED_SYS_RESOURCE,
-    PRIVILEGED_SYS_PTRACE,
-    PRIVILEGED_DAC_READ_SEARCH,
-]
-
-ROLE_ALL = [
-    ROLE_DEFAULT,
-    ROLE_HOMEASSISTANT,
-    ROLE_MANAGER,
-    ROLE_ADMIN,
-]
-
-BASE_IMAGE = {
-    ARCH_ARMHF: "homeassistant/armhf-base:latest",
-    ARCH_AARCH64: "homeassistant/aarch64-base:latest",
-    ARCH_I386: "homeassistant/i386-base:latest",
-    ARCH_AMD64: "homeassistant/amd64-base:latest",
-}
-

def _simple_startup(value):
    """Simple startup schema."""
@@ -137,12 +105,14 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
    vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
    vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
    vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
+    vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
    vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
    vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
    vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
    vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
    vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
    vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
    vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
    vol.Optional(ATTR_DISCOVERY): [vol.In(DISCOVERY_SERVICES)],
    vol.Required(ATTR_OPTIONS): dict,
@@ -157,7 +127,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
        }))
    }), False),
    vol.Optional(ATTR_IMAGE):
-        vol.Match(r"^([a-zA-Z.:\d{}]+/)*?([\w{}]+)/([\-\w{}]+)$"),
+        vol.Match(RE_DOCKER_IMAGE),
    vol.Optional(ATTR_TIMEOUT, default=10):
        vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
}, extra=vol.REMOVE_EXTRA)
@@ -173,8 +143,8 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({

# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema({
-    vol.Optional(ATTR_BUILD_FROM, default=BASE_IMAGE): vol.Schema({
-        vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
+    vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema({
+        vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD),
    }),
    vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
    vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
@@ -186,8 +156,9 @@ SCHEMA_BUILD_CONFIG = vol.Schema({
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema({
    vol.Required(ATTR_VERSION): vol.Coerce(str),
+    vol.Optional(ATTR_IMAGE): vol.Maybe(vol.Coerce(str)),
    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
-    vol.Optional(ATTR_ACCESS_TOKEN): vol.Match(r"^[0-9a-f]{64}$"),
+    vol.Optional(ATTR_ACCESS_TOKEN): SHA256,
    vol.Optional(ATTR_OPTIONS, default=dict): dict,
    vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
    vol.Optional(ATTR_BOOT):
hassio/api/__init__.py
@@ -5,16 +5,17 @@ from pathlib import Path
from aiohttp import web

from .addons import APIAddons
+from .auth import APIAuth
from .discovery import APIDiscovery
from .homeassistant import APIHomeAssistant
from .hardware import APIHardware
from .host import APIHost
from .hassos import APIHassOS
+from .info import APIInfo
from .proxy import APIProxy
from .supervisor import APISupervisor
from .snapshots import APISnapshots
from .services import APIServices
-from .version import APIVersion
from .security import SecurityMiddleware
from ..coresys import CoreSysAttributes

@@ -29,7 +30,7 @@ class RestAPI(CoreSysAttributes):
        self.coresys = coresys
        self.security = SecurityMiddleware(coresys)
        self.webapp = web.Application(
-            middlewares=[self.security.token_validation], loop=coresys.loop)
+            middlewares=[self.security.token_validation])

        # service stuff
        self._runner = web.AppRunner(self.webapp)
@@ -48,7 +49,8 @@ class RestAPI(CoreSysAttributes):
        self._register_snapshots()
        self._register_discovery()
        self._register_services()
-        self._register_version()
+        self._register_info()
+        self._register_auth()

    def _register_host(self):
        """Register hostcontrol functions."""
@@ -64,10 +66,10 @@ class RestAPI(CoreSysAttributes):
            web.get('/host/services', api_host.services),
            web.post('/host/services/{service}/stop', api_host.service_stop),
            web.post('/host/services/{service}/start', api_host.service_start),
-            web.post(
-                '/host/services/{service}/restart', api_host.service_restart),
-            web.post(
-                '/host/services/{service}/reload', api_host.service_reload),
+            web.post('/host/services/{service}/restart',
+                     api_host.service_restart),
+            web.post('/host/services/{service}/reload',
+                     api_host.service_reload),
        ])

    def _register_hassos(self):
@@ -92,13 +94,22 @@ class RestAPI(CoreSysAttributes):
            web.get('/hardware/audio', api_hardware.audio),
        ])

-    def _register_version(self):
-        """Register version functions."""
-        api_version = APIVersion()
-        api_version.coresys = self.coresys
+    def _register_info(self):
+        """Register info functions."""
+        api_info = APIInfo()
+        api_info.coresys = self.coresys

        self.webapp.add_routes([
-            web.get('/version', api_version.info),
+            web.get('/info', api_info.info),
        ])

+    def _register_auth(self):
+        """Register auth functions."""
+        api_auth = APIAuth()
+        api_auth.coresys = self.coresys
+
+        self.webapp.add_routes([
+            web.post('/auth', api_auth.auth),
+        ])
+
    def _register_supervisor(self):
@@ -213,8 +224,7 @@ class RestAPI(CoreSysAttributes):
        self.webapp.add_routes([
            web.get('/discovery', api_discovery.list),
            web.get('/discovery/{uuid}', api_discovery.get_discovery),
-            web.delete('/discovery/{uuid}',
-                       api_discovery.del_discovery),
+            web.delete('/discovery/{uuid}', api_discovery.del_discovery),
            web.post('/discovery', api_discovery.set_discovery),
        ])

@@ -228,8 +238,8 @@ class RestAPI(CoreSysAttributes):
            return lambda request: web.FileResponse(path)

        # This route is for backwards compatibility with HA < 0.58
-        self.webapp.add_routes([
-            web.get('/panel', create_response('hassio-main-es5'))])
+        self.webapp.add_routes(
+            [web.get('/panel', create_response('hassio-main-es5'))])

        # This route is for backwards compatibility with HA 0.58 - 0.61
        self.webapp.add_routes([
@@ -255,8 +265,8 @@ class RestAPI(CoreSysAttributes):
        try:
            await self._site.start()
        except OSError as err:
-            _LOGGER.fatal(
-                "Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
+            _LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s",
+                          err)
        else:
            _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
hassio/api/addons.py
@@ -20,7 +20,8 @@ from ..const import (
    ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
    ATTR_DISCOVERY, ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API,
    ATTR_FULL_ACCESS, ATTR_PROTECTED, ATTR_RATING, ATTR_HOST_PID,
-    ATTR_HASSIO_ROLE, ATTR_MACHINE, ATTR_AVAILABLE,
+    ATTR_HASSIO_ROLE, ATTR_MACHINE, ATTR_AVAILABLE, ATTR_AUTH_API,
+    ATTR_KERNEL_MODULES,
    CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT, REQUEST_FROM)
from ..coresys import CoreSysAttributes
from ..validate import DOCKER_PORTS, ALSA_DEVICE
@@ -149,8 +150,10 @@ class APIAddons(CoreSysAttributes):
            ATTR_STDIN: addon.with_stdin,
            ATTR_HASSIO_API: addon.access_hassio_api,
            ATTR_HASSIO_ROLE: addon.hassio_role,
+            ATTR_AUTH_API: addon.access_auth_api,
            ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
            ATTR_GPIO: addon.with_gpio,
+            ATTR_KERNEL_MODULES: addon.with_kernel_modules,
            ATTR_DEVICETREE: addon.with_devicetree,
            ATTR_DOCKER_API: addon.access_docker_api,
            ATTR_AUDIO: addon.with_audio,
61 hassio/api/auth.py Normal file
@@ -0,0 +1,61 @@
"""Init file for Hass.io auth/SSO RESTful API."""
import logging

from aiohttp import BasicAuth
from aiohttp.web_exceptions import HTTPUnauthorized
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION, WWW_AUTHENTICATE

from .utils import api_process
from ..const import REQUEST_FROM, CONTENT_TYPE_JSON, CONTENT_TYPE_URL
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden

_LOGGER = logging.getLogger(__name__)


class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""

    def _process_basic(self, request, addon):
        """Process login request with basic auth.

        Return a coroutine.
        """
        auth = BasicAuth.decode(request.headers[AUTHORIZATION])
        return self.sys_auth.check_login(addon, auth.login, auth.password)

    def _process_dict(self, request, addon, data):
        """Process login with dict data.

        Return a coroutine.
        """
        username = data.get('username') or data.get('user')
        password = data.get('password')

        return self.sys_auth.check_login(addon, username, password)

    @api_process
    async def auth(self, request):
        """Process login request."""
        addon = request[REQUEST_FROM]

        if not addon.access_auth_api:
            raise APIForbidden("Can't use Home Assistant auth!")

        # BasicAuth
        if AUTHORIZATION in request.headers:
            return await self._process_basic(request, addon)

        # Json
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
            data = await request.json()
            return await self._process_dict(request, addon, data)

        # URL encoded
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
            data = await request.post()
            return await self._process_dict(request, addon, data)

        raise HTTPUnauthorized(headers={
            WWW_AUTHENTICATE: "Basic realm=\"Hass.io Authentication\""
        })
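`BasicAuth.decode` used in `_process_basic` above is aiohttp's parser for `Authorization: Basic ...` headers. A small illustration with example credentials (not taken from the diff):

```python
from aiohttp import BasicAuth

# "dXNlcjpwYXNz" is base64 for "user:pass" (example values only).
auth = BasicAuth.decode("Basic dXNlcjpwYXNz")
print(auth.login, auth.password)  # -> user pass
```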
hassio/api/homeassistant.py
@@ -4,34 +4,39 @@ import logging

import voluptuous as vol

-from .utils import api_process, api_process_raw, api_validate
from ..const import (
-    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
-    ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
-    ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
-    ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, ATTR_MACHINE,
-    ATTR_REFRESH_TOKEN, CONTENT_TYPE_BINARY)
+    ATTR_ARCH, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_BOOT, ATTR_CPU_PERCENT,
+    ATTR_CUSTOM, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_MACHINE, ATTR_MEMORY_LIMIT,
+    ATTR_MEMORY_USAGE, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_PASSWORD,
+    ATTR_PORT, ATTR_REFRESH_TOKEN, ATTR_SSL, ATTR_VERSION, ATTR_WAIT_BOOT,
+    ATTR_WATCHDOG, CONTENT_TYPE_BINARY)
from ..coresys import CoreSysAttributes
-from ..validate import NETWORK_PORT, DOCKER_IMAGE
+from ..exceptions import APIError
+from ..validate import DOCKER_IMAGE, NETWORK_PORT
+from .utils import api_process, api_process_raw, api_validate

_LOGGER = logging.getLogger(__name__)


# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({
-    vol.Optional(ATTR_BOOT): vol.Boolean(),
+    vol.Optional(ATTR_BOOT):
+        vol.Boolean(),
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
        vol.Maybe(vol.Coerce(str)),
    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
        vol.Any(None, DOCKER_IMAGE),
-    vol.Optional(ATTR_PORT): NETWORK_PORT,
-    vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
-    vol.Optional(ATTR_SSL): vol.Boolean(),
-    vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
+    vol.Optional(ATTR_PORT):
+        NETWORK_PORT,
+    vol.Optional(ATTR_PASSWORD):
+        vol.Maybe(vol.Coerce(str)),
+    vol.Optional(ATTR_SSL):
+        vol.Boolean(),
+    vol.Optional(ATTR_WATCHDOG):
+        vol.Boolean(),
    vol.Optional(ATTR_WAIT_BOOT):
        vol.All(vol.Coerce(int), vol.Range(min=60)),
-    vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
+    vol.Optional(ATTR_REFRESH_TOKEN):
+        vol.Maybe(vol.Coerce(str)),
})

SCHEMA_VERSION = vol.Schema({
@@ -49,6 +54,7 @@ class APIHomeAssistant(CoreSysAttributes):
            ATTR_VERSION: self.sys_homeassistant.version,
            ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
            ATTR_MACHINE: self.sys_homeassistant.machine,
+            ATTR_ARCH: self.sys_homeassistant.arch,
            ATTR_IMAGE: self.sys_homeassistant.image,
            ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
            ATTR_BOOT: self.sys_homeassistant.boot,
28 hassio/api/info.py Normal file
@@ -0,0 +1,28 @@
"""Init file for Hass.io info RESTful API."""
import logging

from ..const import (ATTR_ARCH, ATTR_CHANNEL, ATTR_HASSOS, ATTR_HOMEASSISTANT,
                     ATTR_HOSTNAME, ATTR_MACHINE, ATTR_SUPERVISOR,
                     ATTR_SUPPORTED_ARCH)
from ..coresys import CoreSysAttributes
from .utils import api_process

_LOGGER = logging.getLogger(__name__)


class APIInfo(CoreSysAttributes):
    """Handle RESTful API for info functions."""

    @api_process
    async def info(self, request):
        """Show system info."""
        return {
            ATTR_SUPERVISOR: self.sys_supervisor.version,
            ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
            ATTR_HASSOS: self.sys_hassos.version,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
            ATTR_MACHINE: self.sys_machine,
            ATTR_ARCH: self.sys_arch.default,
            ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
            ATTR_CHANNEL: self.sys_updater.channel,
        }
2 hassio/api/panel/chunk.0853908528652fbc5d4f.js Normal file (diff suppressed: lines too long)
BIN hassio/api/panel/chunk.0853908528652fbc5d4f.js.gz Normal file (binary not shown)
1 hassio/api/panel/chunk.0853908528652fbc5d4f.js.map Normal file (diff suppressed: lines too long)
2 hassio/api/panel/chunk.0cb8b788b03dcc48da14.js Normal file (diff suppressed: lines too long)
BIN hassio/api/panel/chunk.0cb8b788b03dcc48da14.js.gz Normal file (binary not shown)
1 hassio/api/panel/chunk.0cb8b788b03dcc48da14.js.map Normal file (diff suppressed: lines too long)
(further unnamed panel chunks: diffs suppressed or binary files not shown)

hassio/api/panel/chunk.7ee37c2565bcf2d88182.js (deleted)
@@ -1,2 +0,0 @@
-(window.webpackJsonp=window.webpackJsonp||[]).push([[4],{102:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(91),i=t.n(e),o=t(93),u=t.n(o),a=i.a,c=u.a}}]);
-//# sourceMappingURL=chunk.7ee37c2565bcf2d88182.js.map
BIN hassio/api/panel/chunk.7ee37c2565bcf2d88182.js.gz (deleted, binary not shown)
hassio/api/panel/chunk.7ee37c2565bcf2d88182.js.map (deleted)
@@ -1 +0,0 @@
-{"version":3,"sources":["webpack:///../src/resources/load_markdown.js"],"names":["__webpack_require__","r","__webpack_exports__","d","marked","filterXSS","marked__WEBPACK_IMPORTED_MODULE_0__","marked__WEBPACK_IMPORTED_MODULE_0___default","n","xss__WEBPACK_IMPORTED_MODULE_1__","xss__WEBPACK_IMPORTED_MODULE_1___default","a"],"mappings":"0FAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,2BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,8BAAAG,IAAA,IAAAC,EAAAN,EAAA,IAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,IAAAU,EAAAV,EAAAQ,EAAAC,GAGaL,EAASG,EAAAI,EACTN,EAAYK,EAAAC","file":"chunk.7ee37c2565bcf2d88182.js","sourcesContent":["import marked_ from 'marked';\nimport filterXSS_ from 'xss';\n\nexport const marked = marked_;\nexport const filterXSS = filterXSS_;\n"],"sourceRoot":""}

2 hassio/api/panel/chunk.8c049a124b9397e54c16.js Normal file (diff suppressed: lines too long)
BIN hassio/api/panel/chunk.8c049a124b9397e54c16.js.gz Normal file (binary not shown)
1 hassio/api/panel/chunk.8c049a124b9397e54c16.js.map Normal file (diff suppressed: lines too long)
2 hassio/api/panel/chunk.9e3883f96f68b3ce89f5.js Normal file
@@ -0,0 +1,2 @@
+(window.webpackJsonp=window.webpackJsonp||[]).push([[4],{100:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(89),i=t.n(e),o=t(91),u=t.n(o),a=i.a,c=u.a}}]);
+//# sourceMappingURL=chunk.9e3883f96f68b3ce89f5.js.map
BIN hassio/api/panel/chunk.9e3883f96f68b3ce89f5.js.gz Normal file (binary not shown)
1 hassio/api/panel/chunk.9e3883f96f68b3ce89f5.js.map Normal file
@@ -0,0 +1 @@
+{"version":3,"sources":["webpack:///../src/resources/load_markdown.js"],"names":["__webpack_require__","r","__webpack_exports__","d","marked","filterXSS","marked__WEBPACK_IMPORTED_MODULE_0__","marked__WEBPACK_IMPORTED_MODULE_0___default","n","xss__WEBPACK_IMPORTED_MODULE_1__","xss__WEBPACK_IMPORTED_MODULE_1___default","marked_","filterXSS_"],"mappings":"0FAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,2BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,8BAAAG,IAAA,IAAAC,EAAAN,EAAA,IAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,IAAAU,EAAAV,EAAAQ,EAAAC,GAGaL,EAASO,IACTN,EAAYO","file":"chunk.9e3883f96f68b3ce89f5.js","sourcesContent":["import marked_ from \"marked\";\nimport filterXSS_ from \"xss\";\n\nexport const marked = marked_;\nexport const filterXSS = filterXSS_;\n"],"sourceRoot":""}
(further unnamed panel chunks: diffs suppressed or binary files not shown)
3 hassio/api/panel/chunk.c1ac97370d72bce0a835.js Normal file (diff suppressed: lines too long)
@@ -401,6 +401,28 @@ part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
@@ -510,6 +532,174 @@ part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
BIN  hassio/api/panel/chunk.c1ac97370d72bce0a835.js.gz  Normal file  (Binary file not shown)
1    hassio/api/panel/chunk.c1ac97370d72bce0a835.js.map  Normal file  (File diff suppressed because one or more lines are too long)
2    hassio/api/panel/chunk.d0eb7b86b775838caf5e.js  Normal file  (File diff suppressed because one or more lines are too long)
BIN  hassio/api/panel/chunk.d0eb7b86b775838caf5e.js.gz  Normal file  (Binary file not shown)
1    hassio/api/panel/chunk.d0eb7b86b775838caf5e.js.map  Normal file  (File diff suppressed because one or more lines are too long)
3    hassio/api/panel/chunk.f32f3c841cc3e1d081f7.js  Normal file  (File diff suppressed because one or more lines are too long)
BIN  hassio/api/panel/chunk.f32f3c841cc3e1d081f7.js.gz  Normal file  (Binary file not shown)
1    hassio/api/panel/chunk.f32f3c841cc3e1d081f7.js.map  Normal file  (File diff suppressed because one or more lines are too long)
@@ -1,2 +1,2 @@
-!function(e){function n(n){for(var t,o,a=n[0],i=n[1],u=0,l=[];u<a.length;u++)o=a[u],r[o]&&l.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(c&&c(n);l.length;)l.shift()()}var t={},r={1:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,u=document.getElementsByTagName("head")[0],c=document.createElement("script");c.charset="utf-8",c.timeout=120,o.nc&&c.setAttribute("nonce",o.nc),c.src=function(e){return o.p+"chunk."+{0:"a8fa5591357cce978816",2:"457ac71b0904d7243237",3:"57f5b43a82b988080555",4:"7ee37c2565bcf2d88182",5:"72a6da063fe4cb6308e8",6:"ad9001ac29bd3acbb520"}[e]+".js"}(e),i=function(n){c.onerror=c.onload=null,clearTimeout(l);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src,i=new Error("Loading chunk "+e+" failed.\n("+o+": "+a+")");i.type=o,i.request=a,t[1](i)}r[e]=void 0}};var l=setTimeout(function(){i({type:"timeout",target:c})},12e4);c.onerror=c.onload=i,u.appendChild(c)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=window.webpackJsonp=window.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var u=0;u<a.length;u++)n(a[u]);var c=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(2)]).then(t.bind(null,2)),Promise.all([t.e(0),t.e(5),t.e(3)]).then(t.bind(null,1))})}]);
+!function(e){function n(n){for(var t,o,i=n[0],u=n[1],c=0,f=[];c<i.length;c++)o=i[c],r[o]&&f.push(r[o][0]),r[o]=0;for(t in u)Object.prototype.hasOwnProperty.call(u,t)&&(e[t]=u[t]);for(a&&a(n);f.length;)f.shift()()}var t={},r={1:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var i=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=i);var u,c=document.getElementsByTagName("head")[0],a=document.createElement("script");a.charset="utf-8",a.timeout=120,o.nc&&a.setAttribute("nonce",o.nc),a.src=function(e){return o.p+"chunk."+{0:"f32f3c841cc3e1d081f7",2:"8c049a124b9397e54c16",3:"d0eb7b86b775838caf5e",4:"9e3883f96f68b3ce89f5",5:"0cb8b788b03dcc48da14",6:"c1ac97370d72bce0a835",7:"0853908528652fbc5d4f"}[e]+".js"}(e),u=function(n){a.onerror=a.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),i=n&&n.target&&n.target.src,u=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");u.type=o,u.request=i,t[1](u)}r[e]=void 0}};var f=setTimeout(function(){u({type:"timeout",target:a})},12e4);a.onerror=a.onload=u,c.appendChild(a)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],u=i.push.bind(i);i.push=n,i=i.slice();for(var c=0;c<i.length;c++)n(i[c]);var a=u;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(2)]).then(t.bind(null,2)),Promise.all([t.e(0),t.e(6),t.e(3)]).then(t.bind(null,1))})}]);
 //# sourceMappingURL=entrypoint.js.map
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -5,14 +5,15 @@ import logging
 
 import aiohttp
 from aiohttp import web
-from aiohttp.web_exceptions import (
-    HTTPBadGateway, HTTPInternalServerError, HTTPUnauthorized)
+from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized
+from aiohttp.client_exceptions import ClientConnectorError
 from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION
 import async_timeout
 
 from ..const import HEADER_HA_ACCESS
 from ..coresys import CoreSysAttributes
-from ..exceptions import HomeAssistantAuthError, HomeAssistantAPIError
+from ..exceptions import (
+    HomeAssistantAuthError, HomeAssistantAPIError, APIError)
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -82,19 +83,13 @@ class APIProxy(CoreSysAttributes):
         response.content_type = request.headers.get(CONTENT_TYPE)
         try:
             await response.prepare(request)
-            while True:
-                data = await client.content.read(10)
-                if not data:
-                    break
+            async for data in client.content:
                 await response.write(data)
 
-        except aiohttp.ClientError:
+        except (aiohttp.ClientError, aiohttp.ClientPayloadError):
             pass
 
         finally:
             client.close()
-            _LOGGER.info("Home Assistant EventStream close")
 
+        _LOGGER.info("Home Assistant EventStream close")
         return response
 
     async def api(self, request):
@@ -117,7 +112,7 @@ class APIProxy(CoreSysAttributes):
 
         try:
             client = await self.sys_websession_ssl.ws_connect(
-                url, heartbeat=60, verify_ssl=False)
+                url, heartbeat=30, verify_ssl=False)
 
             # Handle authentication
             data = await client.receive_json()
@@ -129,7 +124,7 @@ class APIProxy(CoreSysAttributes):
                 # Invalid protocol
                 _LOGGER.error(
                     "Got unexpected response from HA WebSocket: %s", data)
-                raise HTTPBadGateway()
+                raise APIError()
 
             if self.sys_homeassistant.refresh_token:
                 await self.sys_homeassistant.ensure_access_token()
@@ -149,26 +144,25 @@ class APIProxy(CoreSysAttributes):
                 return client
 
             # Renew the Token is invalid
-            if (data.get('type') == 'invalid_auth' and
-                    self.sys_homeassistant.refresh_token):
+            if data.get('type') == 'invalid_auth' and self.sys_homeassistant.refresh_token:
                 self.sys_homeassistant.access_token = None
                 return await self._websocket_client()
 
             raise HomeAssistantAuthError()
 
-        except (RuntimeError, ValueError) as err:
+        except (RuntimeError, ValueError, ClientConnectorError) as err:
            _LOGGER.error("Client error on WebSocket API %s.", err)
-        except HomeAssistantAuthError as err:
+        except HomeAssistantAuthError:
            _LOGGER.error("Failed authentication to Home Assistant WebSocket")
 
-        raise HTTPBadGateway()
+        raise APIError()
 
     async def websocket(self, request):
         """Initialize a WebSocket API connection."""
         _LOGGER.info("Home Assistant WebSocket API request initialize")
 
         # init server
-        server = web.WebSocketResponse(heartbeat=60)
+        server = web.WebSocketResponse(heartbeat=30)
         await server.prepare(request)
 
         # handle authentication
@@ -180,8 +174,7 @@ class APIProxy(CoreSysAttributes):
 
             # Check API access
             response = await server.receive_json()
-            hassio_token = (response.get('api_password') or
-                            response.get('access_token'))
+            hassio_token = response.get('api_password') or response.get('access_token')
             addon = self.sys_addons.from_token(hassio_token)
 
             if not addon or not addon.access_homeassistant_api:
@@ -200,10 +193,13 @@ class APIProxy(CoreSysAttributes):
             })
         except (RuntimeError, ValueError) as err:
             _LOGGER.error("Can't initialize handshake: %s", err)
-            raise HTTPInternalServerError() from None
+            return server
 
         # init connection to hass
-        client = await self._websocket_client()
+        try:
+            client = await self._websocket_client()
+        except APIError:
+            return server
 
         _LOGGER.info("Home Assistant WebSocket API request running")
         try:
@@ -238,7 +234,7 @@ class APIProxy(CoreSysAttributes):
         except asyncio.CancelledError:
             pass
 
-        except RuntimeError as err:
+        except (RuntimeError, ConnectionError, TypeError) as err:
             _LOGGER.info("Home Assistant WebSocket API error: %s", err)
 
         finally:
@@ -248,8 +244,10 @@ class APIProxy(CoreSysAttributes):
             server_read.cancel()
 
             # close connections
-            await client.close()
-            await server.close()
+            if not client.closed:
+                await client.close()
+            if not server.closed:
+                await server.close()
 
         _LOGGER.info("Home Assistant WebSocket API connection is closed")
         return server
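
The EventStream change above swaps a fixed-size polling read (client.content.read(10)) for aiohttp's async iteration over the response body, which relays chunks as fast as the upstream produces them and ends cleanly when Home Assistant closes the stream. A minimal, self-contained sketch of that relay pattern; the URL, handler name, and session handling here are illustrative, not from the commit:

    """Sketch of the aiohttp streaming-relay pattern used in the proxy above."""
    import aiohttp
    from aiohttp import web

    async def stream_proxy(request: web.Request) -> web.StreamResponse:
        upstream_url = "http://homeassistant.local:8123/api/stream"  # placeholder
        async with aiohttp.ClientSession() as session:
            client = await session.get(upstream_url)
            response = web.StreamResponse()
            response.content_type = client.headers.get("Content-Type", "text/plain")
            await response.prepare(request)
            try:
                # Relay chunks as the upstream yields them; the loop ends
                # when the upstream closes its side of the connection.
                async for data in client.content:
                    await response.write(data)
            except (aiohttp.ClientError, aiohttp.ClientPayloadError):
                pass
            finally:
                client.close()
            return response

Iterating client.content lets aiohttp's own flow control pace the copy instead of busy-looping on tiny reads.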
@@ -7,7 +7,7 @@ from aiohttp.web_exceptions import HTTPUnauthorized, HTTPForbidden
 
 from ..const import (
     HEADER_TOKEN, REQUEST_FROM, ROLE_ADMIN, ROLE_DEFAULT, ROLE_HOMEASSISTANT,
-    ROLE_MANAGER)
+    ROLE_MANAGER, ROLE_BACKUP)
 from ..coresys import CoreSysAttributes
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -33,9 +33,10 @@ NO_SECURITY_CHECK = re.compile(
 ADDONS_API_BYPASS = re.compile(
     r"^(?:"
     r"|/addons/self/(?!security|update)[^/]+"
-    r"|/version"
+    r"|/info"
     r"|/services.*"
     r"|/discovery.*"
+    r"|/auth"
     r")$"
 )
 
@@ -52,6 +53,11 @@ ADDONS_ROLE_ACCESS = {
         r"|/homeassistant/.+"
         r")$"
     ),
+    ROLE_BACKUP: re.compile(
+        r"^(?:"
+        r"|/snapshots.*"
+        r")$"
+    ),
     ROLE_MANAGER: re.compile(
         r"^(?:"
         r"|/homeassistant/.+"
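
With ROLE_BACKUP added above, an add-on carrying the backup role is limited to the /snapshots endpoints; the middleware check is nothing more than a compiled regex per role matched against the request path. A small sketch under those assumptions; the can_access helper and the exact manager pattern shown here are illustrative:

    """Sketch of role-based path authorization via per-role regexes."""
    import re

    ROLE_ACCESS = {
        "backup": re.compile(r"^(?:|/snapshots.*)$"),
        "manager": re.compile(r"^(?:|/homeassistant/.+|/snapshots.*)$"),
    }

    def can_access(role: str, path: str) -> bool:
        """Allow a request only if the role's pattern matches its path."""
        pattern = ROLE_ACCESS.get(role)
        return bool(pattern and pattern.match(path))

    assert can_access("backup", "/snapshots/new/full")
    assert not can_access("backup", "/homeassistant/update")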
@@ -61,7 +61,7 @@ class APISupervisor(CoreSysAttributes):
             ATTR_VERSION: HASSIO_VERSION,
             ATTR_LAST_VERSION: self.sys_updater.version_hassio,
             ATTR_CHANNEL: self.sys_updater.channel,
-            ATTR_ARCH: self.sys_arch,
+            ATTR_ARCH: self.sys_supervisor.arch,
             ATTR_WAIT_BOOT: self.sys_config.wait_boot,
             ATTR_TIMEZONE: self.sys_config.timezone,
             ATTR_ADDONS: list_addons,
@@ -116,8 +116,7 @@ class APISupervisor(CoreSysAttributes):
         if version == self.sys_supervisor.version:
             raise APIError("Version {} is already in use".format(version))
 
-        return await asyncio.shield(
-            self.sys_supervisor.update(version))
+        return await asyncio.shield(self.sys_supervisor.update(version))
 
     @api_process
     async def reload(self, request):
@@ -125,8 +124,7 @@ class APISupervisor(CoreSysAttributes):
         tasks = [
             self.sys_updater.reload(),
         ]
-        results, _ = await asyncio.shield(
-            asyncio.wait(tasks))
+        results, _ = await asyncio.shield(asyncio.wait(tasks))
 
         for result in results:
             if result.exception() is not None:
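
Both handlers above keep asyncio.shield around the long-running coroutine, so a client that disconnects mid-request cannot cancel a half-applied update. A sketch of that behaviour with illustrative names:

    """Sketch: asyncio.shield protects inner work from outer cancellation."""
    import asyncio

    async def do_update() -> str:
        await asyncio.sleep(2)  # stand-in for a long supervisor update
        return "updated"

    async def handler() -> str:
        # If handler() itself is cancelled while awaiting, do_update()
        # keeps running to completion in the background.
        return await asyncio.shield(do_update())

    print(asyncio.run(handler()))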
@@ -1,26 +0,0 @@
-"""Init file for Hass.io version RESTful API."""
-import logging
-
-from .utils import api_process
-from ..const import (
-    ATTR_HOMEASSISTANT, ATTR_SUPERVISOR, ATTR_MACHINE, ATTR_ARCH, ATTR_HASSOS,
-    ATTR_CHANNEL)
-from ..coresys import CoreSysAttributes
-
-_LOGGER = logging.getLogger(__name__)
-
-
-class APIVersion(CoreSysAttributes):
-    """Handle RESTful API for version functions."""
-
-    @api_process
-    async def info(self, request):
-        """Show version info."""
-        return {
-            ATTR_SUPERVISOR: self.sys_supervisor.version,
-            ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
-            ATTR_HASSOS: self.sys_hassos.version,
-            ATTR_MACHINE: self.sys_machine,
-            ATTR_ARCH: self.sys_arch,
-            ATTR_CHANNEL: self.sys_updater.channel,
-        }
44  hassio/arch.json  Normal file
@@ -0,0 +1,44 @@
+{
+    "raspberrypi": [
+        "armhf"
+    ],
+    "raspberrypi2": [
+        "armhf"
+    ],
+    "raspberrypi3": [
+        "armhf"
+    ],
+    "raspberrypi3-64": [
+        "aarch64",
+        "armhf"
+    ],
+    "tinker": [
+        "armhf"
+    ],
+    "odroid-c2": [
+        "aarch64"
+    ],
+    "odroid-xu": [
+        "armhf"
+    ],
+    "orangepi-prime": [
+        "aarch64"
+    ],
+    "qemux86": [
+        "i386"
+    ],
+    "qemux86-64": [
+        "amd64",
+        "i386"
+    ],
+    "qemuarm": [
+        "armhf"
+    ],
+    "qemuarm-64": [
+        "aarch64"
+    ],
+    "intel-nuc": [
+        "amd64",
+        "i386"
+    ]
+}
67  hassio/arch.py  Normal file
@@ -0,0 +1,67 @@
+"""Handle Arch for underlay maschine/platforms."""
+import json
+import logging
+from typing import List
+from pathlib import Path
+
+from .coresys import CoreSysAttributes, CoreSys
+from .exceptions import HassioArchNotFound
+from .utils.json import read_json_file
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class CpuArch(CoreSysAttributes):
+    """Manage available architectures."""
+
+    def __init__(self, coresys: CoreSys) -> None:
+        """Initialize CPU Architecture handler."""
+        self.coresys = coresys
+        self._supported_arch: List[str] = []
+        self._default_arch: str
+
+    @property
+    def default(self) -> str:
+        """Return system default arch."""
+        return self._default_arch
+
+    @property
+    def supervisor(self) -> str:
+        """Return supervisor arch."""
+        return self.sys_supervisor.arch
+
+    @property
+    def supported(self) -> List[str]:
+        """Return support arch by CPU/Machine."""
+        return self._supported_arch
+
+    async def load(self) -> None:
+        """Load data and initialize default arch."""
+        try:
+            arch_file = Path(__file__).parent.joinpath("arch.json")
+            arch_data = read_json_file(arch_file)
+        except (OSError, json.JSONDecodeError) as err:
+            _LOGGER.warning("Can't read arch json: %s", err)
+            return
+
+        # Evaluate current CPU/Platform
+        if not self.sys_machine or self.sys_machine not in arch_data:
+            _LOGGER.warning("Can't detect underlay machine type!")
+            self._default_arch = self.sys_supervisor.arch
+            self._supported_arch.append(self.default)
+            return
+
+        # Use configs from arch.json
+        self._supported_arch.extend(arch_data[self.sys_machine])
+        self._default_arch = self.supported[0]
+
+    def is_supported(self, arch_list: List[str]) -> bool:
+        """Return True if there is a supported arch by this platform."""
+        return not set(self.supported).isdisjoint(set(arch_list))
+
+    def match(self, arch_list: List[str]) -> str:
+        """Return best match for this CPU/Platform."""
+        for self_arch in self.supported:
+            if self_arch in arch_list:
+                return self_arch
+        raise HassioArchNotFound()
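
CpuArch.load keys arch.json by machine type, and match then returns the first platform arch that the add-on also lists, so the ordering inside arch.json encodes preference. A standalone sketch of that selection logic over an excerpt of the data above:

    """Sketch of the arch-matching logic from arch.py, on arch.json data."""
    from typing import List

    ARCH_DATA = {"raspberrypi3-64": ["aarch64", "armhf"]}  # excerpt of arch.json

    def match(machine: str, addon_archs: List[str]) -> str:
        """Pick the first platform arch the add-on supports too."""
        for arch in ARCH_DATA[machine]:
            if arch in addon_archs:
                return arch
        raise LookupError("no supported arch")

    # A Pi 3 in 64-bit mode prefers aarch64 images but falls back to armhf.
    print(match("raspberrypi3-64", ["armhf", "amd64"]))  # -> armhf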
95  hassio/auth.py  Normal file
@@ -0,0 +1,95 @@
+"""Manage SSO for Add-ons with Home Assistant user."""
+import logging
+import hashlib
+
+from .const import (
+    FILE_HASSIO_AUTH, ATTR_PASSWORD, ATTR_USERNAME, ATTR_ADDON)
+from .coresys import CoreSysAttributes
+from .utils.json import JsonConfig
+from .validate import SCHEMA_AUTH_CONFIG
+from .exceptions import AuthError, HomeAssistantAPIError
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class Auth(JsonConfig, CoreSysAttributes):
+    """Manage SSO for Add-ons with Home Assistant user."""
+
+    def __init__(self, coresys):
+        """Initialize updater."""
+        super().__init__(FILE_HASSIO_AUTH, SCHEMA_AUTH_CONFIG)
+        self.coresys = coresys
+
+    def _check_cache(self, username, password):
+        """Check password in cache."""
+        username_h = _rehash(username)
+        password_h = _rehash(password, username)
+
+        if self._data.get(username_h) == password_h:
+            _LOGGER.info("Cache hit for %s", username)
+            return True
+
+        _LOGGER.warning("No cache hit for %s", username)
+        return False
+
+    def _update_cache(self, username, password):
+        """Cache a username, password."""
+        username_h = _rehash(username)
+        password_h = _rehash(password, username)
+
+        if self._data.get(username_h) == password_h:
+            return
+
+        self._data[username_h] = password_h
+        self.save_data()
+
+    def _dismatch_cache(self, username, password):
+        """Remove user from cache."""
+        username_h = _rehash(username)
+        password_h = _rehash(password, username)
+
+        if self._data.get(username_h) != password_h:
+            return
+
+        self._data.pop(username_h, None)
+        self.save_data()
+
+    async def check_login(self, addon, username, password):
+        """Check username login."""
+        if password is None:
+            _LOGGER.error("None as password is not supported!")
+            raise AuthError()
+        _LOGGER.info("Auth request from %s for %s", addon.slug, username)
+
+        # Check API state
+        if not await self.sys_homeassistant.check_api_state():
+            _LOGGER.info("Home Assistant not running, check cache")
+            return self._check_cache(username, password)
+
+        try:
+            async with self.sys_homeassistant.make_request(
+                    'post', 'api/hassio_auth', json={
+                        ATTR_USERNAME: username,
+                        ATTR_PASSWORD: password,
+                        ATTR_ADDON: addon.slug,
+                    }) as req:
+
+                if req.status == 200:
+                    _LOGGER.info("Success login from %s", username)
+                    self._update_cache(username, password)
+                    return True
+
+                _LOGGER.warning("Wrong login from %s", username)
+                self._dismatch_cache(username, password)
+                return False
+        except HomeAssistantAPIError:
+            _LOGGER.error("Can't request auth on Home Assistant!")
+
+        raise AuthError()
+
+
+def _rehash(value, salt2=""):
+    """Rehash a value."""
+    for idx in range(1, 20):
+        value = hashlib.sha256(f"{value}{idx}{salt2}".encode()).hexdigest()
+    return value
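
The cache above never stores credentials in clear text: _rehash chains 19 rounds of SHA-256, folding in the round index and, for passwords, the username as a second salt, and only digests are ever compared. A standalone sketch of the scheme, with _rehash reproduced from the file above and a plain dict standing in for the persisted JSON:

    """Sketch of the iterated-SHA256 credential cache from auth.py."""
    import hashlib

    def _rehash(value: str, salt2: str = "") -> str:
        """19 chained SHA-256 rounds, mixing in round index and salt."""
        for idx in range(1, 20):
            value = hashlib.sha256(f"{value}{idx}{salt2}".encode()).hexdigest()
        return value

    cache = {}  # stand-in for the persisted auth.json data
    cache[_rehash("paulus")] = _rehash("secret", "paulus")  # on successful login
    # Later, while Home Assistant is unreachable, the cache alone decides:
    assert cache.get(_rehash("paulus")) == _rehash("secret", "paulus")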
@@ -1,43 +1,47 @@
 """Bootstrap Hass.io."""
 import logging
 import os
-import signal
-import shutil
 from pathlib import Path
+import shutil
+import signal
 
 from colorlog import ColoredFormatter
 
-from .core import HassIO
 from .addons import AddonManager
 from .api import RestAPI
+from .arch import CpuArch
+from .auth import Auth
 from .const import SOCKET_DOCKER
+from .core import HassIO
 from .coresys import CoreSys
-from .supervisor import Supervisor
+from .dbus import DBusManager
+from .discovery import Discovery
+from .hassos import HassOS
 from .homeassistant import HomeAssistant
+from .host import HostManager
+from .services import ServiceManager
 from .snapshots import SnapshotManager
+from .supervisor import Supervisor
 from .tasks import Tasks
 from .updater import Updater
-from .services import ServiceManager
-from .discovery import Discovery
-from .host import HostManager
-from .dbus import DBusManager
-from .hassos import HassOS
 
 _LOGGER = logging.getLogger(__name__)
 
-ENV_SHARE = 'SUPERVISOR_SHARE'
-ENV_NAME = 'SUPERVISOR_NAME'
-ENV_REPO = 'HOMEASSISTANT_REPOSITORY'
+ENV_SHARE = "SUPERVISOR_SHARE"
+ENV_NAME = "SUPERVISOR_NAME"
+ENV_REPO = "HOMEASSISTANT_REPOSITORY"
 
-MACHINE_ID = Path('/etc/machine-id')
+MACHINE_ID = Path("/etc/machine-id")
 
 
-def initialize_coresys(loop):
+async def initialize_coresys():
     """Initialize HassIO coresys/objects."""
-    coresys = CoreSys(loop)
+    coresys = CoreSys()
 
     # Initialize core objects
     coresys.core = HassIO(coresys)
+    coresys.arch = CpuArch(coresys)
+    coresys.auth = Auth(coresys)
     coresys.updater = Updater(coresys)
     coresys.api = RestAPI(coresys)
     coresys.supervisor = Supervisor(coresys)
@@ -67,9 +71,8 @@ def initialize_system_data(coresys):
 
     # Home Assistant configuration folder
     if not config.path_homeassistant.is_dir():
-        _LOGGER.info(
-            "Create Home Assistant configuration folder %s",
-            config.path_homeassistant)
+        _LOGGER.info("Create Home Assistant configuration folder %s",
+                     config.path_homeassistant)
         config.path_homeassistant.mkdir()
 
     # hassio ssl folder
@@ -79,8 +82,8 @@ def initialize_system_data(coresys):
 
     # hassio addon data folder
     if not config.path_addons_data.is_dir():
-        _LOGGER.info(
-            "Create Hass.io Add-on data folder %s", config.path_addons_data)
+        _LOGGER.info("Create Hass.io Add-on data folder %s",
+                     config.path_addons_data)
         config.path_addons_data.mkdir(parents=True)
 
     if not config.path_addons_local.is_dir():
@@ -132,26 +135,26 @@ def migrate_system_env(coresys):
 def initialize_logging():
     """Setup the logging."""
     logging.basicConfig(level=logging.INFO)
-    fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
-           "[%(name)s] %(message)s")
-    colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
-    datefmt = '%y-%m-%d %H:%M:%S'
+    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
+    colorfmt = f"%(log_color)s{fmt}%(reset)s"
+    datefmt = "%y-%m-%d %H:%M:%S"
 
     # suppress overly verbose logs from libraries that aren't helpful
     logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
 
-    logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
-        colorfmt,
-        datefmt=datefmt,
-        reset=True,
-        log_colors={
-            'DEBUG': 'cyan',
-            'INFO': 'green',
-            'WARNING': 'yellow',
-            'ERROR': 'red',
-            'CRITICAL': 'red',
-        }
-    ))
+    logging.getLogger().handlers[0].setFormatter(
+        ColoredFormatter(
+            colorfmt,
+            datefmt=datefmt,
+            reset=True,
+            log_colors={
+                "DEBUG": "cyan",
+                "INFO": "green",
+                "WARNING": "yellow",
+                "ERROR": "red",
+                "CRITICAL": "red",
+            },
+        ))
 
 
 def check_environment():
@@ -170,12 +173,12 @@ def check_environment():
         return False
 
     # check socat exec
-    if not shutil.which('socat'):
+    if not shutil.which("socat"):
         _LOGGER.fatal("Can't find socat!")
         return False
 
     # check socat exec
-    if not shutil.which('gdbus'):
+    if not shutil.which("gdbus"):
         _LOGGER.fatal("Can't find gdbus!")
         return False
 
@@ -185,19 +188,19 @@ def check_environment():
 def reg_signal(loop):
     """Register SIGTERM and SIGKILL to stop system."""
     try:
-        loop.add_signal_handler(
-            signal.SIGTERM, lambda: loop.call_soon(loop.stop))
+        loop.add_signal_handler(signal.SIGTERM,
+                                lambda: loop.call_soon(loop.stop))
     except (ValueError, RuntimeError):
         _LOGGER.warning("Could not bind to SIGTERM")
 
     try:
-        loop.add_signal_handler(
-            signal.SIGHUP, lambda: loop.call_soon(loop.stop))
+        loop.add_signal_handler(signal.SIGHUP,
+                                lambda: loop.call_soon(loop.stop))
     except (ValueError, RuntimeError):
         _LOGGER.warning("Could not bind to SIGHUP")
 
     try:
-        loop.add_signal_handler(
-            signal.SIGINT, lambda: loop.call_soon(loop.stop))
+        loop.add_signal_handler(signal.SIGINT,
+                                lambda: loop.call_soon(loop.stop))
     except (ValueError, RuntimeError):
         _LOGGER.warning("Could not bind to SIGINT")
 
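
Since initialize_coresys is now a coroutine and CoreSys.__init__ calls asyncio.get_running_loop(), it must be awaited from inside a running loop; the __main__ entry point is not part of this diff, so the following is only a sketch of a plausible calling pattern:

    """Sketch: driving an async initializer plus signal handlers."""
    import asyncio
    import signal

    def reg_signal(loop: asyncio.AbstractEventLoop) -> None:
        # Same shape as reg_signal() above: stop the loop on SIGTERM.
        try:
            loop.add_signal_handler(signal.SIGTERM,
                                    lambda: loop.call_soon(loop.stop))
        except (ValueError, RuntimeError):
            pass  # e.g. signals unsupported on this platform/thread

    async def initialize() -> dict:
        loop = asyncio.get_running_loop()  # only valid inside a running loop
        reg_signal(loop)
        return {"loop": loop}

    coresys = asyncio.run(initialize())  # asyncio.run provides that loop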
466  hassio/const.py
@@ -2,20 +2,18 @@
 from pathlib import Path
 from ipaddress import ip_network
 
-HASSIO_VERSION = '134'
+HASSIO_VERSION = "144"
 
 URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
-URL_HASSIO_VERSION = \
-    "https://s3.amazonaws.com/hassio-version/{channel}.json"
-URL_HASSIO_APPARMOR = \
-    "https://s3.amazonaws.com/hassio-version/apparmor.txt"
+URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
+URL_HASSIO_APPARMOR = "https://s3.amazonaws.com/hassio-version/apparmor.txt"
 
-URL_HASSOS_OTA = (
-    "https://github.com/home-assistant/hassos/releases/download/"
-    "{version}/hassos_{board}-{version}.raucb")
+URL_HASSOS_OTA = ("https://github.com/home-assistant/hassos/releases/download/"
+                  "{version}/hassos_{board}-{version}.raucb")
 
 HASSIO_DATA = Path("/data")
 
+FILE_HASSIO_AUTH = Path(HASSIO_DATA, "auth.json")
 FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
 FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
 FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
@@ -25,233 +23,271 @@ FILE_HASSIO_DISCOVERY = Path(HASSIO_DATA, "discovery.json")
 
 SOCKET_DOCKER = Path("/var/run/docker.sock")
 
-DOCKER_NETWORK = 'hassio'
-DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
-DOCKER_NETWORK_RANGE = ip_network('172.30.33.0/24')
+DOCKER_NETWORK = "hassio"
+DOCKER_NETWORK_MASK = ip_network("172.30.32.0/23")
+DOCKER_NETWORK_RANGE = ip_network("172.30.33.0/24")
 
-LABEL_VERSION = 'io.hass.version'
-LABEL_ARCH = 'io.hass.arch'
-LABEL_TYPE = 'io.hass.type'
-LABEL_MACHINE = 'io.hass.machine'
+LABEL_VERSION = "io.hass.version"
+LABEL_ARCH = "io.hass.arch"
+LABEL_TYPE = "io.hass.type"
+LABEL_MACHINE = "io.hass.machine"
 
-META_ADDON = 'addon'
-META_SUPERVISOR = 'supervisor'
-META_HOMEASSISTANT = 'homeassistant'
+META_ADDON = "addon"
+META_SUPERVISOR = "supervisor"
+META_HOMEASSISTANT = "homeassistant"
 
-JSON_RESULT = 'result'
-JSON_DATA = 'data'
-JSON_MESSAGE = 'message'
+JSON_RESULT = "result"
+JSON_DATA = "data"
+JSON_MESSAGE = "message"
 
-RESULT_ERROR = 'error'
-RESULT_OK = 'ok'
+RESULT_ERROR = "error"
+RESULT_OK = "ok"
 
-CONTENT_TYPE_BINARY = 'application/octet-stream'
-CONTENT_TYPE_PNG = 'image/png'
-CONTENT_TYPE_JSON = 'application/json'
-CONTENT_TYPE_TEXT = 'text/plain'
-CONTENT_TYPE_TAR = 'application/tar'
-HEADER_HA_ACCESS = 'x-ha-access'
-HEADER_TOKEN = 'x-hassio-key'
+CONTENT_TYPE_BINARY = "application/octet-stream"
+CONTENT_TYPE_PNG = "image/png"
+CONTENT_TYPE_JSON = "application/json"
+CONTENT_TYPE_TEXT = "text/plain"
+CONTENT_TYPE_TAR = "application/tar"
+CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
+HEADER_HA_ACCESS = "x-ha-access"
+HEADER_TOKEN = "x-hassio-key"
 
-ENV_TOKEN = 'HASSIO_TOKEN'
-ENV_TIME = 'TZ'
+ENV_TOKEN = "HASSIO_TOKEN"
+ENV_TIME = "TZ"
 
-REQUEST_FROM = 'HASSIO_FROM'
+REQUEST_FROM = "HASSIO_FROM"
 
-ATTR_MACHINE = 'machine'
-ATTR_WAIT_BOOT = 'wait_boot'
-ATTR_DEPLOYMENT = 'deployment'
-ATTR_WATCHDOG = 'watchdog'
-ATTR_CHANGELOG = 'changelog'
-ATTR_DATE = 'date'
-ATTR_ARCH = 'arch'
-ATTR_LONG_DESCRIPTION = 'long_description'
-ATTR_HOSTNAME = 'hostname'
-ATTR_TIMEZONE = 'timezone'
-ATTR_ARGS = 'args'
-ATTR_OPERATING_SYSTEM = 'operating_system'
-ATTR_CHASSIS = 'chassis'
-ATTR_TYPE = 'type'
-ATTR_SOURCE = 'source'
-ATTR_FEATURES = 'features'
-ATTR_ADDONS = 'addons'
-ATTR_PROVIDERS = 'providers'
-ATTR_VERSION = 'version'
-ATTR_VERSION_LATEST = 'version_latest'
-ATTR_AUTO_UART = 'auto_uart'
-ATTR_LAST_BOOT = 'last_boot'
-ATTR_LAST_VERSION = 'last_version'
-ATTR_CHANNEL = 'channel'
-ATTR_NAME = 'name'
-ATTR_SLUG = 'slug'
-ATTR_DESCRIPTON = 'description'
-ATTR_STARTUP = 'startup'
-ATTR_BOOT = 'boot'
-ATTR_PORTS = 'ports'
-ATTR_PORT = 'port'
-ATTR_SSL = 'ssl'
-ATTR_MAP = 'map'
-ATTR_WEBUI = 'webui'
-ATTR_OPTIONS = 'options'
-ATTR_INSTALLED = 'installed'
-ATTR_DETACHED = 'detached'
-ATTR_STATE = 'state'
-ATTR_SCHEMA = 'schema'
-ATTR_IMAGE = 'image'
-ATTR_ICON = 'icon'
-ATTR_LOGO = 'logo'
-ATTR_STDIN = 'stdin'
-ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
-ATTR_REPOSITORY = 'repository'
-ATTR_REPOSITORIES = 'repositories'
-ATTR_URL = 'url'
-ATTR_MAINTAINER = 'maintainer'
-ATTR_PASSWORD = 'password'
-ATTR_TOTP = 'totp'
-ATTR_INITIALIZE = 'initialize'
-ATTR_LOCATON = 'location'
-ATTR_BUILD = 'build'
-ATTR_DEVICES = 'devices'
-ATTR_ENVIRONMENT = 'environment'
-ATTR_HOST_NETWORK = 'host_network'
-ATTR_HOST_PID = 'host_pid'
-ATTR_HOST_IPC = 'host_ipc'
-ATTR_HOST_DBUS = 'host_dbus'
-ATTR_NETWORK = 'network'
-ATTR_TMPFS = 'tmpfs'
-ATTR_PRIVILEGED = 'privileged'
-ATTR_USER = 'user'
-ATTR_SYSTEM = 'system'
-ATTR_SNAPSHOTS = 'snapshots'
-ATTR_HOMEASSISTANT = 'homeassistant'
-ATTR_HASSIO = 'hassio'
-ATTR_HASSIO_API = 'hassio_api'
-ATTR_HOMEASSISTANT_API = 'homeassistant_api'
-ATTR_UUID = 'uuid'
-ATTR_FOLDERS = 'folders'
-ATTR_SIZE = 'size'
-ATTR_TYPE = 'type'
-ATTR_TIMEOUT = 'timeout'
-ATTR_AUTO_UPDATE = 'auto_update'
-ATTR_CUSTOM = 'custom'
-ATTR_AUDIO = 'audio'
-ATTR_AUDIO_INPUT = 'audio_input'
-ATTR_AUDIO_OUTPUT = 'audio_output'
-ATTR_INPUT = 'input'
-ATTR_OUTPUT = 'output'
-ATTR_DISK = 'disk'
-ATTR_SERIAL = 'serial'
-ATTR_SECURITY = 'security'
-ATTR_BUILD_FROM = 'build_from'
-ATTR_SQUASH = 'squash'
-ATTR_GPIO = 'gpio'
-ATTR_LEGACY = 'legacy'
-ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'
-ATTR_CPU_PERCENT = 'cpu_percent'
-ATTR_NETWORK_RX = 'network_rx'
-ATTR_NETWORK_TX = 'network_tx'
-ATTR_MEMORY_LIMIT = 'memory_limit'
-ATTR_MEMORY_USAGE = 'memory_usage'
-ATTR_BLK_READ = 'blk_read'
-ATTR_BLK_WRITE = 'blk_write'
-ATTR_ADDON = 'addon'
-ATTR_AVAILABLE = 'available'
-ATTR_HOST = 'host'
-ATTR_USERNAME = 'username'
-ATTR_PROTOCOL = 'protocol'
-ATTR_DISCOVERY = 'discovery'
-ATTR_CONFIG = 'config'
-ATTR_SERVICES = 'services'
-ATTR_SERVICE = 'service'
-ATTR_DISCOVERY = 'discovery'
-ATTR_PROTECTED = 'protected'
-ATTR_CRYPTO = 'crypto'
-ATTR_BRANCH = 'branch'
-ATTR_KERNEL = 'kernel'
-ATTR_APPARMOR = 'apparmor'
-ATTR_DEVICETREE = 'devicetree'
-ATTR_CPE = 'cpe'
-ATTR_BOARD = 'board'
-ATTR_HASSOS = 'hassos'
-ATTR_HASSOS_CLI = 'hassos_cli'
-ATTR_VERSION_CLI = 'version_cli'
-ATTR_VERSION_CLI_LATEST = 'version_cli_latest'
-ATTR_REFRESH_TOKEN = 'refresh_token'
-ATTR_ACCESS_TOKEN = 'access_token'
-ATTR_DOCKER_API = 'docker_api'
-ATTR_FULL_ACCESS = 'full_access'
-ATTR_PROTECTED = 'protected'
-ATTR_RATING = 'rating'
-ATTR_HASSIO_ROLE = 'hassio_role'
-ATTR_SUPERVISOR = 'supervisor'
+ATTR_MACHINE = "machine"
+ATTR_WAIT_BOOT = "wait_boot"
+ATTR_DEPLOYMENT = "deployment"
+ATTR_WATCHDOG = "watchdog"
+ATTR_CHANGELOG = "changelog"
+ATTR_DATE = "date"
+ATTR_ARCH = "arch"
+ATTR_LONG_DESCRIPTION = "long_description"
+ATTR_HOSTNAME = "hostname"
+ATTR_TIMEZONE = "timezone"
+ATTR_ARGS = "args"
+ATTR_OPERATING_SYSTEM = "operating_system"
+ATTR_CHASSIS = "chassis"
+ATTR_TYPE = "type"
+ATTR_SOURCE = "source"
+ATTR_FEATURES = "features"
+ATTR_ADDONS = "addons"
+ATTR_PROVIDERS = "providers"
+ATTR_VERSION = "version"
+ATTR_VERSION_LATEST = "version_latest"
+ATTR_AUTO_UART = "auto_uart"
+ATTR_LAST_BOOT = "last_boot"
+ATTR_LAST_VERSION = "last_version"
+ATTR_CHANNEL = "channel"
+ATTR_NAME = "name"
+ATTR_SLUG = "slug"
+ATTR_DESCRIPTON = "description"
+ATTR_STARTUP = "startup"
+ATTR_BOOT = "boot"
+ATTR_PORTS = "ports"
+ATTR_PORT = "port"
+ATTR_SSL = "ssl"
+ATTR_MAP = "map"
+ATTR_WEBUI = "webui"
+ATTR_OPTIONS = "options"
+ATTR_INSTALLED = "installed"
+ATTR_DETACHED = "detached"
+ATTR_STATE = "state"
+ATTR_SCHEMA = "schema"
+ATTR_IMAGE = "image"
+ATTR_ICON = "icon"
+ATTR_LOGO = "logo"
+ATTR_STDIN = "stdin"
+ATTR_ADDONS_REPOSITORIES = "addons_repositories"
+ATTR_REPOSITORY = "repository"
+ATTR_REPOSITORIES = "repositories"
+ATTR_URL = "url"
+ATTR_MAINTAINER = "maintainer"
+ATTR_PASSWORD = "password"
+ATTR_TOTP = "totp"
+ATTR_INITIALIZE = "initialize"
+ATTR_LOCATON = "location"
+ATTR_BUILD = "build"
+ATTR_DEVICES = "devices"
+ATTR_ENVIRONMENT = "environment"
+ATTR_HOST_NETWORK = "host_network"
+ATTR_HOST_PID = "host_pid"
+ATTR_HOST_IPC = "host_ipc"
+ATTR_HOST_DBUS = "host_dbus"
+ATTR_NETWORK = "network"
+ATTR_TMPFS = "tmpfs"
+ATTR_PRIVILEGED = "privileged"
+ATTR_USER = "user"
+ATTR_SYSTEM = "system"
+ATTR_SNAPSHOTS = "snapshots"
+ATTR_HOMEASSISTANT = "homeassistant"
+ATTR_HASSIO = "hassio"
+ATTR_HASSIO_API = "hassio_api"
+ATTR_HOMEASSISTANT_API = "homeassistant_api"
+ATTR_UUID = "uuid"
+ATTR_FOLDERS = "folders"
+ATTR_SIZE = "size"
+ATTR_TYPE = "type"
+ATTR_TIMEOUT = "timeout"
+ATTR_AUTO_UPDATE = "auto_update"
+ATTR_CUSTOM = "custom"
+ATTR_AUDIO = "audio"
+ATTR_AUDIO_INPUT = "audio_input"
+ATTR_AUDIO_OUTPUT = "audio_output"
+ATTR_INPUT = "input"
+ATTR_OUTPUT = "output"
+ATTR_DISK = "disk"
+ATTR_SERIAL = "serial"
+ATTR_SECURITY = "security"
+ATTR_BUILD_FROM = "build_from"
+ATTR_SQUASH = "squash"
+ATTR_GPIO = "gpio"
+ATTR_LEGACY = "legacy"
+ATTR_ADDONS_CUSTOM_LIST = "addons_custom_list"
+ATTR_CPU_PERCENT = "cpu_percent"
+ATTR_NETWORK_RX = "network_rx"
+ATTR_NETWORK_TX = "network_tx"
+ATTR_MEMORY_LIMIT = "memory_limit"
+ATTR_MEMORY_USAGE = "memory_usage"
+ATTR_BLK_READ = "blk_read"
+ATTR_BLK_WRITE = "blk_write"
+ATTR_ADDON = "addon"
+ATTR_AVAILABLE = "available"
+ATTR_HOST = "host"
+ATTR_USERNAME = "username"
+ATTR_PROTOCOL = "protocol"
+ATTR_DISCOVERY = "discovery"
+ATTR_CONFIG = "config"
+ATTR_SERVICES = "services"
+ATTR_SERVICE = "service"
+ATTR_DISCOVERY = "discovery"
+ATTR_PROTECTED = "protected"
+ATTR_CRYPTO = "crypto"
+ATTR_BRANCH = "branch"
+ATTR_KERNEL = "kernel"
+ATTR_APPARMOR = "apparmor"
+ATTR_DEVICETREE = "devicetree"
+ATTR_CPE = "cpe"
+ATTR_BOARD = "board"
+ATTR_HASSOS = "hassos"
+ATTR_HASSOS_CLI = "hassos_cli"
+ATTR_VERSION_CLI = "version_cli"
+ATTR_VERSION_CLI_LATEST = "version_cli_latest"
+ATTR_REFRESH_TOKEN = "refresh_token"
+ATTR_ACCESS_TOKEN = "access_token"
+ATTR_DOCKER_API = "docker_api"
+ATTR_FULL_ACCESS = "full_access"
+ATTR_PROTECTED = "protected"
+ATTR_RATING = "rating"
+ATTR_HASSIO_ROLE = "hassio_role"
+ATTR_SUPERVISOR = "supervisor"
+ATTR_AUTH_API = "auth_api"
+ATTR_KERNEL_MODULES = "kernel_modules"
+ATTR_SUPPORTED_ARCH = "supported_arch"
 
-SERVICE_MQTT = 'mqtt'
-PROVIDE_SERVICE = 'provide'
-NEED_SERVICE = 'need'
-WANT_SERVICE = 'want'
+SERVICE_MQTT = "mqtt"
+PROVIDE_SERVICE = "provide"
+NEED_SERVICE = "need"
+WANT_SERVICE = "want"
 
-STARTUP_INITIALIZE = 'initialize'
-STARTUP_SYSTEM = 'system'
-STARTUP_SERVICES = 'services'
-STARTUP_APPLICATION = 'application'
-STARTUP_ONCE = 'once'
+STARTUP_INITIALIZE = "initialize"
+STARTUP_SYSTEM = "system"
+STARTUP_SERVICES = "services"
+STARTUP_APPLICATION = "application"
+STARTUP_ONCE = "once"
 
-BOOT_AUTO = 'auto'
-BOOT_MANUAL = 'manual'
+STARTUP_ALL = [
+    STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
+    STARTUP_APPLICATION
+]
 
-STATE_STARTED = 'started'
-STATE_STOPPED = 'stopped'
-STATE_NONE = 'none'
+BOOT_AUTO = "auto"
+BOOT_MANUAL = "manual"
 
-MAP_CONFIG = 'config'
-MAP_SSL = 'ssl'
-MAP_ADDONS = 'addons'
-MAP_BACKUP = 'backup'
-MAP_SHARE = 'share'
+STATE_STARTED = "started"
+STATE_STOPPED = "stopped"
+STATE_NONE = "none"
 
-ARCH_ARMHF = 'armhf'
-ARCH_AARCH64 = 'aarch64'
-ARCH_AMD64 = 'amd64'
-ARCH_I386 = 'i386'
+MAP_CONFIG = "config"
+MAP_SSL = "ssl"
+MAP_ADDONS = "addons"
+MAP_BACKUP = "backup"
+MAP_SHARE = "share"
 
-CHANNEL_STABLE = 'stable'
-CHANNEL_BETA = 'beta'
-CHANNEL_DEV = 'dev'
+ARCH_ARMHF = "armhf"
+ARCH_ARMV7 = "armv7"
+ARCH_AARCH64 = "aarch64"
+ARCH_AMD64 = "amd64"
+ARCH_I386 = "i386"
 
-REPOSITORY_CORE = 'core'
-REPOSITORY_LOCAL = 'local'
+ARCH_ALL = [ARCH_ARMHF, ARCH_ARMV7, ARCH_AARCH64, ARCH_AMD64, ARCH_I386]
 
-FOLDER_HOMEASSISTANT = 'homeassistant'
-FOLDER_SHARE = 'share'
-FOLDER_ADDONS = 'addons/local'
-FOLDER_SSL = 'ssl'
+CHANNEL_STABLE = "stable"
+CHANNEL_BETA = "beta"
+CHANNEL_DEV = "dev"
 
-SNAPSHOT_FULL = 'full'
-SNAPSHOT_PARTIAL = 'partial'
+REPOSITORY_CORE = "core"
+REPOSITORY_LOCAL = "local"
 
-CRYPTO_AES128 = 'aes128'
+FOLDER_HOMEASSISTANT = "homeassistant"
+FOLDER_SHARE = "share"
+FOLDER_ADDONS = "addons/local"
+FOLDER_SSL = "ssl"
 
-SECURITY_PROFILE = 'profile'
-SECURITY_DEFAULT = 'default'
-SECURITY_DISABLE = 'disable'
+SNAPSHOT_FULL = "full"
+SNAPSHOT_PARTIAL = "partial"
 
-PRIVILEGED_NET_ADMIN = 'NET_ADMIN'
-PRIVILEGED_SYS_ADMIN = 'SYS_ADMIN'
-PRIVILEGED_SYS_RAWIO = 'SYS_RAWIO'
-PRIVILEGED_IPC_LOCK = 'IPC_LOCK'
-PRIVILEGED_SYS_TIME = 'SYS_TIME'
-PRIVILEGED_SYS_NICE = 'SYS_NICE'
-PRIVILEGED_SYS_RESOURCE = 'SYS_RESOURCE'
-PRIVILEGED_SYS_PTRACE = 'SYS_PTRACE'
-PRIVILEGED_DAC_READ_SEARCH = 'DAC_READ_SEARCH'
+CRYPTO_AES128 = "aes128"
 
-FEATURES_SHUTDOWN = 'shutdown'
-FEATURES_REBOOT = 'reboot'
-FEATURES_HASSOS = 'hassos'
-FEATURES_HOSTNAME = 'hostname'
-FEATURES_SERVICES = 'services'
+SECURITY_PROFILE = "profile"
+SECURITY_DEFAULT = "default"
+SECURITY_DISABLE = "disable"
 
-ROLE_DEFAULT = 'default'
-ROLE_HOMEASSISTANT = 'homeassistant'
-ROLE_MANAGER = 'manager'
-ROLE_ADMIN = 'admin'
+PRIVILEGED_NET_ADMIN = "NET_ADMIN"
+PRIVILEGED_SYS_ADMIN = "SYS_ADMIN"
+PRIVILEGED_SYS_RAWIO = "SYS_RAWIO"
+PRIVILEGED_IPC_LOCK = "IPC_LOCK"
+PRIVILEGED_SYS_TIME = "SYS_TIME"
+PRIVILEGED_SYS_NICE = "SYS_NICE"
+PRIVILEGED_SYS_MODULE = "SYS_MODULE"
+PRIVILEGED_SYS_RESOURCE = "SYS_RESOURCE"
+PRIVILEGED_SYS_PTRACE = "SYS_PTRACE"
+PRIVILEGED_DAC_READ_SEARCH = "DAC_READ_SEARCH"
+
+PRIVILEGED_ALL = [
+    PRIVILEGED_NET_ADMIN,
+    PRIVILEGED_SYS_ADMIN,
+    PRIVILEGED_SYS_RAWIO,
+    PRIVILEGED_IPC_LOCK,
+    PRIVILEGED_SYS_TIME,
+    PRIVILEGED_SYS_NICE,
+    PRIVILEGED_SYS_RESOURCE,
+    PRIVILEGED_SYS_PTRACE,
+    PRIVILEGED_SYS_MODULE,
+    PRIVILEGED_DAC_READ_SEARCH,
+]
+
+FEATURES_SHUTDOWN = "shutdown"
+FEATURES_REBOOT = "reboot"
+FEATURES_HASSOS = "hassos"
+FEATURES_HOSTNAME = "hostname"
+FEATURES_SERVICES = "services"
+
+ROLE_DEFAULT = "default"
+ROLE_HOMEASSISTANT = "homeassistant"
+ROLE_BACKUP = "backup"
+ROLE_MANAGER = "manager"
+ROLE_ADMIN = "admin"
+
+ROLE_ALL = [
+    ROLE_DEFAULT,
+    ROLE_HOMEASSISTANT,
+    ROLE_BACKUP,
+    ROLE_MANAGER,
+    ROLE_ADMIN,
+]
+
+CHAN_ID = "chan_id"
+CHAN_TYPE = "chan_type"
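
List constants such as ARCH_ALL, PRIVILEGED_ALL, and ROLE_ALL exist mainly to back schema validation elsewhere in the tree (validate.py is not shown in this diff), so a value is checked by simple membership. A hypothetical validator in that style:

    """Sketch: membership validation backed by the new list constants."""
    ARCH_ALL = ["armhf", "armv7", "aarch64", "amd64", "i386"]

    def validate_arch(value: str) -> str:
        # Hypothetical helper in the spirit of hassio's voluptuous schemas.
        if value not in ARCH_ALL:
            raise ValueError(f"invalid arch {value!r}")
        return value

    print(validate_arch("aarch64"))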
@@ -6,8 +6,8 @@ import logging
 import async_timeout
 
 from .coresys import CoreSysAttributes
-from .const import (
-    STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE)
+from .const import (STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION,
+                    STARTUP_INITIALIZE)
 from .exceptions import HassioError, HomeAssistantError
 
 _LOGGER = logging.getLogger(__name__)
@@ -31,12 +31,15 @@ class HassIO(CoreSysAttributes):
         # Load Host
         await self.sys_host.load()
 
-        # Load HassOS
-        await self.sys_hassos.load()
-
         # Load Home Assistant
         await self.sys_homeassistant.load()
 
+        # Load CPU/Arch
+        await self.sys_arch.load()
+
+        # Load HassOS
+        await self.sys_hassos.load()
+
         # Load Add-ons
         await self.sys_addons.load()
@@ -61,12 +64,11 @@ class HassIO(CoreSysAttributes):
     async def start(self):
         """Start Hass.io orchestration."""
         # on release channel, try update itself
-        # on dev mode, only read new versions
-        if not self.sys_dev and self.sys_supervisor.need_update:
-            if await self.sys_supervisor.update():
+        if self.sys_supervisor.need_update:
+            if self.sys_dev:
+                _LOGGER.warning("Ignore Hass.io updates on dev!")
+            elif await self.sys_supervisor.update():
                 return
-        else:
-            _LOGGER.info("Ignore Hass.io auto updates on dev channel")
 
         # start api
        await self.sys_api.start()
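The reworked guard inverts the old condition so the dev-channel case is handled explicitly instead of falling through an `else`. A minimal sketch of the resulting control flow, with the supervisor and logger objects as illustrative stand-ins:

```python
# Sketch of the reworked update guard in HassIO.start(): on the dev
# channel an available update is logged and skipped instead of applied.
async def start(sys_dev, supervisor, logger):
    if supervisor.need_update:
        if sys_dev:
            logger.warning("Ignore Hass.io updates on dev!")
        elif await supervisor.update():
            return  # the Supervisor restarts itself after a successful update
    # ... continue with normal startup (API, watchdog, add-ons, ...)
```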
@@ -1,287 +1,455 @@
 """Handle core shared data."""
+from __future__ import annotations
+import asyncio
+from typing import TYPE_CHECKING
 
 import aiohttp
 
-from .const import CHANNEL_DEV
 from .config import CoreConfig
+from .const import CHANNEL_DEV
 from .docker import DockerAPI
 from .misc.dns import DNSForward
 from .misc.hardware import Hardware
 from .misc.scheduler import Scheduler
 
+if TYPE_CHECKING:
+    from .addons import AddonManager
+    from .api import RestAPI
+    from .arch import CpuArch
+    from .auth import Auth
+    from .core import HassIO
+    from .dbus import DBusManager
+    from .discovery import Discovery
+    from .hassos import HassOS
+    from .homeassistant import HomeAssistant
+    from .host import HostManager
+    from .services import ServiceManager
+    from .snapshots import SnapshotManager
+    from .supervisor import Supervisor
+    from .tasks import Tasks
+    from .updater import Updater
 
 
 class CoreSys:
     """Class that handle all shared data."""
 
-    def __init__(self, loop):
+    def __init__(self):
         """Initialize coresys."""
         # Static attributes
         self.exit_code = 0
-        self.machine_id = None
+        self.machine_id: str = None
 
         # External objects
-        self._loop = loop
-        self._websession = aiohttp.ClientSession(loop=loop)
-        self._websession_ssl = aiohttp.ClientSession(
-            connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)
+        self._loop: asyncio.BaseEventLoop = asyncio.get_running_loop()
+        self._websession: aiohttp.ClientSession = aiohttp.ClientSession()
+        self._websession_ssl: aiohttp.ClientSession = aiohttp.ClientSession(
+            connector=aiohttp.TCPConnector(ssl=False))
 
         # Global objects
-        self._config = CoreConfig()
-        self._hardware = Hardware()
-        self._docker = DockerAPI()
-        self._scheduler = Scheduler(loop=loop)
-        self._dns = DNSForward(loop=loop)
+        self._config: CoreConfig = CoreConfig()
+        self._hardware: Hardware = Hardware()
+        self._docker: DockerAPI = DockerAPI()
+        self._scheduler: Scheduler = Scheduler()
+        self._dns: DNSForward = DNSForward()
 
         # Internal objects pointers
-        self._core = None
-        self._homeassistant = None
-        self._supervisor = None
-        self._addons = None
-        self._api = None
-        self._updater = None
-        self._snapshots = None
-        self._tasks = None
-        self._host = None
-        self._dbus = None
-        self._hassos = None
-        self._services = None
-        self._discovery = None
+        self._core: HassIO = None
+        self._arch: CpuArch = None
+        self._auth: Auth = None
+        self._homeassistant: HomeAssistant = None
+        self._supervisor: Supervisor = None
+        self._addons: AddonManager = None
+        self._api: RestAPI = None
+        self._updater: Updater = None
+        self._snapshots: SnapshotManager = None
+        self._tasks: Tasks = None
+        self._host: HostManager = None
+        self._dbus: DBusManager = None
+        self._hassos: HassOS = None
+        self._services: ServiceManager = None
+        self._discovery: Discovery = None
 
-    @property
-    def arch(self):
-        """Return running arch of the Hass.io system."""
-        if self._supervisor:
-            return self._supervisor.arch
-        return None
-
     @property
-    def machine(self):
+    def machine(self) -> str:
         """Return running machine type of the Hass.io system."""
         if self._homeassistant:
             return self._homeassistant.machine
         return None
 
     @property
-    def dev(self):
+    def dev(self) -> str:
         """Return True if we run dev mode."""
         return self._updater.channel == CHANNEL_DEV
 
     @property
-    def timezone(self):
+    def timezone(self) -> str:
         """Return timezone."""
         return self._config.timezone
 
     @property
-    def loop(self):
+    def loop(self) -> asyncio.BaseEventLoop:
         """Return loop object."""
         return self._loop
 
     @property
-    def websession(self):
+    def websession(self) -> aiohttp.ClientSession:
         """Return websession object."""
         return self._websession
 
     @property
-    def websession_ssl(self):
+    def websession_ssl(self) -> aiohttp.ClientSession:
         """Return websession object with disabled SSL."""
         return self._websession_ssl
 
     @property
-    def config(self):
+    def config(self) -> CoreConfig:
         """Return CoreConfig object."""
         return self._config
 
     @property
-    def hardware(self):
+    def hardware(self) -> Hardware:
         """Return Hardware object."""
         return self._hardware
 
     @property
-    def docker(self):
+    def docker(self) -> DockerAPI:
         """Return DockerAPI object."""
         return self._docker
 
     @property
-    def scheduler(self):
+    def scheduler(self) -> Scheduler:
         """Return Scheduler object."""
         return self._scheduler
 
     @property
-    def dns(self):
+    def dns(self) -> DNSForward:
         """Return DNSForward object."""
         return self._dns
 
     @property
-    def core(self):
+    def core(self) -> HassIO:
         """Return HassIO object."""
         return self._core
 
     @core.setter
-    def core(self, value):
+    def core(self, value: HassIO):
         """Set a Hass.io object."""
         if self._core:
             raise RuntimeError("Hass.io already set!")
         self._core = value
 
     @property
-    def homeassistant(self):
+    def arch(self) -> CpuArch:
+        """Return CpuArch object."""
+        return self._arch
+
+    @arch.setter
+    def arch(self, value: CpuArch):
+        """Set a CpuArch object."""
+        if self._arch:
+            raise RuntimeError("CpuArch already set!")
+        self._arch = value
+
+    @property
+    def auth(self) -> Auth:
+        """Return Auth object."""
+        return self._auth
+
+    @auth.setter
+    def auth(self, value: Auth):
+        """Set an Auth object."""
+        if self._auth:
+            raise RuntimeError("Auth already set!")
+        self._auth = value
+
+    @property
+    def homeassistant(self) -> HomeAssistant:
         """Return Home Assistant object."""
         return self._homeassistant
 
     @homeassistant.setter
-    def homeassistant(self, value):
+    def homeassistant(self, value: HomeAssistant):
         """Set a HomeAssistant object."""
         if self._homeassistant:
             raise RuntimeError("Home Assistant already set!")
         self._homeassistant = value
 
     @property
-    def supervisor(self):
+    def supervisor(self) -> Supervisor:
         """Return Supervisor object."""
         return self._supervisor
 
     @supervisor.setter
-    def supervisor(self, value):
+    def supervisor(self, value: Supervisor):
         """Set a Supervisor object."""
         if self._supervisor:
             raise RuntimeError("Supervisor already set!")
         self._supervisor = value
 
     @property
-    def api(self):
+    def api(self) -> RestAPI:
         """Return API object."""
         return self._api
 
     @api.setter
-    def api(self, value):
+    def api(self, value: RestAPI):
         """Set an API object."""
         if self._api:
             raise RuntimeError("API already set!")
         self._api = value
 
     @property
-    def updater(self):
+    def updater(self) -> Updater:
         """Return Updater object."""
         return self._updater
 
     @updater.setter
-    def updater(self, value):
+    def updater(self, value: Updater):
         """Set an Updater object."""
         if self._updater:
             raise RuntimeError("Updater already set!")
         self._updater = value
 
     @property
-    def addons(self):
+    def addons(self) -> AddonManager:
         """Return AddonManager object."""
         return self._addons
 
     @addons.setter
-    def addons(self, value):
+    def addons(self, value: AddonManager):
         """Set an AddonManager object."""
         if self._addons:
             raise RuntimeError("AddonManager already set!")
         self._addons = value
 
     @property
-    def snapshots(self):
+    def snapshots(self) -> SnapshotManager:
         """Return SnapshotManager object."""
         return self._snapshots
 
     @snapshots.setter
-    def snapshots(self, value):
+    def snapshots(self, value: SnapshotManager):
         """Set a SnapshotManager object."""
         if self._snapshots:
             raise RuntimeError("SnapshotsManager already set!")
         self._snapshots = value
 
     @property
-    def tasks(self):
+    def tasks(self) -> Tasks:
         """Return Tasks object."""
         return self._tasks
 
     @tasks.setter
-    def tasks(self, value):
+    def tasks(self, value: Tasks):
         """Set a Tasks object."""
         if self._tasks:
             raise RuntimeError("Tasks already set!")
         self._tasks = value
 
     @property
-    def services(self):
+    def services(self) -> ServiceManager:
         """Return ServiceManager object."""
         return self._services
 
     @services.setter
-    def services(self, value):
+    def services(self, value: ServiceManager):
         """Set a ServiceManager object."""
         if self._services:
             raise RuntimeError("Services already set!")
         self._services = value
 
     @property
-    def discovery(self):
+    def discovery(self) -> Discovery:
         """Return Discovery object."""
         return self._discovery
 
     @discovery.setter
-    def discovery(self, value):
+    def discovery(self, value: Discovery):
         """Set a Discovery object."""
         if self._discovery:
             raise RuntimeError("Discovery already set!")
         self._discovery = value
 
     @property
-    def dbus(self):
+    def dbus(self) -> DBusManager:
         """Return DBusManager object."""
         return self._dbus
 
     @dbus.setter
-    def dbus(self, value):
+    def dbus(self, value: DBusManager):
         """Set a DBusManager object."""
         if self._dbus:
             raise RuntimeError("DBusManager already set!")
         self._dbus = value
 
     @property
-    def host(self):
+    def host(self) -> HostManager:
         """Return HostManager object."""
         return self._host
 
     @host.setter
-    def host(self, value):
+    def host(self, value: HostManager):
         """Set a HostManager object."""
         if self._host:
             raise RuntimeError("HostManager already set!")
         self._host = value
 
     @property
-    def hassos(self):
+    def hassos(self) -> HassOS:
         """Return HassOS object."""
         return self._hassos
 
     @hassos.setter
-    def hassos(self, value):
+    def hassos(self, value: HassOS):
         """Set a HassOS object."""
         if self._hassos:
             raise RuntimeError("HassOS already set!")
         self._hassos = value
 
     def run_in_executor(self, funct, *args):
         """Wrapper for executor pool."""
         return self._loop.run_in_executor(None, funct, *args)
 
     def create_task(self, coroutine):
         """Wrapper for async task."""
         return self._loop.create_task(coroutine)
 
 
 class CoreSysAttributes:
     """Inherit basic CoreSysAttributes."""
 
     coresys = None
 
-    def __getattr__(self, name):
-        """Mapping to coresys."""
-        if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
-            return getattr(self.coresys, name[4:])
-        raise AttributeError(f"Can't resolve {name} on {self}")
+    @property
+    def sys_machine(self) -> str:
+        """Return running machine type of the Hass.io system."""
+        return self.coresys.machine
+
+    @property
+    def sys_dev(self) -> str:
+        """Return True if we run dev mode."""
+        return self.coresys.dev
+
+    @property
+    def sys_timezone(self) -> str:
+        """Return timezone."""
+        return self.coresys.timezone
+
+    @property
+    def sys_machine_id(self) -> str:
+        """Return machine id."""
+        return self.coresys.machine_id
+
+    @property
+    def sys_loop(self) -> asyncio.BaseEventLoop:
+        """Return loop object."""
+        return self.coresys.loop
+
+    @property
+    def sys_websession(self) -> aiohttp.ClientSession:
+        """Return websession object."""
+        return self.coresys.websession
+
+    @property
+    def sys_websession_ssl(self) -> aiohttp.ClientSession:
+        """Return websession object with disabled SSL."""
+        return self.coresys.websession_ssl
+
+    @property
+    def sys_config(self) -> CoreConfig:
+        """Return CoreConfig object."""
+        return self.coresys.config
+
+    @property
+    def sys_hardware(self) -> Hardware:
+        """Return Hardware object."""
+        return self.coresys.hardware
+
+    @property
+    def sys_docker(self) -> DockerAPI:
+        """Return DockerAPI object."""
+        return self.coresys.docker
+
+    @property
+    def sys_scheduler(self) -> Scheduler:
+        """Return Scheduler object."""
+        return self.coresys.scheduler
+
+    @property
+    def sys_dns(self) -> DNSForward:
+        """Return DNSForward object."""
+        return self.coresys.dns
+
+    @property
+    def sys_core(self) -> HassIO:
+        """Return HassIO object."""
+        return self.coresys.core
+
+    @property
+    def sys_arch(self) -> CpuArch:
+        """Return CpuArch object."""
+        return self.coresys.arch
+
+    @property
+    def sys_auth(self) -> Auth:
+        """Return Auth object."""
+        return self.coresys.auth
+
+    @property
+    def sys_homeassistant(self) -> HomeAssistant:
+        """Return Home Assistant object."""
+        return self.coresys.homeassistant
+
+    @property
+    def sys_supervisor(self) -> Supervisor:
+        """Return Supervisor object."""
+        return self.coresys.supervisor
+
+    @property
+    def sys_api(self) -> RestAPI:
+        """Return API object."""
+        return self.coresys.api
+
+    @property
+    def sys_updater(self) -> Updater:
+        """Return Updater object."""
+        return self.coresys.updater
+
+    @property
+    def sys_addons(self) -> AddonManager:
+        """Return AddonManager object."""
+        return self.coresys.addons
+
+    @property
+    def sys_snapshots(self) -> SnapshotManager:
+        """Return SnapshotManager object."""
+        return self.coresys.snapshots
+
+    @property
+    def sys_tasks(self) -> Tasks:
+        """Return Tasks object."""
+        return self.coresys.tasks
+
+    @property
+    def sys_services(self) -> ServiceManager:
+        """Return ServiceManager object."""
+        return self.coresys.services
+
+    @property
+    def sys_discovery(self) -> Discovery:
+        """Return Discovery object."""
+        return self.coresys.discovery
+
+    @property
+    def sys_dbus(self) -> DBusManager:
+        """Return DBusManager object."""
+        return self.coresys.dbus
+
+    @property
+    def sys_host(self) -> HostManager:
+        """Return HostManager object."""
+        return self.coresys.host
+
+    @property
+    def sys_hassos(self) -> HassOS:
+        """Return HassOS object."""
+        return self.coresys.hassos
+
+    def sys_run_in_executor(self, funct, *args) -> asyncio.Future:
+        """Wrapper for executor pool."""
+        return self.sys_loop.run_in_executor(None, funct, *args)
+
+    def sys_create_task(self, coroutine) -> asyncio.Task:
+        """Wrapper for async task."""
+        return self.sys_loop.create_task(coroutine)
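Every internal pointer on `CoreSys` follows the same "set once" pattern: the setter refuses to overwrite an object that is already wired up, which catches double initialization early. A self-contained sketch of that pattern:

```python
# Minimal sketch of the "set once" property pattern used for every
# CoreSys pointer above.
class CoreSysDemo:
    def __init__(self):
        self._core = None

    @property
    def core(self):
        return self._core

    @core.setter
    def core(self, value):
        # Refuse to replace an already-wired object.
        if self._core:
            raise RuntimeError("Hass.io already set!")
        self._core = value


demo = CoreSysDemo()
demo.core = object()      # first assignment works
try:
    demo.core = object()  # second assignment raises
except RuntimeError as err:
    print(err)
```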
@@ -36,6 +36,7 @@ class Discovery(CoreSysAttributes, JsonConfig):
             discovery = Message(**message)
             messages[discovery.uuid] = discovery
 
+        _LOGGER.info("Load %d messages", len(messages))
         self.message_obj = messages
 
     def save(self):
@@ -60,7 +61,7 @@ class Discovery(CoreSysAttributes, JsonConfig):
     def send(self, addon, service, config):
         """Send a discovery message to Home Assistant."""
         try:
-            DISCOVERY_SERVICES[service](config)
+            config = DISCOVERY_SERVICES[service](config)
         except vol.Invalid as err:
             _LOGGER.error(
                 "Invalid discovery %s config", humanize_error(config, err))
@@ -73,7 +74,7 @@ class Discovery(CoreSysAttributes, JsonConfig):
         for old_message in self.list_messages:
             if old_message != message:
                 continue
-            _LOGGER.warning("Duplicate discovery message from %s", addon.slug)
+            _LOGGER.info("Duplicate discovery message from %s", addon.slug)
             return old_message
 
         _LOGGER.info("Send discovery to Home Assistant %s from %s",
@@ -115,7 +116,7 @@ class Discovery(CoreSysAttributes, JsonConfig):
 @attr.s
 class Message:
     """Represent a single Discovery message."""
-    uuid = attr.ib(factory=lambda: uuid4().hex, cmp=False, init=False)
     addon = attr.ib()
     service = attr.ib()
     config = attr.ib(cmp=False)
+    uuid = attr.ib(factory=lambda: uuid4().hex, cmp=False)
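The `cmp=False` flags on `config` and `uuid` are what make the duplicate check above work: equality only considers `addon` and `service`, so a resent message from the same add-on compares equal to the stored one. A sketch with illustrative values:

```python
from uuid import uuid4

import attr


@attr.s
class Message:
    """Represent a single Discovery message (as in the diff above)."""
    addon = attr.ib()
    service = attr.ib()
    config = attr.ib(cmp=False)
    uuid = attr.ib(factory=lambda: uuid4().hex, cmp=False)


a = Message("local_mqtt", "mqtt", {"host": "a"})
b = Message("local_mqtt", "mqtt", {"host": "b"})
print(a == b)            # True: only addon and service are compared
print(a.uuid == b.uuid)  # False: each message keeps its own uuid
```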
@@ -65,7 +65,8 @@ class DockerAPI:
 
         # Create container
         try:
-            container = self.docker.containers.create(image, **kwargs)
+            container = self.docker.containers.create(
+                image, use_config_proxy=False, **kwargs)
         except docker.errors.DockerException as err:
             _LOGGER.error("Can't create container from %s: %s", name, err)
             return False
@@ -101,6 +102,7 @@ class DockerAPI:
                 image,
                 command=command,
                 network=self.network.name,
+                use_config_proxy=False,
                 **kwargs
             )
 
@@ -1,16 +1,14 @@
 """Init file for Hass.io add-on Docker object."""
 import logging
 import os
 from pathlib import Path
 
 import docker
 import requests
 
 from .interface import DockerInterface
 from ..addons.build import AddonBuild
-from ..const import (
-    MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE, ENV_TOKEN,
-    ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
+from ..const import (MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE,
+                     ENV_TOKEN, ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
 from ..utils import process_lock
 
 _LOGGER = logging.getLogger(__name__)
@@ -44,16 +42,16 @@ class DockerAddon(DockerInterface):
     @property
     def version(self):
         """Return version of Docker image."""
-        if not self.addon.legacy:
-            return super().version
-        return self.addon.version_installed
+        if self.addon.legacy:
+            return self.addon.version_installed
+        return super().version
 
     @property
     def arch(self):
         """Return arch of Docker image."""
-        if not self.addon.legacy:
-            return super().arch
-        return self.sys_arch
+        if self.addon.legacy:
+            return self.sys_arch.default
+        return super().arch
 
     @property
     def name(self):
@@ -82,12 +80,14 @@ class DockerAddon(DockerInterface):
         """Return environment for Docker add-on."""
         addon_env = self.addon.environment or {}
 
-        # Need audio settings
-        if self.addon.with_audio:
-            addon_env.update({
-                'ALSA_OUTPUT': self.addon.audio_output,
-                'ALSA_INPUT': self.addon.audio_input,
-            })
+        # Provide options for legacy add-ons
+        if self.addon.legacy:
+            for key, value in self.addon.options.items():
+                if isinstance(value, (int, str)):
+                    addon_env[key] = value
+                else:
+                    _LOGGER.warning(
+                        "Can not set nested option %s as Docker env", key)
 
         return {
             **addon_env,
@@ -101,7 +101,7 @@ class DockerAddon(DockerInterface):
         devices = self.addon.devices or []
 
         # Use audio devices
-        if self.addon.with_audio and AUDIO_DEVICE not in devices:
+        if self.addon.with_audio and self.sys_hardware.support_audio:
             devices.append(AUDIO_DEVICE)
 
         # Auto mapping UART devices
@@ -177,8 +177,10 @@ class DockerAddon(DockerInterface):
         """Generate volumes for mappings."""
         volumes = {
             str(self.addon.path_extern_data): {
-                'bind': "/data", 'mode': 'rw'
-            }}
+                'bind': "/data",
+                'mode': 'rw'
+            }
+        }
 
         addon_mapping = self.addon.map_volumes
 
@@ -186,43 +188,52 @@ class DockerAddon(DockerInterface):
         if MAP_CONFIG in addon_mapping:
             volumes.update({
                 str(self.sys_config.path_extern_homeassistant): {
-                    'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
-                }})
+                    'bind': "/config",
+                    'mode': addon_mapping[MAP_CONFIG]
+                }
+            })
 
         if MAP_SSL in addon_mapping:
             volumes.update({
                 str(self.sys_config.path_extern_ssl): {
-                    'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
-                }})
+                    'bind': "/ssl",
+                    'mode': addon_mapping[MAP_SSL]
+                }
+            })
 
         if MAP_ADDONS in addon_mapping:
             volumes.update({
                 str(self.sys_config.path_extern_addons_local): {
-                    'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
-                }})
+                    'bind': "/addons",
+                    'mode': addon_mapping[MAP_ADDONS]
+                }
+            })
 
         if MAP_BACKUP in addon_mapping:
             volumes.update({
                 str(self.sys_config.path_extern_backup): {
-                    'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
-                }})
+                    'bind': "/backup",
+                    'mode': addon_mapping[MAP_BACKUP]
+                }
+            })
 
         if MAP_SHARE in addon_mapping:
             volumes.update({
                 str(self.sys_config.path_extern_share): {
-                    'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
-                }})
+                    'bind': "/share",
+                    'mode': addon_mapping[MAP_SHARE]
+                }
+            })
 
         # Init other hardware mappings
 
         # GPIO support
-        if self.addon.with_gpio:
+        if self.addon.with_gpio and self.sys_hardware.support_gpio:
             for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
                 if not Path(gpio_path).exists():
                     continue
                 volumes.update({
                     gpio_path: {
-                        'bind': gpio_path, 'mode': 'rw'
+                        'bind': gpio_path,
+                        'mode': 'rw'
                     },
                 })
@@ -230,7 +241,17 @@ class DockerAddon(DockerInterface):
         if self.addon.with_devicetree:
             volumes.update({
                 "/sys/firmware/devicetree/base": {
-                    'bind': "/device-tree", 'mode': 'ro'
+                    'bind': "/device-tree",
+                    'mode': 'ro'
                 },
             })
 
+        # Kernel Modules support
+        if self.addon.with_kernel_modules:
+            volumes.update({
+                "/lib/modules": {
+                    'bind': "/lib/modules",
+                    'mode': 'ro'
+                },
+            })
+
@@ -238,7 +259,8 @@ class DockerAddon(DockerInterface):
         if not self.addon.protected and self.addon.access_docker_api:
             volumes.update({
                 "/var/run/docker.sock": {
-                    'bind': "/var/run/docker.sock", 'mode': 'ro'
+                    'bind': "/var/run/docker.sock",
+                    'mode': 'ro'
                 },
             })
 
@@ -246,15 +268,19 @@ class DockerAddon(DockerInterface):
         if self.addon.host_dbus:
             volumes.update({
                 "/var/run/dbus": {
-                    'bind': "/var/run/dbus", 'mode': 'rw'
-                }})
+                    'bind': "/var/run/dbus",
+                    'mode': 'rw'
+                }
+            })
 
         # ALSA configuration
         if self.addon.with_audio:
             volumes.update({
                 str(self.addon.path_extern_asound): {
-                    'bind': "/etc/asound.conf", 'mode': 'ro'
-                }})
+                    'bind': "/etc/asound.conf",
+                    'mode': 'ro'
+                }
+            })
 
         return volumes
 
@@ -268,8 +294,8 @@ class DockerAddon(DockerInterface):
 
         # Security check
         if not self.addon.protected:
-            _LOGGER.warning(
-                "%s run with disabled protected mode!", self.addon.name)
+            _LOGGER.warning("%s run with disabled protected mode!",
+                            self.addon.name)
 
         # cleanup
         self._stop()
@@ -292,16 +318,15 @@ class DockerAddon(DockerInterface):
             security_opt=self.security_opt,
             environment=self.environment,
             volumes=self.volumes,
-            tmpfs=self.tmpfs
-        )
+            tmpfs=self.tmpfs)
 
         if ret:
-            _LOGGER.info("Start Docker add-on %s with version %s",
-                         self.image, self.version)
+            _LOGGER.info("Start Docker add-on %s with version %s", self.image,
+                         self.version)
 
         return ret
 
-    def _install(self, tag):
+    def _install(self, tag, image=None):
         """Pull Docker image or build it.
 
         Need run inside executor.
@@ -309,7 +334,7 @@ class DockerAddon(DockerInterface):
         if self.addon.need_build:
             return self._build(tag)
 
-        return super()._install(tag)
+        return super()._install(tag, image)
 
     def _build(self, tag):
         """Build a Docker container.
@@ -321,7 +346,7 @@ class DockerAddon(DockerInterface):
         _LOGGER.info("Start build %s:%s", self.image, tag)
         try:
             image, log = self.sys_docker.images.build(
-                **build_env.get_docker_args(tag))
+                use_config_proxy=False, **build_env.get_docker_args(tag))
 
             _LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
             image.tag(self.image, tag='latest')
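The legacy-option handling added above can only export scalar values as container environment variables; nested structures are skipped with a warning. A small sketch of that filtering, with illustrative option values:

```python
import logging

_LOGGER = logging.getLogger(__name__)


def legacy_env(options):
    """Flatten add-on options into a Docker env mapping (sketch)."""
    addon_env = {}
    for key, value in options.items():
        if isinstance(value, (int, str)):
            addon_env[key] = value
        else:
            _LOGGER.warning("Can not set nested option %s as Docker env", key)
    return addon_env


print(legacy_env({"port": 1883, "name": "demo", "users": ["a", "b"]}))
# {'port': 1883, 'name': 'demo'}  -- the list option is dropped with a warning
```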
@@ -3,8 +3,8 @@ import logging
 
 import docker
 
-from .interface import DockerInterface
 from ..coresys import CoreSysAttributes
+from .interface import DockerInterface
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -15,7 +15,7 @@ class DockerHassOSCli(DockerInterface, CoreSysAttributes):
     @property
     def image(self):
         """Return name of HassOS CLI image."""
-        return f"homeassistant/{self.sys_arch}-hassio-cli"
+        return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"
 
     def _stop(self):
         """Don't need stop."""
@@ -33,5 +33,5 @@ class DockerHassOSCli(DockerInterface, CoreSysAttributes):
 
         else:
             self._meta = image.attrs
-            _LOGGER.info("Found HassOS CLI %s with version %s",
-                         self.image, self.version)
+            _LOGGER.info("Found HassOS CLI %s with version %s", self.image,
+                         self.version)
@@ -65,26 +65,28 @@ class DockerInterface(CoreSysAttributes):
         return self.lock.locked()
 
     @process_lock
-    def install(self, tag):
+    def install(self, tag, image=None):
         """Pull docker image."""
-        return self.sys_run_in_executor(self._install, tag)
+        return self.sys_run_in_executor(self._install, tag, image)
 
-    def _install(self, tag):
+    def _install(self, tag, image=None):
         """Pull Docker image.
 
         Need run inside executor.
         """
-        try:
-            _LOGGER.info("Pull image %s tag %s.", self.image, tag)
-            image = self.sys_docker.images.pull(f"{self.image}:{tag}")
+        image = image or self.image
 
-            image.tag(self.image, tag='latest')
-            self._meta = image.attrs
+        try:
+            _LOGGER.info("Pull image %s tag %s.", image, tag)
+            docker_image = self.sys_docker.images.pull(f"{image}:{tag}")
+
+            docker_image.tag(image, tag='latest')
+            self._meta = docker_image.attrs
         except docker.errors.APIError as err:
-            _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
+            _LOGGER.error("Can't install %s:%s -> %s.", image, tag, err)
             return False
 
-        _LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
+        _LOGGER.info("Tag image %s with version %s as latest", image, tag)
         return True
 
     def exists(self):
@@ -97,8 +99,8 @@ class DockerInterface(CoreSysAttributes):
         Need run inside executor.
         """
         try:
-            image = self.sys_docker.images.get(self.image)
-            assert f"{self.image}:{self.version}" in image.tags
+            docker_image = self.sys_docker.images.get(self.image)
+            assert f"{self.image}:{self.version}" in docker_image.tags
         except (docker.errors.DockerException, AssertionError):
             return False
 
@@ -117,17 +119,17 @@ class DockerInterface(CoreSysAttributes):
         Need run inside executor.
         """
         try:
-            container = self.sys_docker.containers.get(self.name)
-            image = self.sys_docker.images.get(self.image)
+            docker_container = self.sys_docker.containers.get(self.name)
+            docker_image = self.sys_docker.images.get(self.image)
         except docker.errors.DockerException:
             return False
 
         # container is not running
-        if container.status != 'running':
+        if docker_container.status != 'running':
             return False
 
         # we run on an old image, stop and start it
-        if container.image.id != image.id:
+        if docker_container.image.id != docker_image.id:
             return False
 
         return True
@@ -150,8 +152,8 @@ class DockerInterface(CoreSysAttributes):
         except docker.errors.DockerException:
             return False
 
-        _LOGGER.info(
-            "Attach to image %s with version %s", self.image, self.version)
+        _LOGGER.info("Attach to image %s with version %s", self.image,
+                     self.version)
 
         return True
 
@@ -178,18 +180,18 @@ class DockerInterface(CoreSysAttributes):
         Need run inside executor.
         """
         try:
-            container = self.sys_docker.containers.get(self.name)
+            docker_container = self.sys_docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False
 
-        if container.status == 'running':
+        if docker_container.status == 'running':
             _LOGGER.info("Stop %s Docker application", self.image)
             with suppress(docker.errors.DockerException):
-                container.stop(timeout=self.timeout)
+                docker_container.stop(timeout=self.timeout)
 
         with suppress(docker.errors.DockerException):
             _LOGGER.info("Clean %s Docker application", self.image)
-            container.remove(force=True)
+            docker_container.remove(force=True)
 
         return True
 
@@ -206,8 +208,8 @@ class DockerInterface(CoreSysAttributes):
         # Cleanup container
         self._stop()
 
-        _LOGGER.info(
-            "Remove Docker %s with latest and %s", self.image, self.version)
+        _LOGGER.info("Remove Docker %s with latest and %s", self.image,
+                     self.version)
 
         try:
             with suppress(docker.errors.ImageNotFound):
@@ -226,20 +228,22 @@ class DockerInterface(CoreSysAttributes):
         return True
 
     @process_lock
-    def update(self, tag):
+    def update(self, tag, image=None):
         """Update a Docker image."""
-        return self.sys_run_in_executor(self._update, tag)
+        return self.sys_run_in_executor(self._update, tag, image)
 
-    def _update(self, tag):
+    def _update(self, tag, image=None):
         """Update a docker image.
 
         Need run inside executor.
         """
-        _LOGGER.info(
-            "Update Docker %s with %s:%s", self.version, self.image, tag)
+        image = image or self.image
+
+        _LOGGER.info("Update Docker %s:%s to %s:%s", self.image, self.version,
+                     image, tag)
 
         # Update docker image
-        if not self._install(tag):
+        if not self._install(tag, image):
             return False
 
         # Stop container & cleanup
@@ -261,12 +265,12 @@ class DockerInterface(CoreSysAttributes):
         Need run inside executor.
         """
         try:
-            container = self.sys_docker.containers.get(self.name)
+            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
             return b""
 
         try:
-            return container.logs(tail=100, stdout=True, stderr=True)
+            return docker_container.logs(tail=100, stdout=True, stderr=True)
         except docker.errors.DockerException as err:
             _LOGGER.warning("Can't grep logs from %s: %s", self.image, err)
 
@@ -318,12 +322,12 @@ class DockerInterface(CoreSysAttributes):
         Need run inside executor.
         """
         try:
-            container = self.sys_docker.containers.get(self.name)
+            docker_container = self.sys_docker.containers.get(self.name)
         except docker.errors.DockerException:
             return None
 
         try:
-            stats = container.stats(stream=False)
+            stats = docker_container.stats(stream=False)
             return DockerStats(stats)
         except docker.errors.DockerException as err:
             _LOGGER.error("Can't read stats from %s: %s", self.name, err)
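The new `image=None` parameter lets callers pull a different repository for a given tag while keeping the usual retag-as-latest behavior. A minimal sketch of that flow with the Docker SDK; the image name and tag below are illustrative only:

```python
import docker


def install(client, default_image, tag, image=None):
    """Pull an image (optionally an alternate repo) and retag it as latest."""
    image = image or default_image
    docker_image = client.images.pull(f"{image}:{tag}")
    docker_image.tag(image, tag='latest')
    return docker_image


# Usage sketch (values are examples, not from the diff):
# client = docker.from_env()
# install(client, "homeassistant/amd64-hassio-supervisor", "140")
```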
@@ -3,111 +3,113 @@
 
 class HassioError(Exception):
     """Root exception."""
-    pass
 
 
 class HassioNotSupportedError(HassioError):
     """Function is not supported."""
-    pass
 
 
 # HomeAssistant
 
 
 class HomeAssistantError(HassioError):
     """Home Assistant exception."""
-    pass
 
 
 class HomeAssistantUpdateError(HomeAssistantError):
     """Error on update of a Home Assistant."""
-    pass
 
 
 class HomeAssistantAPIError(HomeAssistantError):
     """Home Assistant API exception."""
-    pass
 
 
 class HomeAssistantAuthError(HomeAssistantAPIError):
     """Home Assistant Auth API exception."""
-    pass
 
 
 # HassOS
 
 
 class HassOSError(HassioError):
     """HassOS exception."""
-    pass
 
 
 class HassOSUpdateError(HassOSError):
     """Error on update of a HassOS."""
-    pass
 
 
 class HassOSNotSupportedError(HassioNotSupportedError):
     """Function not supported by HassOS."""
-    pass
 
 
+# Arch
+
+
+class HassioArchNotFound(HassioNotSupportedError):
+    """No matches with exists arch."""
+
+
 # Updater
 
 
 class HassioUpdaterError(HassioError):
     """Error on Updater."""
-    pass
 
 
+# Auth
+
+
+class AuthError(HassioError):
+    """Auth errors."""
+
+
 # Host
 
 
 class HostError(HassioError):
     """Internal Host error."""
-    pass
 
 
 class HostNotSupportedError(HassioNotSupportedError):
     """Host function is not supported."""
-    pass
 
 
 class HostServiceError(HostError):
     """Host service functions fails."""
-    pass
 
 
 class HostAppArmorError(HostError):
     """Host apparmor functions fails."""
-    pass
 
 
 # API
 
 
 class APIError(HassioError, RuntimeError):
     """API errors."""
-    pass
 
 
 class APIForbidden(APIError):
     """API forbidden error."""
-    pass
 
 
 # Service / Discovery
 
 
 class DiscoveryError(HassioError):
     """Discovery Errors."""
-    pass
 
 
 class ServicesError(HassioError):
     """Services Errors."""
-    pass
 
 
 # utils/gdbus
 
 
 class DBusError(HassioError):
     """DBus generic error."""
-    pass
 
 
 class DBusNotConnectedError(HostNotSupportedError):
@@ -116,26 +118,22 @@ class DBusNotConnectedError(HostNotSupportedError):
 
 class DBusFatalError(DBusError):
     """DBus call going wrong."""
-    pass
 
 
 class DBusParseError(DBusError):
     """DBus parse error."""
-    pass
 
 
 # util/apparmor
 
 
 class AppArmorError(HostAppArmorError):
     """General AppArmor error."""
-    pass
 
 
 class AppArmorFileError(AppArmorError):
     """AppArmor profile file error."""
-    pass
 
 
 class AppArmorInvalidError(AppArmorError):
     """AppArmor profile validate error."""
-    pass
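Because every error derives from `HassioError`, callers can catch the whole family with a single `except` clause while more specific handlers stay possible. A self-contained sketch of the hierarchy:

```python
class HassioError(Exception):
    """Root exception."""


class HostError(HassioError):
    """Internal Host error."""


class HostServiceError(HostError):
    """Host service functions fails."""


try:
    raise HostServiceError("systemd call failed")  # illustrative message
except HassioError as err:
    # Any supervisor error lands here if no narrower handler matched.
    print(f"caught as root error: {err}")
```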
@@ -66,7 +66,7 @@ class HassOS(CoreSysAttributes):
         return self._board
 
     def _check_host(self):
-        """Check if HassOS is availabe."""
+        """Check if HassOS is available."""
         if not self.available:
             _LOGGER.error("No HassOS available")
             raise HassOSNotSupportedError()
@@ -13,16 +13,14 @@ import aiohttp
 from aiohttp import hdrs
 import attr
 
-from .const import (
-    FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_UUID,
-    ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
-    ATTR_WAIT_BOOT, ATTR_REFRESH_TOKEN, ATTR_ACCESS_TOKEN,
-    HEADER_HA_ACCESS)
+from .const import (FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION,
+                    ATTR_UUID, ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL,
+                    ATTR_WATCHDOG, ATTR_WAIT_BOOT, ATTR_REFRESH_TOKEN,
+                    ATTR_ACCESS_TOKEN, HEADER_HA_ACCESS)
 from .coresys import CoreSysAttributes
 from .docker.homeassistant import DockerHomeAssistant
-from .exceptions import (
-    HomeAssistantUpdateError, HomeAssistantError, HomeAssistantAPIError,
-    HomeAssistantAuthError)
+from .exceptions import (HomeAssistantUpdateError, HomeAssistantError,
+                         HomeAssistantAPIError, HomeAssistantAuthError)
 from .utils import convert_to_ascii, process_lock, create_token
 from .utils.json import JsonConfig
 from .validate import SCHEMA_HASS_CONFIG
@@ -66,6 +64,11 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         """Return the system machines."""
         return self.instance.machine
 
+    @property
+    def arch(self):
+        """Return arch of running Home Assistant."""
+        return self.instance.arch
+
     @property
     def error_state(self):
         """Return True if system is in error."""
@@ -109,9 +112,8 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
     @property
     def api_url(self):
         """Return API url to Home Assistant."""
-        return "{}://{}:{}".format(
-            'https' if self.api_ssl else 'http', self.api_ip, self.api_port
-        )
+        return "{}://{}:{}".format('https' if self.api_ssl else 'http',
                                    self.api_ip, self.api_port)
 
     @property
     def watchdog(self):
@@ -171,8 +173,8 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
     @property
     def is_custom_image(self):
         """Return True if a custom image is used."""
-        return all(attr in self._data for attr in
-                   (ATTR_IMAGE, ATTR_LAST_VERSION))
+        return all(
+            attr in self._data for attr in (ATTR_IMAGE, ATTR_LAST_VERSION))
 
     @property
     def boot(self):
@@ -191,7 +193,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
 
     @property
     def hassio_token(self):
-        """Return a access token for the Hass.io API."""
+        """Return an access token for the Hass.io API."""
         return self._data.get(ATTR_ACCESS_TOKEN)
 
     @property
@@ -211,15 +213,8 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         while True:
             if await self.instance.install('landingpage'):
                 break
-            _LOGGER.warning("Fails install landingpage, retry after 60sec")
-            await asyncio.sleep(60)
-
-        # Run landingpage after installation
-        _LOGGER.info("Start landing page")
-        try:
-            await self._start()
-        except HomeAssistantError:
-            _LOGGER.warning("Can't start landing page")
+            _LOGGER.warning("Fails install landingpage, retry after 30sec")
+            await asyncio.sleep(30)
 
     @process_lock
     async def install(self):
@@ -233,8 +228,8 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
             tag = self.last_version
             if tag and await self.instance.install(tag):
                 break
-            _LOGGER.warning("Error on install Home Assistant. Retry in 60sec")
-            await asyncio.sleep(60)
+            _LOGGER.warning("Error on install Home Assistant. Retry in 30sec")
+            await asyncio.sleep(30)
 
         # finishing
         _LOGGER.info("Home Assistant docker now installed")
@@ -260,7 +255,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
             _LOGGER.warning("Version %s is already installed", version)
             return HomeAssistantUpdateError()
 
-        # process a update
+        # process an update
         async def _update(to_version):
             """Run Home Assistant update."""
             try:
@@ -356,8 +351,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
     async def check_config(self):
         """Run Home Assistant config check."""
         result = await self.instance.execute_command(
-            "python3 -m homeassistant -c /config --script check_config"
-        )
+            "python3 -m homeassistant -c /config --script check_config")
 
         # if not valid
         if result.exit_code is None:
@@ -375,8 +369,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
 
     async def ensure_access_token(self):
         """Ensure there is an access token."""
-        if (self.access_token is not None and
-                self._access_token_expires > datetime.utcnow()):
+        if self.access_token is not None and self._access_token_expires > datetime.utcnow():
             return
 
         with suppress(asyncio.TimeoutError, aiohttp.ClientError):
@@ -386,8 +379,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
                     data={
                         "grant_type": "refresh_token",
                         "refresh_token": self.refresh_token
-                    }
-            ) as resp:
+                    }) as resp:
                 if resp.status != 200:
                     _LOGGER.error("Can't update Home Assistant access token!")
                     raise HomeAssistantAuthError()
@@ -399,8 +391,13 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
                 datetime.utcnow() + timedelta(seconds=tokens['expires_in'])
 
     @asynccontextmanager
-    async def make_request(self, method, path, json=None, content_type=None,
-                           data=None, timeout=30):
+    async def make_request(self,
+                           method,
+                           path,
+                           json=None,
+                           content_type=None,
+                           data=None,
+                           timeout=30):
         """Async context manager to make a request with right auth."""
         url = f"{self.api_url}/{path}"
         headers = {}
@@ -422,8 +419,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         try:
             async with getattr(self.sys_websession_ssl, method)(
                     url, data=data, timeout=timeout, json=json,
-                    headers=headers
-            ) as resp:
+                    headers=headers) as resp:
                 # Access token expired
                 if resp.status == 401 and self.refresh_token:
                     self.access_token = None
@@ -442,17 +438,17 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
             async with self.make_request('get', 'api/') as resp:
                 if resp.status in (200, 201):
                     return True
-                err = resp.status
+                status = resp.status
+                _LOGGER.warning("Home Assistant API config mismatch: %s", status)
 
-        _LOGGER.warning("Home Assistant API config mismatch: %d", err)
         return False
 
     async def _block_till_run(self):
         """Block until Home-Assistant is booting up or startup timeout."""
         start_time = time.monotonic()
         migration_progress = False
-        migration_file = Path(
-            self.sys_config.path_homeassistant, '.migration_progress')
+        migration_file = Path(self.sys_config.path_homeassistant,
+                              '.migration_progress')
 
         def check_port():
             """Check if port is mapped."""
@@ -469,23 +465,20 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
             return False
 
         while True:
-            await asyncio.sleep(10)
+            await asyncio.sleep(5)
 
-            # 1
-            # Check if Container is is_running
+            # 1: Check if Container is is_running
             if not await self.instance.is_running():
                 _LOGGER.error("Home Assistant has crashed!")
                 break
 
-            # 2
-            # Check if API response
+            # 2: Check if API response
             if await self.sys_run_in_executor(check_port):
                 _LOGGER.info("Detect a running Home Assistant instance")
                 self._error_state = False
                 return
 
-            # 3
-            # Running DB Migration
+            # 3: Running DB Migration
             if migration_file.exists():
                 if not migration_progress:
                     migration_progress = True
@@ -496,11 +489,9 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
                     start_time = time.monotonic()
                     _LOGGER.info("Home Assistant record migration done")
 
-            # 4
-            # Timeout
+            # 4: Timeout
             if time.monotonic() - start_time > self.wait_boot:
-                _LOGGER.warning(
-                    "Don't wait anymore of Home Assistant startup!")
+                _LOGGER.warning("Don't wait anymore of Home Assistant startup!")
                 break
 
         self._error_state = True
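The token handling above follows a common pattern: reuse the cached access token until it expires, then exchange the refresh token for a new one. A hedged, self-contained sketch of that pattern with aiohttp; the endpoint path and field names mirror the Home Assistant OAuth token API, while `state` is an illustrative stand-in for the object attributes used in the real code:

```python
from datetime import datetime, timedelta

import aiohttp


async def ensure_access_token(session: aiohttp.ClientSession, api_url, state):
    """Refresh the cached access token if it is missing or expired (sketch)."""
    if state.get("access_token") and state["expires"] > datetime.utcnow():
        return  # cached token still valid

    async with session.post(
            f"{api_url}/auth/token",
            data={"grant_type": "refresh_token",
                  "refresh_token": state["refresh_token"]}) as resp:
        resp.raise_for_status()
        tokens = await resp.json()
        state["access_token"] = tokens["access_token"]
        state["expires"] = datetime.utcnow() + timedelta(
            seconds=tokens["expires_in"])
```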
@@ -6,7 +6,8 @@ from string import Template
 
 import attr
 
-from ..const import ATTR_INPUT, ATTR_OUTPUT, ATTR_DEVICES, ATTR_NAME
+from ..const import (
+    ATTR_INPUT, ATTR_OUTPUT, ATTR_DEVICES, ATTR_NAME, CHAN_ID, CHAN_TYPE)
 from ..coresys import CoreSysAttributes
 
 _LOGGER = logging.getLogger(__name__)
@@ -58,7 +59,9 @@ class AlsaAudio(CoreSysAttributes):
 
         # Process devices
         for dev_id, dev_data in self.sys_hardware.audio_devices.items():
-            for chan_id, chan_type in dev_data[ATTR_DEVICES].items():
+            for chan_info in dev_data[ATTR_DEVICES]:
+                chan_id = chan_info[CHAN_ID]
+                chan_type = chan_info[CHAN_TYPE]
                 alsa_id = f"{dev_id},{chan_id}"
                 dev_name = dev_data[ATTR_NAME]
@@ -13,9 +13,8 @@ COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
 class DNSForward:
     """Manage DNS forwarding to internal DNS."""
 
-    def __init__(self, loop):
+    def __init__(self):
         """Initialize DNS forwarding."""
-        self.loop = loop
         self.proc = None
 
     async def start(self):
@@ -25,9 +24,7 @@ class DNSForward:
                 *shlex.split(COMMAND),
                 stdin=asyncio.subprocess.DEVNULL,
                 stdout=asyncio.subprocess.DEVNULL,
-                stderr=asyncio.subprocess.DEVNULL,
-                loop=self.loop
-            )
+                stderr=asyncio.subprocess.DEVNULL)
         except OSError as err:
             _LOGGER.error("Can't start DNS forwarding: %s", err)
         else:
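Dropping the explicit `loop` argument works because the asyncio subprocess helpers pick up the running event loop themselves on modern Python. A minimal sketch of the loop-free call; note it assumes `socat` is installed on the host, exactly as the real module does:

```python
import asyncio
import shlex

COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"


async def start_forwarder():
    # No loop= argument: the coroutine uses the currently running loop.
    return await asyncio.create_subprocess_exec(
        *shlex.split(COMMAND),
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.DEVNULL,
        stderr=asyncio.subprocess.DEVNULL)

# proc = asyncio.run(start_forwarder())  # requires socat on the host
```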
@@ -6,7 +6,7 @@ import re
 
 import pyudev
 
-from ..const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES
+from ..const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES, CHAN_ID, CHAN_TYPE
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -20,6 +20,7 @@ PROC_STAT = Path("/proc/stat")
 RE_BOOT_TIME = re.compile(r"btime (\d+)")
 
 GPIO_DEVICES = Path("/sys/class/gpio")
+SOC_DEVICES = Path("/sys/devices/platform/soc")
 RE_TTY = re.compile(r"tty[A-Z]+")
 
 
@@ -60,6 +61,11 @@ class Hardware:
 
         return dev_list
 
+    @property
+    def support_audio(self):
+        """Return True if the system has audio support."""
+        return bool(self.audio_devices)
+
     @property
     def audio_devices(self):
         """Return all available audio interfaces."""
@@ -68,10 +74,8 @@ class Hardware:
             return {}
 
         try:
-            with ASOUND_CARDS.open('r') as cards_file:
-                cards = cards_file.read()
-            with ASOUND_DEVICES.open('r') as devices_file:
-                devices = devices_file.read()
+            cards = ASOUND_CARDS.read_text()
+            devices = ASOUND_DEVICES.read_text()
         except OSError as err:
             _LOGGER.error("Can't read asound data: %s", err)
             return {}
@@ -83,20 +87,27 @@ class Hardware:
             audio_list[match.group(1)] = {
                 ATTR_NAME: match.group(3),
                 ATTR_TYPE: match.group(2),
-                ATTR_DEVICES: {},
+                ATTR_DEVICES: [],
             }
 
         # parse devices
         for match in RE_DEVICES.finditer(devices):
             try:
-                audio_list[match.group(1)][ATTR_DEVICES][match.group(2)] = \
-                    match.group(3)
+                audio_list[match.group(1)][ATTR_DEVICES].append({
+                    CHAN_ID: match.group(2),
+                    CHAN_TYPE: match.group(3)
+                })
             except KeyError:
                 _LOGGER.warning("Wrong audio device found %s", match.group(0))
                 continue
 
         return audio_list
 
+    @property
+    def support_gpio(self):
+        """Return True if the device supports GPIOs."""
+        return SOC_DEVICES.exists() and GPIO_DEVICES.exists()
+
     @property
     def gpio_devices(self):
         """Return list of GPIO interface on device."""
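The data-shape change above means each sound card now carries a list of channel dicts keyed by `CHAN_ID`/`CHAN_TYPE` instead of a flat id-to-type mapping, which the ALSA module iterates over. A sketch of the new shape; the regex and sample `/proc/asound/devices` line below are simplified stand-ins, not the module's real `RE_DEVICES`:

```python
import re

CHAN_ID = "chan_id"
CHAN_TYPE = "chan_type"

# Simplified stand-in for the real RE_DEVICES pattern.
RE_DEVICES = re.compile(r"\[\s*(\d+)-\s*(\d+)\s*\]: (.+)")

audio_list = {"0": {"name": "demo-card", "type": "ALSA", "devices": []}}
devices = "  2: [ 0-  0]: digital audio playback\n"

for match in RE_DEVICES.finditer(devices):
    # group(1)=card id, group(2)=channel id, group(3)=channel type
    audio_list[match.group(1)]["devices"].append({
        CHAN_ID: match.group(2),
        CHAN_TYPE: match.group(3),
    })

print(audio_list["0"]["devices"])
# [{'chan_id': '0', 'chan_type': 'digital audio playback'}]
```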
@@ -1,6 +1,7 @@
 """Schedule for Hass.io."""
-import logging
+import asyncio
 from datetime import date, datetime, time, timedelta
+import logging
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -13,9 +14,9 @@ TASK = 'task'
 class Scheduler:
     """Schedule task inside Hass.io."""
 
-    def __init__(self, loop):
+    def __init__(self):
         """Initialize task schedule."""
-        self.loop = loop
+        self.loop = asyncio.get_running_loop()
         self._data = {}
         self.suspend = False
 
@@ -57,8 +58,8 @@ class Scheduler:
             job = self.loop.call_later(interval, self._run_task, task_id)
         elif isinstance(interval, time):
             today = datetime.combine(date.today(), interval)
-            tomorrow = datetime.combine(
-                date.today() + timedelta(days=1), interval)
+            tomorrow = datetime.combine(date.today() + timedelta(days=1),
                                         interval)
 
             # Check if we run it today or next day
             if today > datetime.today():
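Switching to `asyncio.get_running_loop()` means a `Scheduler` can only be constructed while an event loop is actually running; constructing it at import time would raise. A minimal sketch of that constraint:

```python
import asyncio


class Scheduler:
    def __init__(self):
        # Raises RuntimeError if no loop is running at construction time.
        self.loop = asyncio.get_running_loop()


async def main():
    Scheduler()  # fine: created inside the running loop


asyncio.run(main())
# Scheduler() at module level, outside any loop, would raise RuntimeError.
```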
@@ -118,7 +118,7 @@ class SnapshotManager(CoreSysAttributes):
     async def do_snapshot_full(self, name="", password=None):
         """Create a full snapshot."""
         if self.lock.locked():
-            _LOGGER.error("It is already a snapshot/restore process running")
+            _LOGGER.error("A snapshot/restore process is already running")
             return None
 
         snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password)
@@ -153,7 +153,7 @@ class SnapshotManager(CoreSysAttributes):
                               password=None):
         """Create a partial snapshot."""
         if self.lock.locked():
-            _LOGGER.error("It is already a snapshot/restore process running")
+            _LOGGER.error("A snapshot/restore process is already running")
             return None
 
         addons = addons or []
@@ -201,7 +201,7 @@ class SnapshotManager(CoreSysAttributes):
     async def do_restore_full(self, snapshot, password=None):
         """Restore a snapshot."""
         if self.lock.locked():
-            _LOGGER.error("It is already a snapshot/restore process running")
+            _LOGGER.error("A snapshot/restore process is already running")
             return False
 
         if snapshot.sys_type != SNAPSHOT_FULL:
@@ -274,7 +274,7 @@ class SnapshotManager(CoreSysAttributes):
                               addons=None, folders=None, password=None):
         """Restore a snapshot."""
         if self.lock.locked():
-            _LOGGER.error("It is already a snapshot/restore process running")
+            _LOGGER.error("A snapshot/restore process is already running")
             return False
 
         if snapshot.protected and not snapshot.set_password(password):
@@ -6,24 +6,43 @@ import logging
|
||||
from pathlib import Path
|
||||
import tarfile
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from Crypto.Cipher import AES
|
||||
from Crypto.Util import Padding
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import padding
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
|
||||
from .utils import (
|
||||
remove_folder, password_to_key, password_for_validating, key_to_iv)
|
||||
from ..const import (
|
||||
ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
|
||||
ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_IMAGE,
|
||||
ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT, ATTR_CRYPTO,
|
||||
ATTR_LAST_VERSION, ATTR_PROTECTED, ATTR_WAIT_BOOT, ATTR_SIZE,
|
||||
ATTR_REFRESH_TOKEN, CRYPTO_AES128)
|
||||
from ..coresys import CoreSysAttributes
|
||||
ATTR_ADDONS,
|
||||
ATTR_BOOT,
|
||||
ATTR_CRYPTO,
|
||||
ATTR_DATE,
|
||||
ATTR_FOLDERS,
|
||||
ATTR_HOMEASSISTANT,
|
||||
ATTR_IMAGE,
|
||||
ATTR_LAST_VERSION,
|
||||
ATTR_NAME,
|
||||
ATTR_PASSWORD,
|
||||
ATTR_PORT,
|
||||
ATTR_PROTECTED,
|
||||
ATTR_REFRESH_TOKEN,
|
||||
ATTR_REPOSITORIES,
|
||||
ATTR_SIZE,
|
||||
ATTR_SLUG,
|
||||
ATTR_SSL,
|
||||
ATTR_TYPE,
|
||||
ATTR_VERSION,
|
||||
ATTR_WAIT_BOOT,
|
||||
ATTR_WATCHDOG,
|
||||
CRYPTO_AES128,
|
||||
)
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..utils.json import write_json_file
|
||||
from ..utils.tar import SecureTarFile
|
||||
from .utils import key_to_iv, password_for_validating, password_to_key, remove_folder
|
||||
from .validate import ALL_FOLDERS, SCHEMA_SNAPSHOT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -31,14 +50,14 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class Snapshot(CoreSysAttributes):
|
||||
"""A single Hass.io snapshot."""
|
||||
|
||||
def __init__(self, coresys, tar_file):
|
||||
def __init__(self, coresys: CoreSys, tar_file: Path):
|
||||
"""Initialize a snapshot."""
|
||||
self.coresys = coresys
|
||||
self._tarfile = tar_file
|
||||
self._data = {}
|
||||
self.coresys: CoreSys = coresys
|
||||
self._tarfile: Path = tar_file
|
||||
self._data: Dict[str, Any] = {}
|
||||
self._tmp = None
|
||||
self._key = None
|
||||
self._aes = None
|
||||
self._key: Optional[bytes] = None
|
||||
self._aes: Optional[Cipher] = None
|
||||
|
||||
@property
|
||||
def slug(self):
|
||||
@@ -130,13 +149,11 @@ class Snapshot(CoreSysAttributes):
|
||||
|
||||
# Set password
|
||||
if password:
|
||||
self._key = password_to_key(password)
|
||||
self._aes = AES.new(
|
||||
self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
|
||||
self._init_password(password)
|
||||
self._data[ATTR_PROTECTED] = password_for_validating(password)
|
||||
self._data[ATTR_CRYPTO] = CRYPTO_AES128
|
||||
|
||||
def set_password(self, password):
|
||||
def set_password(self, password: str) -> bool:
|
||||
"""Set the password for an existing snapshot."""
|
||||
if not password:
|
||||
return False
|
||||
@@ -145,25 +162,39 @@ class Snapshot(CoreSysAttributes):
         if validating != self._data[ATTR_PROTECTED]:
             return False

-        self._key = password_to_key(password)
-        self._aes = AES.new(self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
+        self._init_password(password)
         return True

-    def _encrypt_data(self, data):
+    def _init_password(self, password: str) -> None:
+        """Set password + init aes cipher."""
+        self._key = password_to_key(password)
+        self._aes = Cipher(
+            algorithms.AES(self._key),
+            modes.CBC(key_to_iv(self._key)),
+            backend=default_backend(),
+        )
+
+    def _encrypt_data(self, data: str) -> str:
         """Make data secure."""
         if not self._key or data is None:
             return data

-        return b64encode(
-            self._aes.encrypt(Padding.pad(data.encode(), 16))).decode()
+        encrypt = self._aes.encryptor()
+        padder = padding.PKCS7(128).padder()

-    def _decrypt_data(self, data):
+        data = padder.update(data.encode()) + padder.finalize()
+        return b64encode(encrypt.update(data)).decode()
+
+    def _decrypt_data(self, data: str) -> str:
         """Make data readable."""
         if not self._key or data is None:
             return data

-        return Padding.unpad(
-            self._aes.decrypt(b64decode(data)), 16).decode()
+        decrypt = self._aes.decryptor()
+        padder = padding.PKCS7(128).unpadder()
+
+        data = padder.update(decrypt.update(b64decode(data))) + padder.finalize()
+        return data.decode()

     async def load(self):
         """Read snapshot.json from tar file."""
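Editor's note on the hunk above: the snapshot code moves from pycryptodome's AES/Padding helpers to the cryptography package's Cipher/CipherContext API with explicit PKCS7 padding. A minimal, self-contained sketch of the same AES-CBC + PKCS7 + base64 round trip, assuming a fixed key and IV for illustration (the real code derives both from the snapshot password via password_to_key/key_to_iv):

from base64 import b64decode, b64encode

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key = b"\x00" * 16  # illustrative 128-bit key, NOT how the real code gets one
aes = Cipher(algorithms.AES(key), modes.CBC(b"\x01" * 16), backend=default_backend())

# Encrypt: pad to the 16-byte block size, run AES-CBC, then base64-encode.
padder = padding.PKCS7(128).padder()
padded = padder.update("refresh-token".encode()) + padder.finalize()
token = b64encode(aes.encryptor().update(padded)).decode()

# Decrypt: reverse the steps and strip the padding again.
unpadder = padding.PKCS7(128).unpadder()
raw = aes.decryptor().update(b64decode(token))
assert (unpadder.update(raw) + unpadder.finalize()).decode() == "refresh-token"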
hassio/snapshots/validate.py
@@ -2,14 +2,36 @@
 import voluptuous as vol

 from ..const import (
-    ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
-    ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_IMAGE,
-    ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT, ATTR_SIZE,
-    ATTR_LAST_VERSION, ATTR_WAIT_BOOT, ATTR_PROTECTED, ATTR_CRYPTO,
-    FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
-    SNAPSHOT_FULL, SNAPSHOT_PARTIAL, CRYPTO_AES128)
-from ..validate import NETWORK_PORT, REPOSITORIES, DOCKER_IMAGE
+    ATTR_ADDONS,
+    ATTR_BOOT,
+    ATTR_CRYPTO,
+    ATTR_DATE,
+    ATTR_FOLDERS,
+    ATTR_HOMEASSISTANT,
+    ATTR_IMAGE,
+    ATTR_LAST_VERSION,
+    ATTR_NAME,
+    ATTR_PASSWORD,
+    ATTR_PORT,
+    ATTR_PROTECTED,
+    ATTR_REFRESH_TOKEN,
+    ATTR_REPOSITORIES,
+    ATTR_SIZE,
+    ATTR_SLUG,
+    ATTR_SSL,
+    ATTR_TYPE,
+    ATTR_VERSION,
+    ATTR_WAIT_BOOT,
+    ATTR_WATCHDOG,
+    CRYPTO_AES128,
+    FOLDER_ADDONS,
+    FOLDER_HOMEASSISTANT,
+    FOLDER_SHARE,
+    FOLDER_SSL,
+    SNAPSHOT_FULL,
+    SNAPSHOT_PARTIAL,
+)
+from ..validate import DOCKER_IMAGE, NETWORK_PORT, REPOSITORIES

 ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]
@@ -24,34 +46,51 @@ def unique_addons(addons_list):


 # pylint: disable=no-value-for-parameter
-SCHEMA_SNAPSHOT = vol.Schema({
-    vol.Required(ATTR_SLUG): vol.Coerce(str),
-    vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
-    vol.Required(ATTR_NAME): vol.Coerce(str),
-    vol.Required(ATTR_DATE): vol.Coerce(str),
-    vol.Inclusive(ATTR_PROTECTED, 'encrypted'):
-        vol.All(vol.Coerce(str), vol.Length(min=1, max=1)),
-    vol.Inclusive(ATTR_CRYPTO, 'encrypted'): CRYPTO_AES128,
-    vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema({
-        vol.Optional(ATTR_VERSION): vol.Coerce(str),
-        vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
-        vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
-        vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
-        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
-        vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
-        vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
-        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
-        vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
-        vol.Optional(ATTR_WAIT_BOOT, default=600):
-            vol.All(vol.Coerce(int), vol.Range(min=60)),
-    }, extra=vol.REMOVE_EXTRA),
-    vol.Optional(ATTR_FOLDERS, default=list):
-        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
-    vol.Optional(ATTR_ADDONS, default=list): vol.All([vol.Schema({
-        vol.Required(ATTR_SLUG): vol.Coerce(str),
-        vol.Required(ATTR_NAME): vol.Coerce(str),
-        vol.Required(ATTR_VERSION): vol.Coerce(str),
-        vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float),
-    }, extra=vol.REMOVE_EXTRA)], unique_addons),
-    vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
-}, extra=vol.ALLOW_EXTRA)
+SCHEMA_SNAPSHOT = vol.Schema(
+    {
+        vol.Required(ATTR_SLUG): vol.Coerce(str),
+        vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
+        vol.Required(ATTR_NAME): vol.Coerce(str),
+        vol.Required(ATTR_DATE): vol.Coerce(str),
+        vol.Inclusive(ATTR_PROTECTED, "encrypted"): vol.All(
+            vol.Coerce(str), vol.Length(min=1, max=1)
+        ),
+        vol.Inclusive(ATTR_CRYPTO, "encrypted"): CRYPTO_AES128,
+        vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema(
+            {
+                vol.Optional(ATTR_VERSION): vol.Coerce(str),
+                vol.Inclusive(ATTR_IMAGE, "custom_hass"): DOCKER_IMAGE,
+                vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Coerce(str),
+                vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
+                vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
+                vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
+                vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
+                vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
+                vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
+                vol.Optional(ATTR_WAIT_BOOT, default=600): vol.All(
+                    vol.Coerce(int), vol.Range(min=60)
+                ),
+            },
+            extra=vol.REMOVE_EXTRA,
+        ),
+        vol.Optional(ATTR_FOLDERS, default=list): vol.All(
+            [vol.In(ALL_FOLDERS)], vol.Unique()
+        ),
+        vol.Optional(ATTR_ADDONS, default=list): vol.All(
+            [
+                vol.Schema(
+                    {
+                        vol.Required(ATTR_SLUG): vol.Coerce(str),
+                        vol.Required(ATTR_NAME): vol.Coerce(str),
+                        vol.Required(ATTR_VERSION): vol.Coerce(str),
+                        vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float),
+                    },
+                    extra=vol.REMOVE_EXTRA,
+                )
+            ],
+            unique_addons,
+        ),
+        vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
+    },
+    extra=vol.ALLOW_EXTRA,
+)
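Editor's note: the schema rewrite above is black-style reformatting only; behavior is unchanged. As a hedged illustration (assuming the ATTR_* constants are the plain strings "slug", "type", "name", "date", as elsewhere in this codebase), validating a minimal manifest fills in the nested defaults:

from hassio.snapshots.validate import SCHEMA_SNAPSHOT

data = SCHEMA_SNAPSHOT(
    {
        "slug": "a1b2c3d4",       # illustrative values
        "type": "full",
        "name": "Pre-update",
        "date": "2019-02-06T12:00:00.000000+00:00",
    }
)
assert data["folders"] == [] and data["addons"] == []
assert data["homeassistant"]["port"] == 8123  # defaults applied recursively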
hassio/tasks.py
@@ -3,6 +3,7 @@ import asyncio
 import logging

 from .coresys import CoreSysAttributes
+from .exceptions import HomeAssistantError

 _LOGGER = logging.getLogger(__name__)

@@ -104,7 +105,10 @@ class Tasks(CoreSysAttributes):
             return

         _LOGGER.warning("Watchdog found a problem with Home Assistant Docker!")
-        await self.sys_homeassistant.start()
+        try:
+            await self.sys_homeassistant.start()
+        except HomeAssistantError:
+            _LOGGER.error("Watchdog Home Assistant reanimation fails!")

     async def _watchdog_homeassistant_api(self):
         """Create scheduler task for monitoring running state of API.

@@ -136,6 +140,8 @@ class Tasks(CoreSysAttributes):
         _LOGGER.error("Watchdog found a problem with Home Assistant API!")
         try:
             await self.sys_homeassistant.restart()
+        except HomeAssistantError:
+            _LOGGER.error("Watchdog Home Assistant reanimation fails!")
         finally:
             self._cache[HASS_WATCHDOG_API] = 0
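Editor's note: both hunks apply the same hardening rule, namely that a watchdog which recovers a failed service must itself never raise, or the scheduler task dies with it. A generic, self-contained sketch of that pattern (function names and the stub exception are illustrative, not the supervisor's API):

import logging

_LOGGER = logging.getLogger(__name__)


class HomeAssistantError(Exception):
    """Stand-in for hassio.exceptions.HomeAssistantError."""


async def watchdog_cycle(check, recover):
    """Run one watchdog cycle; recovery failures are logged, never raised."""
    if await check():
        return
    try:
        await recover()
    except HomeAssistantError:
        _LOGGER.error("Watchdog Home Assistant reanimation fails!")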
hassio/utils/dt.py
@@ -1,7 +1,8 @@
 """Tools file for Hass.io."""
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, timezone, tzinfo
 import logging
 import re
+from typing import Any, Dict, Optional

 import pytz

@@ -14,10 +15,10 @@ _LOGGER = logging.getLogger(__name__)
 # All rights reserved.
 # https://github.com/django/django/blob/master/LICENSE
 DATETIME_RE = re.compile(
-    r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
-    r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
-    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
-    r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
+    r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
+    r"[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
+    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
+    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
 )


@@ -35,25 +36,25 @@ def parse_datetime(dt_str):
     match = DATETIME_RE.match(dt_str)
     if not match:
         return None
-    kws = match.groupdict()  # type: Dict[str, Any]
-    if kws['microsecond']:
-        kws['microsecond'] = kws['microsecond'].ljust(6, '0')
-    tzinfo_str = kws.pop('tzinfo')
+    kws: Dict[str, Any] = match.groupdict()
+    if kws["microsecond"]:
+        kws["microsecond"] = kws["microsecond"].ljust(6, "0")
+    tzinfo_str = kws.pop("tzinfo")

-    tzinfo = None  # type: Optional[dt.tzinfo]
-    if tzinfo_str == 'Z':
-        tzinfo = UTC
+    tzinfo_val: Optional[tzinfo] = None
+    if tzinfo_str == "Z":
+        tzinfo_val = UTC
     elif tzinfo_str is not None:
         offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
         offset_hours = int(tzinfo_str[1:3])
         offset = timedelta(hours=offset_hours, minutes=offset_mins)
-        if tzinfo_str[0] == '-':
+        if tzinfo_str[0] == "-":
             offset = -offset
-        tzinfo = timezone(offset)
+        tzinfo_val = timezone(offset)
     else:
-        tzinfo = None
+        tzinfo_val = None
     kws = {k: int(v) for k, v in kws.items() if v is not None}
-    kws['tzinfo'] = tzinfo
+    kws["tzinfo"] = tzinfo_val
     return datetime(**kws)
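Editor's note: the rename to tzinfo_val avoids shadowing the newly imported datetime.tzinfo type. A hedged usage sketch of parse_datetime (input strings illustrative), showing the microsecond padding and offset handling above:

from datetime import timedelta

from hassio.utils.dt import parse_datetime

assert parse_datetime("not a date") is None

dt = parse_datetime("2019-02-06T12:30:00.5+01:00")
# ".5" is right-padded to 6 digits -> 500000 microseconds
assert dt.microsecond == 500_000
assert dt.utcoffset() == timedelta(hours=1)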
hassio/utils/tar.py
@@ -1,35 +1,50 @@
 """Tarfile fileobject handler for encrypted files."""
-import tarfile
 import hashlib
+import os
 from pathlib import Path
+import tarfile
+from typing import IO, Optional

-from Crypto.Cipher import AES
-from Crypto.Random import get_random_bytes
-from Crypto.Util.Padding import pad
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import padding
+from cryptography.hazmat.primitives.ciphers import (
+    CipherContext,
+    Cipher,
+    algorithms,
+    modes,
+)

 BLOCK_SIZE = 16
+BLOCK_SIZE_BITS = 128

-MOD_READ = 'r'
-MOD_WRITE = 'w'
+MOD_READ = "r"
+MOD_WRITE = "w"


 class SecureTarFile:
     """Handle encrypted files for tarfile library."""

-    def __init__(self, name, mode, key=None, gzip=True):
+    def __init__(
+        self, name: Path, mode: str, key: Optional[bytes] = None, gzip: bool = True
+    ) -> None:
         """Initialize encryption handler."""
-        self._file = None
-        self._mode = mode
-        self._name = name
+        self._file: Optional[IO[bytes]] = None
+        self._mode: str = mode
+        self._name: Path = name

         # Tarfile options
-        self._tar = None
-        self._tar_mode = f"{mode}|gz" if gzip else f"{mode}|"
+        self._tar: Optional[tarfile.TarFile] = None
+        self._tar_mode: str = f"{mode}|gz" if gzip else f"{mode}|"

-        # Encryption/Decription
-        self._aes = None
-        self._key = key
+        # Encryption/Decryption
+        self._aes: Optional[Cipher] = None
+        self._key: bytes = key

-    def __enter__(self):
+        # Function helper
+        self._decrypt: Optional[CipherContext] = None
+        self._encrypt: Optional[CipherContext] = None
+
+    def __enter__(self) -> tarfile.TarFile:
         """Start context manager tarfile."""
         if not self._key:
             self._tar = tarfile.open(name=str(self._name), mode=self._tar_mode)

@@ -42,45 +57,55 @@ class SecureTarFile:
         if self._mode == MOD_READ:
             cbc_rand = self._file.read(16)
         else:
-            cbc_rand = get_random_bytes(16)
+            cbc_rand = os.urandom(16)
             self._file.write(cbc_rand)
-        self._aes = AES.new(
-            self._key, AES.MODE_CBC, iv=_generate_iv(self._key, cbc_rand))
+
+        # Create Cipher
+        self._aes = Cipher(
+            algorithms.AES(self._key),
+            modes.CBC(_generate_iv(self._key, cbc_rand)),
+            backend=default_backend(),
+        )
+
+        self._decrypt = self._aes.decryptor()
+        self._encrypt = self._aes.encryptor()

         self._tar = tarfile.open(fileobj=self, mode=self._tar_mode)
         return self._tar

-    def __exit__(self, exc_type, exc_value, traceback):
+    def __exit__(self, exc_type, exc_value, traceback) -> None:
        """Close file."""
         if self._tar:
             self._tar.close()
         if self._file:
             self._file.close()

-    def write(self, data):
+    def write(self, data: bytes) -> None:
         """Write data."""
         if len(data) % BLOCK_SIZE != 0:
-            data = pad(data, BLOCK_SIZE)
-        self._file.write(self._aes.encrypt(data))
+            padder = padding.PKCS7(BLOCK_SIZE_BITS).padder()
+            data = padder.update(data) + padder.finalize()

-    def read(self, size=0):
+        self._file.write(self._encrypt.update(data))
+
+    def read(self, size: int = 0) -> bytes:
         """Read data."""
-        return self._aes.decrypt(self._file.read(size))
+        return self._decrypt.update(self._file.read(size))

     @property
-    def path(self):
+    def path(self) -> Path:
         """Return path object of tarfile."""
         return self._name

     @property
-    def size(self):
+    def size(self) -> int:
         """Return snapshot size."""
         if not self._name.is_file():
             return 0
-        return round(self._name.stat().st_size / 1048576, 2)  # calc mbyte
+        return round(self._name.stat().st_size / 1_048_576, 2)  # calc mbyte


-def _generate_iv(key, salt):
+def _generate_iv(key: bytes, salt: bytes) -> bytes:
     """Generate an iv from data."""
     temp_iv = key + salt
     for _ in range(100):
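Editor's note: SecureTarFile presents itself as the file object for tarfile, so every block tarfile writes or reads passes through the AES-CBC context; the 16 random bytes written at the start of the file seed the IV, which is why the same key must be used for writing and reading. A hedged usage sketch, assuming the class as shown above (paths and the fixed key are illustrative; real callers derive the key from the snapshot password):

from pathlib import Path

from hassio.utils.tar import MOD_READ, MOD_WRITE, SecureTarFile

key = b"\x00" * 16  # illustrative 128-bit AES key

with SecureTarFile(Path("/tmp/backup.tar.gz"), MOD_WRITE, key=key) as tar:
    tar.add("/data/config", arcname="config")  # any existing path

with SecureTarFile(Path("/tmp/backup.tar.gz"), MOD_READ, key=key) as tar:
    tar.extractall(path="/tmp/restore")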
hassio/validate.py
@@ -6,6 +6,7 @@ import voluptuous as vol

 def schema_or(schema):
     """Allow schema or empty."""
+
     def _wrapper(value):
         """Wrapper for validator."""
         if not value:

@@ -22,7 +23,7 @@ def validate_timezone(timezone):
     except pytz.exceptions.UnknownTimeZoneError:
         raise vol.Invalid(
             "Invalid time zone passed in. Valid options can be found here: "
-            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
-            from None
+            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones"
+        ) from None

     return timezone
@@ -23,6 +23,7 @@ DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
 ALSA_DEVICE = vol.Maybe(vol.Match(r"\d+,\d+"))
 CHANNELS = vol.In([CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV])
 UUID_MATCH = vol.Match(r"^[0-9a-f]{32}$")
+SHA256 = vol.Match(r"^[0-9a-f]{64}$")
 SERVICE_ALL = vol.In([SERVICE_MQTT])


@@ -74,7 +75,7 @@ DOCKER_PORTS = vol.Schema({
 # pylint: disable=no-value-for-parameter
 SCHEMA_HASS_CONFIG = vol.Schema({
     vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
-    vol.Optional(ATTR_ACCESS_TOKEN): vol.Match(r"^[0-9a-f]{64}$"),
+    vol.Optional(ATTR_ACCESS_TOKEN): SHA256,
     vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
     vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
     vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),

@@ -120,3 +121,8 @@ SCHEMA_DISCOVERY = vol.Schema([
 SCHEMA_DISCOVERY_CONFIG = vol.Schema({
     vol.Optional(ATTR_DISCOVERY, default=list): schema_or(SCHEMA_DISCOVERY),
 }, extra=vol.REMOVE_EXTRA)
+
+
+SCHEMA_AUTH_CONFIG = vol.Schema({
+    SHA256: SHA256
+})
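Editor's note: SCHEMA_AUTH_CONFIG validates a mapping whose keys and values must both match the new SHA256 pattern (64 lowercase hex characters). A hedged sketch of a value that passes it, assuming the auth cache stores SHA-256 hex digests of username and password, which is what the matcher implies:

import hashlib

from hassio.validate import SCHEMA_AUTH_CONFIG

username_hash = hashlib.sha256(b"admin").hexdigest()          # illustrative
password_hash = hashlib.sha256(b"correct horse").hexdigest()  # illustrative
SCHEMA_AUTH_CONFIG({username_hash: password_hash})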
Submodule home-assistant-polymer updated: f11ca53282...b4d4591273
pylintrc
@@ -15,26 +15,33 @@ reports=no
 # abstract-method - with intro of async there are always methods missing

 disable=
-    locally-disabled,
-    duplicate-code,
-    cyclic-import,
     abstract-class-little-used,
     abstract-class-not-used,
-    unused-argument,
+    abstract-method,
+    cyclic-import,
+    duplicate-code,
     global-statement,
+    locally-disabled,
     not-context-manager,
     redefined-variable-type,
+    too-few-public-methods,
     too-many-arguments,
+    too-many-branches,
     too-many-instance-attributes,
+    too-many-lines,
+    too-many-locals,
     too-many-public-methods,
     too-many-return-statements,
     too-many-statements,
-    too-many-lines,
+    unused-argument,
     line-too-long,
     bad-continuation,
-    too-few-public-methods,
-    abstract-method,
     no-else-return,
     useless-return,
-    not-async-context-manager
+    no-self-use,
+    not-async-context-manager,
+    too-many-locals,
+    too-many-branches,
+    no-else-return

 [EXCEPTIONS]
-overgeneral-exceptions=Exception,HomeAssistantError
+overgeneral-exceptions=Exception
requirements.txt
@@ -1,13 +1,13 @@
-attr==0.3.1
-async_timeout==3.0.0
-aiohttp==3.4.0
-docker==3.5.0
-colorlog==3.1.2
-voluptuous==0.11.5
-gitpython==2.1.10
-pytz==2018.4
-pyudev==0.21.0
-pycryptodome==3.6.6
+aiohttp==3.5.4
+async_timeout==3.0.1
+attrs==18.2.0
+cchardet==2.1.4
+colorlog==4.0.2
 cpe==1.2.1
-uvloop==0.11.2
-cchardet==2.1.1
+cryptography==2.5
+docker==3.7.0
+gitpython==2.1.11
+pytz==2018.9
+pyudev==0.21.0
+uvloop==0.11.3
+voluptuous==0.11.5
requirements_tests.txt (new file)
@@ -0,0 +1,5 @@
+flake8==3.7.5
+pylint==2.2.2
+pytest==4.1.1
+pytest-timeout==1.3.3
+pytest-aiohttp==0.3.0
setup.cfg (new file)
@@ -0,0 +1,17 @@
+[isort]
+multi_line_output = 3
+include_trailing_comma=True
+force_grid_wrap=0
+line_length=88
+indent = "    "
+not_skip = __init__.py
+force_sort_within_sections = true
+sections = FUTURE,STDLIB,INBETWEENS,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
+default_section = THIRDPARTY
+forced_separate = tests
+combine_as_imports = true
+use_parentheses = true
+
+[flake8]
+max-line-length = 88
+ignore = E501
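Editor's note: these isort settings are chosen to agree with black (88-column lines, trailing commas, parenthesized wrapped imports, imports sorted within sections), which is why the import hunks earlier in this diff all take the one-name-per-line shape. A sketch of the resulting style (module and names illustrative):

from ..const import (
    ATTR_BOOT,
    ATTR_NAME,
    ATTR_VERSION,
)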
setup.py
@@ -2,7 +2,6 @@ from setuptools import setup

 from hassio.const import HASSIO_VERSION

-
 setup(
     name='HassIO',
     version=HASSIO_VERSION,

@@ -11,9 +10,9 @@ setup(
     author_email='hello@home-assistant.io',
     url='https://home-assistant.io/',
     description=('Open-source private cloud os for Home-Assistant'
-                 ' based on ResinOS'),
+                 ' based on HassOS'),
     long_description=('A maintainless private cloud operator system that'
-                      'setup a Home-Assistant instance. Based on ResinOS'),
+                      'setup a Home-Assistant instance. Based on HassOS'),
     classifiers=[
         'Intended Audience :: End Users/Desktop',
         'Intended Audience :: Developers',

@@ -30,13 +29,7 @@ setup(
     zip_safe=False,
     platforms='any',
     packages=[
-        'hassio',
-        'hassio.docker',
-        'hassio.addons',
-        'hassio.api',
-        'hassio.misc',
-        'hassio.utils',
-        'hassio.snapshots'
+        'hassio', 'hassio.docker', 'hassio.addons', 'hassio.api', 'hassio.misc',
+        'hassio.utils', 'hassio.snapshots'
     ],
-    include_package_data=True
-)
+    include_package_data=True)
tests/__init__.py (new file)
@@ -0,0 +1 @@
+"""Hass.io Testframework."""

tests/addons/__init__.py (new file)
@@ -0,0 +1 @@
+"""Add-ons tests."""

tests/addons/test_config.py (new file)
@@ -0,0 +1,71 @@
+"""Validate Add-on configs."""
+
+import voluptuous as vol
+import pytest
+
+from hassio.addons import validate as vd
+
+from ..common import load_json_fixture
+
+
+def test_basic_config():
+    """Validate basic config and check the default values."""
+    config = load_json_fixture("basic-addon-config.json")
+
+    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
+
+    assert valid_config['name'] == "Test Add-on"
+    assert valid_config['image'] == "test/{arch}-my-custom-addon"
+
+    # Check defaults
+    assert not valid_config['host_network']
+    assert not valid_config['host_ipc']
+    assert not valid_config['host_dbus']
+    assert not valid_config['host_pid']
+
+    assert not valid_config['hassio_api']
+    assert not valid_config['homeassistant_api']
+    assert not valid_config['docker_api']
+
+
+def test_invalid_repository():
+    """Validate basic config with invalid repositories."""
+    config = load_json_fixture("basic-addon-config.json")
+
+    config['image'] = "something"
+    with pytest.raises(vol.Invalid):
+        vd.SCHEMA_ADDON_CONFIG(config)
+
+    config['image'] = "homeassistant/no-valid-repo:no-tag-allow"
+    with pytest.raises(vol.Invalid):
+        vd.SCHEMA_ADDON_CONFIG(config)
+
+    config[
+        'image'] = "registry.gitlab.com/company/add-ons/test-example/text-example:no-tag-allow"
+    with pytest.raises(vol.Invalid):
+        vd.SCHEMA_ADDON_CONFIG(config)
+
+
+def test_valid_repository():
+    """Validate basic config with different valid repositories."""
+    config = load_json_fixture("basic-addon-config.json")
+
+    custom_registry = "registry.gitlab.com/company/add-ons/core/test-example"
+    config['image'] = custom_registry
+    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
+    assert valid_config['image'] == custom_registry
+
+
+def test_valid_map():
+    """Validate basic config with different valid maps."""
+    config = load_json_fixture("basic-addon-config.json")
+
+    config['map'] = ['backup:rw', 'ssl:ro', 'config']
+    vd.SCHEMA_ADDON_CONFIG(config)
+
+
+def test_valid_basic_build():
+    """Validate basic build config."""
+    config = load_json_fixture("basic-build-config.json")
+
+    vd.SCHEMA_BUILD_CONFIG(config)
Some files were not shown because too many files have changed in this diff.