Mirror of https://github.com/home-assistant/supervisor.git
Synced 2025-08-17 13:09:22 +00:00

Compare commits (109 commits)
Commits in this comparison (short SHA1s; author and date columns were empty in the capture):

f14eef62ae, ee86770570, 385a4e9f6f, 142cdcffca, eb6c753514, c3b62c80fb,
f77e176a6e, 3f99dec858, 81b0cf55b0, 1d5d2dc731, 04f5ee0a80, 7a02777cfb,
7257c44d27, cb15602814, 0f2c333484, 6f2cf2ef85, 70a721a47d, b32947af98,
94b44ec7fe, 5c8aa71c31, a6c424b7c8, 38e40c342d, 26d390b66e, baddafa552,
f443d3052b, 8fc27ff28e, 3784d759f5, 61037f3852, db8aaecdbe, 15a4541595,
50ae8e2335, 279df17ba4, f8e6362283, 0c44064926, 73c437574c, 69a2182c04,
ce80e6cd32, 054def09f7, eebe90bd14, 6ea280ce60, e992b70f92, 0f58bb35ba,
56abfb6adc, 8352d61f8d, 51d585f299, d017a52922, 78ec0d1314, c84151e9e8,
e8e599cb8c, 232b9ea239, 1c49351e66, 34d1f4725d, 7cd81dcc95, 1bdd3d88de,
d105552fa9, b5af35bd6c, 7d46487491, 38a599011e, e59e2fc8d7, b9ce405ada,
d7df423deb, 99eea99e93, 63d82ce03e, 13a2c1ecd9, 627ab4ee81, 54f45539be,
53297205c8, 0f09fdfcce, 24db0fdb86, 7349234638, c691f2a559, 110cd32dc3,
26d8dc0ec6, fd41bda828, 1e3868bb70, ece6c644cf, 6a5bd5a014, 664334f1ad,
e5e28747d4, c7956d95ae, 5ce6abdbb6, fad0185c26, 86faf32709, 19f413796d,
8f94b4d63f, db263f84af, 747810b729, d6768f15a1, 6c75957578, 3a8307acfe,
f20c7d42ee, 9419fbff94, 3ac6c03637, a95274f1b3, 9d2fb87cec, ce9c3565b6,
b0ec58ed1b, 893a5f8dd3, 98064f6a90, 5146f89354, fb46592d48, b4fb5ac681,
4b7201dc59, 3a5a4e4c27, 70104a9280, efbc7b17a1, 64c5e20fc4, 13498afa97,
f6375f1bd6
API.md (115 changes)

````diff
@@ -4,7 +4,7 @@
 
 Interface for Home Assistant to control things from supervisor.
 
-On error:
+On error / Code 400:
 
 ```json
 {
@@ -13,7 +13,7 @@ On error:
 }
 ```
 
-On success:
+On success / Code 200:
 
 ```json
 {
@@ -22,6 +22,8 @@ On success:
 }
 ```
 
+To access the API you need to set the `X-HASSIO-KEY` header; the token is available to add-ons/Home Assistant as the `HASSIO_TOKEN` environment variable.
+
 ### Hass.io
 
 - GET `/supervisor/ping`
@@ -36,6 +38,7 @@ The addons from `addons` are only installed one.
     "arch": "armhf|aarch64|i386|amd64",
     "beta_channel": "true|false",
     "timezone": "TIMEZONE",
+    "wait_boot": "int",
     "addons": [
         {
             "name": "xy bla",
@@ -44,6 +47,7 @@ The addons from `addons` are only installed one.
             "repository": "12345678|null",
             "version": "LAST_VERSION",
             "installed": "INSTALL_VERSION",
+            "icon": "bool",
             "logo": "bool",
             "state": "started|stopped",
         }
@@ -70,6 +74,7 @@ Optional:
 {
     "beta_channel": "true|false",
     "timezone": "TIMEZONE",
+    "wait_boot": "int",
     "addons_repositories": [
         "REPO_URL"
    ]
@@ -84,44 +89,20 @@ Reload addons/version.
 
 Output is the raw docker log.
 
-### Security
-
-- GET `/security/info`
-
-```json
-{
-    "initialize": "bool",
-    "totp": "bool"
-}
-```
-
-- POST `/security/options`
-
-```json
-{
-    "password": "xy"
-}
-```
-
-- POST `/security/totp`
-
-```json
-{
-    "password": "xy"
-}
-```
-
-Return QR-Code
-
-- POST `/security/session`
-```json
-{
-    "password": "xy",
-    "totp": "null|123456"
-}
-```
-
-### Backup/Snapshot
+- GET `/supervisor/stats`
+
+```json
+{
+    "cpu_percent": 0.0,
+    "memory_usage": 283123,
+    "memory_limit": 329392,
+    "network_tx": 0,
+    "network_rx": 0,
+    "blk_read": 0,
+    "blk_write": 0
+}
+```
+
+### Snapshot
 
 - GET `/snapshots`
 
@@ -131,7 +112,8 @@ Return QR-Code
         {
             "slug": "SLUG",
             "date": "ISO",
-            "name": "Custom name"
+            "name": "Custom name",
+            "type": "full|partial"
         }
     ]
 }
@@ -168,10 +150,7 @@ Return QR-Code
     "name": "custom snapshot name / description",
     "date": "ISO",
     "size": "SIZE_IN_MB",
-    "homeassistant": {
-        "version": "INSTALLED_HASS_VERSION",
-        "devices": []
-    },
+    "homeassistant": "version",
     "addons": [
         {
             "slug": "ADDON_SLUG",
@@ -256,6 +235,8 @@ Optional:
 }
 ```
 
+- POST `/host/reload`
+
 ### Network
 
 - GET `/network/info`
@@ -282,7 +263,6 @@ Optional:
 {
     "version": "INSTALL_VERSION",
     "last_version": "LAST_VERSION",
-    "devices": [""],
     "image": "str",
     "custom": "bool -> if custom image",
     "boot": "bool",
@@ -315,7 +295,6 @@ Output is the raw Docker log.
 
 ```json
 {
-    "devices": [],
     "image": "Optional|null",
     "last_version": "Optional for custom image|null",
     "port": "port for access hass",
@@ -331,6 +310,23 @@ Image with `null` and last_version with `null` reset this options.
 
 Proxy to real home-assistant instance.
 
+- GET `/homeassistant/websocket`
+
+Proxy to real websocket instance.
+
+- GET `/homeassistant/stats`
+```json
+{
+    "cpu_percent": 0.0,
+    "memory_usage": 283123,
+    "memory_limit": 329392,
+    "network_tx": 0,
+    "network_rx": 0,
+    "blk_read": 0,
+    "blk_write": 0
+}
+```
+
 ### RESTful for API addons
 
 - GET `/addons`
@@ -350,15 +346,9 @@ Get all available addons.
             "installed": "none|INSTALL_VERSION",
             "detached": "bool",
             "build": "bool",
-            "privileged": ["NET_ADMIN", "SYS_ADMIN"],
-            "devices": ["/dev/xy"],
             "url": "null|url",
-            "logo": "bool",
-            "audio": "bool",
-            "gpio": "bool",
-            "stdin": "bool",
-            "hassio_api": "bool",
-            "homeassistant_api": "bool"
+            "icon": "bool",
+            "logo": "bool"
         }
     ],
     "repositories": [
@@ -380,6 +370,7 @@ Get all available addons.
 {
     "name": "xy bla",
     "description": "description",
+    "long_description": "null|markdown",
     "auto_update": "bool",
     "url": "null|url of addon",
     "detached": "bool",
@@ -392,9 +383,14 @@ Get all available addons.
     "options": "{}",
     "network": "{}|null",
     "host_network": "bool",
+    "host_ipc": "bool",
+    "host_dbus": "bool",
    "privileged": ["NET_ADMIN", "SYS_ADMIN"],
     "devices": ["/dev/xy"],
+    "auto_uart": "bool",
+    "icon": "bool",
     "logo": "bool",
+    "changelog": "bool",
     "hassio_api": "bool",
     "homeassistant_api": "bool",
     "stdin": "bool",
@@ -406,8 +402,12 @@ Get all available addons.
 }
 ```
 
+- GET `/addons/{addon}/icon`
+
 - GET `/addons/{addon}/logo`
 
+- GET `/addons/{addon}/changelog`
+
 - POST `/addons/{addon}/options`
 
 ```json
@@ -423,7 +423,7 @@ Get all available addons.
 }
 ```
 
-For reset custom network/audio settings, set it `null`.
+Reset custom network/audio/options, set it `null`.
 
 - POST `/addons/{addon}/start`
 
@@ -449,6 +449,19 @@ Only supported for local build addons
 
 Write data to add-on stdin
 
+- GET `/addons/{addon}/stats`
+```json
+{
+    "cpu_percent": 0.0,
+    "memory_usage": 283123,
+    "memory_limit": 329392,
+    "network_tx": 0,
+    "network_rx": 0,
+    "blk_read": 0,
+    "blk_write": 0
+}
+```
+
 ## Host Control
 
 Communicate over UNIX socket with a host daemon.
````
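
The auth header and the new stats endpoints are easiest to see in a short client sketch. This is a minimal example: the `X-HASSIO-KEY` header, `HASSIO_TOKEN` variable, and endpoint paths come from the diff above, while the `http://hassio` base URL, the `data` envelope field, and the third-party `requests` library are assumptions for illustration.

```python
"""Minimal sketch of calling the supervisor API from an add-on."""
import os

import requests  # third-party HTTP client; an assumption, not hassio code

SUPERVISOR = "http://hassio"  # assumed in-container alias for the supervisor
HEADERS = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

# Liveness check.
requests.get(f"{SUPERVISOR}/supervisor/ping", headers=HEADERS).raise_for_status()

# Resource stats; the JSON keys match the documented stats payload.
resp = requests.get(f"{SUPERVISOR}/supervisor/stats", headers=HEADERS)
stats = resp.json().get("data", resp.json())  # "data" envelope is an assumption
print(stats["cpu_percent"], stats["memory_usage"], stats["memory_limit"])
```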

Dockerfile (28 changes)

```diff
@@ -1,21 +1,25 @@
 ARG BUILD_FROM
 FROM $BUILD_FROM
 
-# add env
+# Add env
 ENV LANG C.UTF-8
 
-# setup base
-RUN apk add --no-cache python3 python3-dev \
-        libressl libressl-dev \
-        libffi libffi-dev \
-        musl musl-dev \
-        gcc libstdc++ \
-        git socat \
-    && pip3 install --no-cache-dir --upgrade pip \
-    && pip3 install --no-cache-dir --upgrade cryptography jwcrypto \
-    && apk del python3-dev libressl-dev libffi-dev musl-dev gcc
+# Setup base
+RUN apk add --no-cache \
+        python3 \
+        git \
+        socat \
+        libstdc++ \
+    && apk add --no-cache --virtual .build-dependencies \
+        make \
+        python3-dev \
+        g++ \
+    && pip3 install --no-cache-dir \
+        uvloop \
+        cchardet \
+    && apk del .build-dependencies
 
-# install HassIO
+# Install HassIO
 COPY . /usr/src/hassio
 RUN pip3 install --no-cache-dir /usr/src/hassio \
     && rm -rf /usr/src/hassio
```

README.md (10 changes)

```diff
@@ -1,8 +1,12 @@
 # Hass.io
 
-### First private cloud solution for home automation.
+## First private cloud solution for home automation
 
-Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.
+Hass.io is a Docker-based system for managing your Home Assistant installation
+and related applications. The system is controlled via Home Assistant which
+communicates with the Supervisor. The Supervisor provides an API to manage the
+installation. This includes changing network settings or installing
+and updating software.
 
 
 
@@ -11,4 +15,4 @@ Hass.io is a Docker based system for managing your Home Assistant installation a
 
 ## Installation
 
-Installation instructions can be found at [https://home-assistant.io/hassio](https://home-assistant.io/hassio).
+Installation instructions can be found at <https://home-assistant.io/hassio>.
```

```diff
@@ -10,9 +10,19 @@ import hassio.core as core
 _LOGGER = logging.getLogger(__name__)
 
 
+def attempt_use_uvloop():
+    """Attempt to use uvloop."""
+    try:
+        import uvloop
+        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+    except ImportError:
+        pass
+
+
 # pylint: disable=invalid-name
 if __name__ == "__main__":
     bootstrap.initialize_logging()
+    attempt_use_uvloop()
     loop = asyncio.get_event_loop()
 
     if not bootstrap.check_environment():
@@ -23,10 +33,10 @@ if __name__ == "__main__":
     loop.set_default_executor(executor)
 
     _LOGGER.info("Initialize Hassio setup")
-    config = bootstrap.initialize_system_data()
-    hassio = core.HassIO(loop, config)
+    coresys = bootstrap.initialize_coresys(loop)
+    hassio = core.HassIO(coresys)
 
-    bootstrap.migrate_system_env(config)
+    bootstrap.migrate_system_env(coresys)
 
     _LOGGER.info("Setup HassIO")
     loop.run_until_complete(hassio.setup())
```
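
The placement of `attempt_use_uvloop()` in this hunk matters: `asyncio.set_event_loop_policy()` only affects loops created afterwards, so it has to run before the first `asyncio.get_event_loop()` call. A standalone sketch of the same optional-accelerator pattern (plain asyncio; nothing here is hassio-specific):

```python
"""Standalone sketch of the optional-uvloop pattern used above."""
import asyncio


def attempt_use_uvloop():
    """Install uvloop's loop policy when the package is importable."""
    try:
        import uvloop
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    except ImportError:
        pass  # optional C extension missing; keep the default loop


if __name__ == "__main__":
    attempt_use_uvloop()  # must happen before the loop is created
    loop = asyncio.get_event_loop()
    # Prints 'uvloop' when installed, otherwise 'asyncio'.
    print(type(loop).__module__.split(".")[0])
    loop.run_until_complete(asyncio.sleep(0))
```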

```diff
@@ -6,45 +6,44 @@ from .addon import Addon
 from .repository import Repository
 from .data import Data
 from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
+from ..coresys import CoreSysAttributes
 
 _LOGGER = logging.getLogger(__name__)
 
 BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
 
 
-class AddonManager(object):
+class AddonManager(CoreSysAttributes):
     """Manage addons inside HassIO."""
 
-    def __init__(self, config, loop, docker):
+    def __init__(self, coresys):
         """Initialize docker base wrapper."""
-        self.loop = loop
-        self.config = config
-        self.docker = docker
-        self.data = Data(config)
-        self.addons = {}
-        self.repositories = {}
+        self.coresys = coresys
+        self.data = Data(coresys)
+        self.addons_obj = {}
+        self.repositories_obj = {}
 
     @property
     def list_addons(self):
         """Return a list of all addons."""
-        return list(self.addons.values())
+        return list(self.addons_obj.values())
 
     @property
     def list_repositories(self):
         """Return list of addon repositories."""
-        return list(self.repositories.values())
+        return list(self.repositories_obj.values())
 
     def get(self, addon_slug):
         """Return a adddon from slug."""
-        return self.addons.get(addon_slug)
+        return self.addons_obj.get(addon_slug)
 
-    async def prepare(self):
+    async def load(self):
         """Startup addon management."""
         self.data.reload()
 
         # init hassio built-in repositories
         repositories = \
-            set(self.config.addons_repositories) | BUILTIN_REPOSITORIES
+            set(self._config.addons_repositories) | BUILTIN_REPOSITORIES
 
         # init custom repositories & load addons
         await self.load_repositories(repositories)
@@ -52,9 +51,9 @@
     async def reload(self):
         """Update addons from repo and reload list."""
         tasks = [repository.update() for repository in
-                 self.repositories.values()]
+                 self.repositories_obj.values()]
         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks, loop=self._loop)
 
         # read data from repositories
         self.data.reload()
@@ -65,29 +64,29 @@
     async def load_repositories(self, list_repositories):
         """Add a new custom repository."""
         new_rep = set(list_repositories)
-        old_rep = set(self.repositories)
+        old_rep = set(self.repositories_obj)
 
         # add new repository
         async def _add_repository(url):
             """Helper function to async add repository."""
-            repository = Repository(self.config, self.loop, self.data, url)
+            repository = Repository(self.coresys, url)
             if not await repository.load():
                 _LOGGER.error("Can't load from repository %s", url)
                 return
-            self.repositories[url] = repository
+            self.repositories_obj[url] = repository
 
             # don't add built-in repository to config
             if url not in BUILTIN_REPOSITORIES:
-                self.config.add_addon_repository(url)
+                self._config.add_addon_repository(url)
 
         tasks = [_add_repository(url) for url in new_rep - old_rep]
         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks, loop=self._loop)
 
         # del new repository
         for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
-            self.repositories.pop(url).remove()
-            self.config.drop_addon_repository(url)
+            self.repositories_obj.pop(url).remove()
+            self._config.drop_addon_repository(url)
 
         # update data
         self.data.reload()
@@ -98,8 +97,8 @@
         all_addons = set(self.data.system) | set(self.data.cache)
 
         # calc diff
-        add_addons = all_addons - set(self.addons)
-        del_addons = set(self.addons) - all_addons
+        add_addons = all_addons - set(self.addons_obj)
+        del_addons = set(self.addons_obj) - all_addons
 
         _LOGGER.info("Load addons: %d all - %d new - %d remove",
                      len(all_addons), len(add_addons), len(del_addons))
@@ -107,27 +106,27 @@
         # new addons
         tasks = []
         for addon_slug in add_addons:
-            addon = Addon(
-                self.config, self.loop, self.docker, self.data, addon_slug)
+            addon = Addon(self.coresys, addon_slug)
 
             tasks.append(addon.load())
-            self.addons[addon_slug] = addon
+            self.addons_obj[addon_slug] = addon
 
         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks, loop=self._loop)
 
         # remove
         for addon_slug in del_addons:
-            self.addons.pop(addon_slug)
+            self.addons_obj.pop(addon_slug)
 
     async def auto_boot(self, stage):
         """Boot addons with mode auto."""
         tasks = []
-        for addon in self.addons.values():
+        for addon in self.addons_obj.values():
             if addon.is_installed and addon.boot == BOOT_AUTO and \
                     addon.startup == stage:
                 tasks.append(addon.start())
 
         _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.sleep(self._config.wait_boot, loop=self._loop)
```
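
The pattern behind this refactor: instead of threading `config`, `loop`, and `docker` through every constructor, objects keep a single `coresys` reference and a mixin exposes the shared pieces as underscore-prefixed properties (`self._config`, `self._loop`, `self._arch`, as used throughout these hunks). A minimal sketch of such a mixin — names follow the diff, the exact hassio implementation may differ:

```python
"""Minimal sketch of a CoreSysAttributes-style mixin (assumed shape)."""


class CoreSys:
    """Central container for shared state (sketch)."""

    def __init__(self, loop, config, arch):
        self.loop = loop
        self.config = config
        self.arch = arch


class CoreSysAttributes:
    """Mixin: expose coresys members under short private names."""

    coresys = None  # set by the subclass __init__

    @property
    def _loop(self):
        return self.coresys.loop

    @property
    def _config(self):
        return self.coresys.config

    @property
    def _arch(self):
        return self.coresys.arch


class AddonManager(CoreSysAttributes):
    def __init__(self, coresys):
        # one constructor argument instead of (config, loop, docker)
        self.coresys = coresys


core = CoreSys(loop="<event loop>", config="<config>", arch="amd64")
assert AddonManager(core)._arch == "amd64"
```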

```diff
@@ -13,18 +13,20 @@ from voluptuous.humanize import humanize_error
 
 from .validate import (
     validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME)
+from .utils import check_installed
 from ..const import (
     ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
     ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
     ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
-    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
+    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP, ATTR_UUID,
     STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
     ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
     ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
-    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY)
-from .util import check_installed
-from ..dock.addon import DockerAddon
-from ..tools import write_json_file, read_json_file
+    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
+    ATTR_HOST_DBUS, ATTR_AUTO_UART)
+from ..coresys import CoreSysAttributes
+from ..docker.addon import DockerAddon
+from ..utils.json import write_json_file, read_json_file
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -33,22 +35,20 @@ RE_WEBUI = re.compile(
     r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")
 
 
-class Addon(object):
+class Addon(CoreSysAttributes):
     """Hold data for addon inside HassIO."""
 
-    def __init__(self, config, loop, docker, data, slug):
+    def __init__(self, coresys, slug):
         """Initialize data holder."""
-        self.loop = loop
-        self.config = config
-        self.data = data
-        self._id = slug
+        self.coresys = coresys
+        self.instance = DockerAddon(coresys, slug)
 
-        self.docker = DockerAddon(config, loop, docker, self)
+        self._id = slug
 
     async def load(self):
         """Async initialize of object."""
         if self.is_installed:
-            await self.docker.attach()
+            await self.instance.attach()
 
     @property
     def slug(self):
@@ -58,90 +58,96 @@
     @property
     def _mesh(self):
         """Return addon data from system or cache."""
-        return self.data.system.get(self._id, self.data.cache.get(self._id))
+        return self._data.system.get(self._id, self._data.cache.get(self._id))
 
+    @property
+    def _data(self):
+        """Return addons data storage."""
+        return self._addons.data
+
     @property
     def is_installed(self):
         """Return True if a addon is installed."""
-        return self._id in self.data.system
+        return self._id in self._data.system
 
     @property
     def is_detached(self):
         """Return True if addon is detached."""
-        return self._id not in self.data.cache
+        return self._id not in self._data.cache
 
     @property
     def version_installed(self):
         """Return installed version."""
-        return self.data.user.get(self._id, {}).get(ATTR_VERSION)
+        return self._data.user.get(self._id, {}).get(ATTR_VERSION)
 
     def _set_install(self, version):
         """Set addon as installed."""
-        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
-        self.data.user[self._id] = {
+        self._data.system[self._id] = deepcopy(self._data.cache[self._id])
+        self._data.user[self._id] = {
             ATTR_OPTIONS: {},
             ATTR_VERSION: version,
         }
-        self.data.save()
+        self._data.save_data()
 
     def _set_uninstall(self):
         """Set addon as uninstalled."""
-        self.data.system.pop(self._id, None)
-        self.data.user.pop(self._id, None)
-        self.data.save()
+        self._data.system.pop(self._id, None)
+        self._data.user.pop(self._id, None)
+        self._data.save_data()
 
     def _set_update(self, version):
         """Update version of addon."""
-        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
-        self.data.user[self._id][ATTR_VERSION] = version
-        self.data.save()
+        self._data.system[self._id] = deepcopy(self._data.cache[self._id])
+        self._data.user[self._id][ATTR_VERSION] = version
+        self._data.save_data()
 
     def _restore_data(self, user, system):
         """Restore data to addon."""
-        self.data.user[self._id] = deepcopy(user)
-        self.data.system[self._id] = deepcopy(system)
-        self.data.save()
+        self._data.user[self._id] = deepcopy(user)
+        self._data.system[self._id] = deepcopy(system)
+        self._data.save_data()
 
     @property
     def options(self):
         """Return options with local changes."""
         if self.is_installed:
             return {
-                **self.data.system[self._id][ATTR_OPTIONS],
-                **self.data.user[self._id][ATTR_OPTIONS]
+                **self._data.system[self._id][ATTR_OPTIONS],
+                **self._data.user[self._id][ATTR_OPTIONS]
             }
-        return self.data.cache[self._id][ATTR_OPTIONS]
+        return self._data.cache[self._id][ATTR_OPTIONS]
 
     @options.setter
     def options(self, value):
         """Store user addon options."""
-        self.data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
-        self.data.save()
+        if value is None:
+            self._data.user[self._id][ATTR_OPTIONS] = {}
+        else:
+            self._data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
 
     @property
     def boot(self):
         """Return boot config with prio local settings."""
-        if ATTR_BOOT in self.data.user.get(self._id, {}):
-            return self.data.user[self._id][ATTR_BOOT]
+        if ATTR_BOOT in self._data.user.get(self._id, {}):
+            return self._data.user[self._id][ATTR_BOOT]
         return self._mesh[ATTR_BOOT]
 
     @boot.setter
     def boot(self, value):
         """Store user boot options."""
-        self.data.user[self._id][ATTR_BOOT] = value
-        self.data.save()
+        self._data.user[self._id][ATTR_BOOT] = value
 
     @property
     def auto_update(self):
         """Return if auto update is enable."""
-        if ATTR_AUTO_UPDATE in self.data.user.get(self._id, {}):
-            return self.data.user[self._id][ATTR_AUTO_UPDATE]
+        if ATTR_AUTO_UPDATE in self._data.user.get(self._id, {}):
+            return self._data.user[self._id][ATTR_AUTO_UPDATE]
+        return None
 
     @auto_update.setter
     def auto_update(self, value):
         """Set auto update."""
-        self.data.user[self._id][ATTR_AUTO_UPDATE] = value
-        self.data.save()
+        self._data.user[self._id][ATTR_AUTO_UPDATE] = value
 
     @property
     def name(self):
@@ -153,11 +159,31 @@
         """Return timeout of addon for docker stop."""
         return self._mesh[ATTR_TIMEOUT]
 
+    @property
+    def uuid(self):
+        """Return a API token for this add-on."""
+        if self.is_installed:
+            return self._data.user[self._id][ATTR_UUID]
+        return None
+
     @property
     def description(self):
         """Return description of addon."""
         return self._mesh[ATTR_DESCRIPTON]
 
+    @property
+    def long_description(self):
+        """Return README.md as long_description."""
+        readme = Path(self.path_location, 'README.md')
+
+        # If readme not exists
+        if not readme.exists():
+            return None
+
+        # Return data
+        with readme.open('r') as readme_file:
+            return readme_file.read()
+
     @property
     def repository(self):
         """Return repository of addon."""
@@ -166,8 +192,8 @@
     @property
     def last_version(self):
         """Return version of addon."""
-        if self._id in self.data.cache:
-            return self.data.cache[self._id][ATTR_VERSION]
+        if self._id in self._data.cache:
+            return self._data.cache[self._id][ATTR_VERSION]
         return self.version_installed
 
     @property
@@ -182,24 +208,22 @@
             return None
 
         if not self.is_installed or \
-                ATTR_NETWORK not in self.data.user[self._id]:
+                ATTR_NETWORK not in self._data.user[self._id]:
             return self._mesh[ATTR_PORTS]
-        return self.data.user[self._id][ATTR_NETWORK]
+        return self._data.user[self._id][ATTR_NETWORK]
 
     @ports.setter
     def ports(self, value):
         """Set custom ports of addon."""
         if value is None:
-            self.data.user[self._id].pop(ATTR_NETWORK, None)
+            self._data.user[self._id].pop(ATTR_NETWORK, None)
         else:
             new_ports = {}
             for container_port, host_port in value.items():
                 if container_port in self._mesh.get(ATTR_PORTS, {}):
                     new_ports[container_port] = host_port
 
-            self.data.user[self._id][ATTR_NETWORK] = new_ports
-
-        self.data.save()
+            self._data.user[self._id][ATTR_NETWORK] = new_ports
 
     @property
     def webui(self):
@@ -218,7 +242,7 @@
         if self.ports is None:
             port = t_port
         else:
-            port = self.ports.get("{}/tcp".format(t_port), t_port)
+            port = self.ports.get(f"{t_port}/tcp", t_port)
 
         # for interface config or port lists
         if isinstance(port, (tuple, list)):
@@ -230,18 +254,33 @@
         else:
             proto = s_prefix
 
-        return "{}://[HOST]:{}{}".format(proto, port, s_suffix)
+        return f"{proto}://[HOST]:{port}{s_suffix}"
 
     @property
     def host_network(self):
         """Return True if addon run on host network."""
         return self._mesh[ATTR_HOST_NETWORK]
 
+    @property
+    def host_ipc(self):
+        """Return True if addon run on host IPC namespace."""
+        return self._mesh[ATTR_HOST_IPC]
+
+    @property
+    def host_dbus(self):
+        """Return True if addon run on host DBUS."""
+        return self._mesh[ATTR_HOST_DBUS]
+
     @property
     def devices(self):
         """Return devices of addon."""
         return self._mesh.get(ATTR_DEVICES)
 
+    @property
+    def auto_uart(self):
+        """Return True if we should map all uart device."""
+        return self._mesh.get(ATTR_AUTO_UART)
+
     @property
     def tmpfs(self):
         """Return tmpfs of addon."""
@@ -293,50 +332,59 @@
         if not self.with_audio:
             return None
 
-        setting = self.config.audio_output
-        if self.is_installed and ATTR_AUDIO_OUTPUT in self.data.user[self._id]:
-            setting = self.data.user[self._id][ATTR_AUDIO_OUTPUT]
+        setting = self._config.audio_output
+        if self.is_installed and \
+                ATTR_AUDIO_OUTPUT in self._data.user[self._id]:
+            setting = self._data.user[self._id][ATTR_AUDIO_OUTPUT]
         return setting
 
     @audio_output.setter
     def audio_output(self, value):
         """Set/remove custom audio output settings."""
         if value is None:
-            self.data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
+            self._data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
         else:
-            self.data.user[self._id][ATTR_AUDIO_OUTPUT] = value
-        self.data.save()
+            self._data.user[self._id][ATTR_AUDIO_OUTPUT] = value
 
     @property
     def audio_input(self):
         """Return ALSA config for input or None."""
         if not self.with_audio:
-            return
+            return None
 
-        setting = self.config.audio_input
-        if self.is_installed and ATTR_AUDIO_INPUT in self.data.user[self._id]:
-            setting = self.data.user[self._id][ATTR_AUDIO_INPUT]
+        setting = self._config.audio_input
+        if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]:
+            setting = self._data.user[self._id][ATTR_AUDIO_INPUT]
         return setting
 
     @audio_input.setter
     def audio_input(self, value):
         """Set/remove custom audio input settings."""
         if value is None:
-            self.data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
+            self._data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
         else:
-            self.data.user[self._id][ATTR_AUDIO_INPUT] = value
-        self.data.save()
+            self._data.user[self._id][ATTR_AUDIO_INPUT] = value
 
     @property
     def url(self):
         """Return url of addon."""
         return self._mesh.get(ATTR_URL)
 
+    @property
+    def with_icon(self):
+        """Return True if a icon exists."""
+        return self.path_icon.exists()
+
     @property
     def with_logo(self):
         """Return True if a logo exists."""
         return self.path_logo.exists()
 
+    @property
+    def with_changelog(self):
+        """Return True if a changelog exists."""
+        return self.path_changelog.exists()
+
     @property
     def supported_arch(self):
         """Return list of supported arch."""
@@ -349,11 +397,11 @@
 
         # Repository with dockerhub images
         if ATTR_IMAGE in addon_data:
-            return addon_data[ATTR_IMAGE].format(arch=self.config.arch)
+            return addon_data[ATTR_IMAGE].format(arch=self._arch)
 
         # local build
         return "{}/{}-addon-{}".format(
-            addon_data[ATTR_REPOSITORY], self.config.arch,
+            addon_data[ATTR_REPOSITORY], self._arch,
             addon_data[ATTR_SLUG])
 
     @property
@@ -374,12 +422,12 @@
     @property
     def path_data(self):
         """Return addon data path inside supervisor."""
-        return Path(self.config.path_addons_data, self._id)
+        return Path(self._config.path_addons_data, self._id)
 
     @property
     def path_extern_data(self):
         """Return addon data path external for docker."""
-        return PurePath(self.config.path_extern_addons_data, self._id)
+        return PurePath(self._config.path_extern_addons_data, self._id)
 
     @property
     def path_options(self):
@@ -391,11 +439,25 @@
         """Return path to this addon."""
         return Path(self._mesh[ATTR_LOCATON])
 
+    @property
+    def path_icon(self):
+        """Return path to addon icon."""
+        return Path(self.path_location, 'icon.png')
+
     @property
     def path_logo(self):
         """Return path to addon logo."""
         return Path(self.path_location, 'logo.png')
 
+    @property
+    def path_changelog(self):
+        """Return path to addon changelog."""
+        return Path(self.path_location, 'CHANGELOG.md')
+
+    def save_data(self):
+        """Save data of addon."""
+        self._addons.data.save_data()
+
     def write_options(self):
         """Return True if addon options is written to data."""
         schema = self.schema
@@ -403,10 +465,14 @@
 
         try:
             schema(options)
-            return write_json_file(self.path_options, options)
+            write_json_file(self.path_options, options)
         except vol.Invalid as ex:
-            _LOGGER.error("Addon %s have wrong options -> %s", self._id,
+            _LOGGER.error("Addon %s have wrong options: %s", self._id,
                           humanize_error(options, ex))
+        except (OSError, json.JSONDecodeError) as err:
+            _LOGGER.error("Addon %s can't write options: %s", self._id, err)
+        else:
+            return True
 
         return False
 
@@ -425,8 +491,8 @@
             return True
 
         # load next schema
-        new_raw_schema = self.data.cache[self._id][ATTR_SCHEMA]
-        default_options = self.data.cache[self._id][ATTR_OPTIONS]
+        new_raw_schema = self._data.cache[self._id][ATTR_SCHEMA]
+        default_options = self._data.cache[self._id][ATTR_OPTIONS]
 
         # if disabled
         if isinstance(new_raw_schema, bool):
@@ -434,7 +500,7 @@
 
         # merge options
         options = {
-            **self.data.user[self._id][ATTR_OPTIONS],
+            **self._data.user[self._id][ATTR_OPTIONS],
             **default_options,
         }
 
@@ -451,9 +517,9 @@
 
     async def install(self):
         """Install a addon."""
-        if self.config.arch not in self.supported_arch:
+        if self._arch not in self.supported_arch:
             _LOGGER.error(
-                "Addon %s not supported on %s", self._id, self.config.arch)
+                "Addon %s not supported on %s", self._id, self._arch)
             return False
 
         if self.is_installed:
@@ -465,7 +531,7 @@
                 "Create Home-Assistant addon data folder %s", self.path_data)
             self.path_data.mkdir()
 
-        if not await self.docker.install(self.last_version):
+        if not await self.instance.install(self.last_version):
             return False
 
         self._set_install(self.last_version)
@@ -474,7 +540,7 @@
     @check_installed
     async def uninstall(self):
         """Remove a addon."""
-        if not await self.docker.remove():
+        if not await self.instance.remove():
             return False
 
         if self.path_data.is_dir():
@@ -490,7 +556,7 @@
         if not self.is_installed:
             return STATE_NONE
 
-        if await self.docker.is_running():
+        if await self.instance.is_running():
             return STATE_STARTED
         return STATE_STOPPED
 
@@ -500,7 +566,7 @@
 
         Return a coroutine.
         """
-        return self.docker.run()
+        return self.instance.run()
 
     @check_installed
     def stop(self):
@@ -508,7 +574,7 @@
 
         Return a coroutine.
         """
-        return self.docker.stop()
+        return self.instance.stop()
 
     @check_installed
     async def update(self):
@@ -516,17 +582,16 @@
         last_state = await self.state()
 
         if self.last_version == self.version_installed:
-            _LOGGER.warning(
-                "No update available for Addon %s", self._id)
+            _LOGGER.warning("No update available for Addon %s", self._id)
             return False
 
-        if not await self.docker.update(self.last_version):
+        if not await self.instance.update(self.last_version):
             return False
         self._set_update(self.last_version)
 
         # restore state
         if last_state == STATE_STARTED:
-            await self.docker.run()
+            await self.instance.run()
         return True
 
     @check_installed
@@ -535,7 +600,7 @@
 
         Return a coroutine.
         """
-        return self.docker.restart()
+        return self.instance.restart()
 
     @check_installed
     def logs(self):
@@ -543,7 +608,15 @@
 
         Return a coroutine.
        """
-        return self.docker.logs()
+        return self.instance.logs()
 
+    @check_installed
+    def stats(self):
+        """Return stats of container.
+
+        Return a coroutine.
+        """
+        return self.instance.stats()
+
     @check_installed
     async def rebuild(self):
@@ -555,15 +628,15 @@
             return False
 
         # remove docker container but not addon config
-        if not await self.docker.remove():
+        if not await self.instance.remove():
             return False
 
-        if not await self.docker.install(self.version_installed):
+        if not await self.instance.install(self.version_installed):
             return False
 
         # restore state
         if last_state == STATE_STARTED:
-            await self.docker.run()
+            await self.instance.run()
         return True
 
     @check_installed
@@ -576,27 +649,29 @@
             _LOGGER.error("Add-on don't support write to stdin!")
             return False
 
-        return await self.docker.write_stdin(data)
+        return await self.instance.write_stdin(data)
 
     @check_installed
     async def snapshot(self, tar_file):
         """Snapshot a state of a addon."""
-        with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
+        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
             # store local image
             if self.need_build and not await \
-                    self.docker.export_image(Path(temp, "image.tar")):
+                    self.instance.export_image(Path(temp, "image.tar")):
                 return False
 
             data = {
-                ATTR_USER: self.data.user.get(self._id, {}),
-                ATTR_SYSTEM: self.data.system.get(self._id, {}),
+                ATTR_USER: self._data.user.get(self._id, {}),
+                ATTR_SYSTEM: self._data.system.get(self._id, {}),
                 ATTR_VERSION: self.version_installed,
                 ATTR_STATE: await self.state(),
             }
 
             # store local configs/state
-            if not write_json_file(Path(temp, "addon.json"), data):
-                _LOGGER.error("Can't write addon.json for %s", self._id)
+            try:
+                write_json_file(Path(temp, "addon.json"), data)
+            except (OSError, json.JSONDecodeError) as err:
+                _LOGGER.error("Can't save meta for %s: %s", self._id, err)
                 return False
 
             # write into tarfile
@@ -609,9 +684,9 @@
 
             try:
                 _LOGGER.info("Build snapshot for addon %s", self._id)
-                await self.loop.run_in_executor(None, _create_tar)
-            except tarfile.TarError as err:
-                _LOGGER.error("Can't write tarfile %s -> %s", tar_file, err)
+                await self._loop.run_in_executor(None, _create_tar)
+            except (tarfile.TarError, OSError) as err:
+                _LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
                 return False
 
         _LOGGER.info("Finish snapshot for addon %s", self._id)
@@ -619,7 +694,7 @@
 
     async def restore(self, tar_file):
         """Restore a state of a addon."""
-        with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
+        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
             # extract snapshot
             def _extract_tar():
                 """Extract tar snapshot."""
@@ -627,39 +702,42 @@
                     snapshot.extractall(path=Path(temp))
 
             try:
-                await self.loop.run_in_executor(None, _extract_tar)
+                await self._loop.run_in_executor(None, _extract_tar)
             except tarfile.TarError as err:
-                _LOGGER.error("Can't read tarfile %s -> %s", tar_file, err)
+                _LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
                 return False
 
             # read snapshot data
             try:
                 data = read_json_file(Path(temp, "addon.json"))
             except (OSError, json.JSONDecodeError) as err:
-                _LOGGER.error("Can't read addon.json -> %s", err)
+                _LOGGER.error("Can't read addon.json: %s", err)
 
             # validate
             try:
                 data = SCHEMA_ADDON_SNAPSHOT(data)
             except vol.Invalid as err:
-                _LOGGER.error("Can't validate %s, snapshot data -> %s",
+                _LOGGER.error("Can't validate %s, snapshot data: %s",
                               self._id, humanize_error(data, err))
                 return False
 
             # restore data / reload addon
+            _LOGGER.info("Restore config for addon %s", self._id)
             self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM])
 
             # check version / restore image
             version = data[ATTR_VERSION]
-            if version != self.docker.version:
+            if not await self.instance.exists():
+                _LOGGER.info("Restore image for addon %s", self._id)
+
                 image_file = Path(temp, "image.tar")
                 if image_file.is_file():
-                    await self.docker.import_image(image_file, version)
+                    await self.instance.import_image(image_file, version)
                 else:
-                    if await self.docker.install(version):
-                        await self.docker.cleanup()
+                    if await self.instance.install(version):
+                        await self.instance.cleanup()
             else:
-                await self.docker.stop()
+                await self.instance.stop()
 
             # restore data
             def _restore_data():
@@ -670,9 +748,9 @@
 
             try:
                 _LOGGER.info("Restore data for addon %s", self._id)
-                await self.loop.run_in_executor(None, _restore_data)
+                await self._loop.run_in_executor(None, _restore_data)
             except shutil.Error as err:
-                _LOGGER.error("Can't restore origin data -> %s", err)
+                _LOGGER.error("Can't restore origin data: %s", err)
                 return False
 
             # run addon
```
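
One detail worth calling out in the `options` property above: the `{**system, **user}` merge means user-supplied keys override the add-on's stored defaults, key by key. A small self-contained illustration of that precedence (plain Python, not hassio code; the option names are hypothetical):

```python
# Precedence demo for the {**system, **user} merge in Addon.options.
system_options = {"log_level": "info", "port": 8123}  # add-on defaults
user_options = {"log_level": "debug"}                 # user overrides

merged = {**system_options, **user_options}
assert merged == {"log_level": "debug", "port": 8123}
print(merged)
```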
@@ -1,30 +1,37 @@
|
|||||||
"""HassIO addons build environment."""
|
"""HassIO addons build environment."""
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from .validate import SCHEMA_BUILD_CONFIG
|
from .validate import SCHEMA_BUILD_CONFIG, BASE_IMAGE
|
||||||
from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON
|
from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON
|
||||||
from ..tools import JsonConfig
|
from ..coresys import CoreSysAttributes
|
||||||
|
from ..utils.json import JsonConfig
|
||||||
|
|
||||||
|
|
||||||
class AddonBuild(JsonConfig):
|
class AddonBuild(JsonConfig, CoreSysAttributes):
|
||||||
"""Handle build options for addons."""
|
"""Handle build options for addons."""
|
||||||
|
|
||||||
def __init__(self, config, addon):
|
def __init__(self, coresys, slug):
|
||||||
"""Initialize addon builder."""
|
"""Initialize addon builder."""
|
||||||
self.config = config
|
self.coresys = coresys
|
||||||
self.addon = addon
|
self._id = slug
|
||||||
|
|
||||||
super().__init__(
|
super().__init__(
|
||||||
Path(addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
|
Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
|
||||||
|
|
||||||
def save(self):
|
def save_data(self):
|
||||||
"""Ignore save function."""
|
"""Ignore save function."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@property
|
||||||
|
def addon(self):
|
||||||
|
"""Return addon of build data."""
|
||||||
|
return self._addons.get(self._id)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def base_image(self):
|
def base_image(self):
|
||||||
"""Base images for this addon."""
|
"""Base images for this addon."""
|
||||||
return self._data[ATTR_BUILD_FROM][self.config.arch]
|
return self._data[ATTR_BUILD_FROM].get(
|
||||||
|
self._arch, BASE_IMAGE[self._arch])
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def squash(self):
|
def squash(self):
|
||||||
@@ -40,13 +47,13 @@ class AddonBuild(JsonConfig):
|
|||||||
"""Create a dict with docker build arguments."""
|
"""Create a dict with docker build arguments."""
|
||||||
args = {
|
args = {
|
||||||
'path': str(self.addon.path_location),
|
'path': str(self.addon.path_location),
|
||||||
'tag': "{}:{}".format(self.addon.image, version),
|
'tag': f"{self.addon.image}:{version}",
|
||||||
'pull': True,
|
'pull': True,
|
||||||
'forcerm': True,
|
'forcerm': True,
|
||||||
'squash': self.squash,
|
'squash': self.squash,
|
||||||
'labels': {
|
'labels': {
|
||||||
'io.hass.version': version,
|
'io.hass.version': version,
|
||||||
'io.hass.arch': self.config.arch,
|
'io.hass.arch': self._arch,
|
||||||
'io.hass.type': META_ADDON,
|
'io.hass.type': META_ADDON,
|
||||||
'io.hass.name': self.addon.name,
|
'io.hass.name': self.addon.name,
|
||||||
'io.hass.description': self.addon.description,
|
'io.hass.description': self.addon.description,
|
||||||
@@ -54,7 +61,7 @@ class AddonBuild(JsonConfig):
|
|||||||
'buildargs': {
|
'buildargs': {
|
||||||
'BUILD_FROM': self.base_image,
|
'BUILD_FROM': self.base_image,
|
||||||
'BUILD_VERSION': version,
|
'BUILD_VERSION': version,
|
||||||
'BUILD_ARCH': self.config.arch,
|
'BUILD_ARCH': self._arch,
|
||||||
**self.additional_args,
|
**self.additional_args,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
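The recurring change across these hunks is the move from passing `config` and `loop` into every constructor to handing over a single `coresys` object, with the `CoreSysAttributes` mixin providing shortcut properties such as `_loop`, `_config`, `_arch`, and `_addons`. A minimal sketch of that pattern, assuming a far smaller `CoreSys` than the real one behind `from ..coresys import CoreSysAttributes`:

```python
import asyncio


class CoreSys:
    """Sketch: one object that owns the shared state."""

    def __init__(self):
        self.loop = asyncio.new_event_loop()
        self.arch = "amd64"


class CoreSysAttributes:
    """Mixin: shortcut properties for objects that carry a coresys."""

    coresys = None

    @property
    def _loop(self):
        return self.coresys.loop

    @property
    def _arch(self):
        return self.coresys.arch


class AddonBuild(CoreSysAttributes):
    """Consumers store only the coresys reference and their own id."""

    def __init__(self, coresys, slug):
        self.coresys = coresys
        self._id = slug


build = AddonBuild(CoreSys(), "example_addon")
assert build._arch == "amd64"
```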
```diff
@@ -2,7 +2,7 @@
     "local": {
         "name": "Local Add-Ons",
         "url": "https://home-assistant.io/hassio",
-        "maintainer": "By our self"
+        "maintainer": "you"
     },
     "core": {
         "name": "Built-in Add-Ons",
```
```diff
@@ -7,24 +7,25 @@ from pathlib import Path
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from .util import extract_hash_from_path
+from .utils import extract_hash_from_path
 from .validate import (
     SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG)
 from ..const import (
     FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
-from ..tools import JsonConfig, read_json_file
+from ..coresys import CoreSysAttributes
+from ..utils.json import JsonConfig, read_json_file

 _LOGGER = logging.getLogger(__name__)


-class Data(JsonConfig):
+class Data(JsonConfig, CoreSysAttributes):
     """Hold data for addons inside HassIO."""

-    def __init__(self, config):
+    def __init__(self, coresys):
         """Initialize data holder."""
         super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDON_FILE)
-        self.config = config
+        self.coresys = coresys
         self._repositories = {}
         self._cache = {}

@@ -55,17 +56,17 @@ class Data(JsonConfig):

         # read core repository
         self._read_addons_folder(
-            self.config.path_addons_core, REPOSITORY_CORE)
+            self._config.path_addons_core, REPOSITORY_CORE)

         # read local repository
         self._read_addons_folder(
-            self.config.path_addons_local, REPOSITORY_LOCAL)
+            self._config.path_addons_local, REPOSITORY_LOCAL)

         # add built-in repositories information
         self._set_builtin_repositories()

         # read custom git repositories
-        for repository_element in self.config.path_addons_git.iterdir():
+        for repository_element in self._config.path_addons_git.iterdir():
             if repository_element.is_dir():
                 self._read_git_repository(repository_element)

@@ -118,7 +119,7 @@ class Data(JsonConfig):
                 _LOGGER.warning("Can't read %s", addon)

             except vol.Invalid as ex:
-                _LOGGER.warning("Can't read %s -> %s", addon,
+                _LOGGER.warning("Can't read %s: %s", addon,
                                 humanize_error(addon_config, ex))

     def _set_builtin_repositories(self):
@@ -127,7 +128,7 @@ class Data(JsonConfig):
             builtin_file = Path(__file__).parent.joinpath('built-in.json')
             builtin_data = read_json_file(builtin_file)
         except (OSError, json.JSONDecodeError) as err:
-            _LOGGER.warning("Can't read built-in.json -> %s", err)
+            _LOGGER.warning("Can't read built-in json: %s", err)
             return

         # core repository
@@ -158,4 +159,4 @@ class Data(JsonConfig):
             have_change = True

         if have_change:
-            self.save()
+            self.save_data()
```
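`Data` keeps the same read-and-validate loop throughout: load JSON from disk, run it through a voluptuous schema, and log `humanize_error` output when validation fails. A reduced sketch of that flow with a stand-in schema (the real `SCHEMA_ADDON_CONFIG` is far richer):

```python
import json
from pathlib import Path

import voluptuous as vol
from voluptuous.humanize import humanize_error

# Stand-in schema; illustrative only.
SCHEMA = vol.Schema({vol.Required("name"): str}, extra=vol.REMOVE_EXTRA)


def read_addon_config(path: Path):
    """Return validated config data, or None when the file is unusable."""
    try:
        raw = json.loads(path.read_text())
    except (OSError, json.JSONDecodeError) as err:
        print(f"Can't read {path}: {err}")
        return None

    try:
        return SCHEMA(raw)
    except vol.Invalid as ex:
        print(f"Can't read {path}: {humanize_error(raw, ex)}")
        return None
```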
```diff
@@ -7,33 +7,33 @@ import shutil

 import git

-from .util import get_hash_from_repository
+from .utils import get_hash_from_repository
 from ..const import URL_HASSIO_ADDONS
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)


-class GitRepo(object):
+class GitRepo(CoreSysAttributes):
     """Manage addons git repo."""

-    def __init__(self, config, loop, path, url):
+    def __init__(self, coresys, path, url):
         """Initialize git base wrapper."""
-        self.config = config
-        self.loop = loop
+        self.coresys = coresys
         self.repo = None
         self.path = path
         self.url = url
-        self._lock = asyncio.Lock(loop=loop)
+        self.lock = asyncio.Lock(loop=coresys.loop)

     async def load(self):
         """Init git addon repo."""
         if not self.path.is_dir():
             return await self.clone()

-        async with self._lock:
+        async with self.lock:
             try:
                 _LOGGER.info("Load addon %s repository", self.path)
-                self.repo = await self.loop.run_in_executor(
+                self.repo = await self._loop.run_in_executor(
                     None, git.Repo, str(self.path))

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
@@ -45,10 +45,10 @@ class GitRepo(object):

     async def clone(self):
         """Clone git addon repo."""
-        async with self._lock:
+        async with self.lock:
             try:
                 _LOGGER.info("Clone addon %s repository", self.url)
-                self.repo = await self.loop.run_in_executor(
+                self.repo = await self._loop.run_in_executor(
                     None, ft.partial(
                         git.Repo.clone_from, self.url, str(self.path),
                         recursive=True))
@@ -62,14 +62,14 @@ class GitRepo(object):

     async def pull(self):
         """Pull git addon repo."""
-        if self._lock.locked():
+        if self.lock.locked():
             _LOGGER.warning("It is already a task in progress.")
             return False

-        async with self._lock:
+        async with self.lock:
             try:
                 _LOGGER.info("Pull addon %s repository", self.url)
-                await self.loop.run_in_executor(
+                await self._loop.run_in_executor(
                     None, self.repo.remotes.origin.pull)

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
@@ -83,20 +83,22 @@ class GitRepo(object):
 class GitRepoHassIO(GitRepo):
     """HassIO addons repository."""

-    def __init__(self, config, loop):
+    def __init__(self, coresys):
         """Initialize git hassio addon repository."""
         super().__init__(
-            config, loop, config.path_addons_core, URL_HASSIO_ADDONS)
+            coresys, coresys.config.path_addons_core, URL_HASSIO_ADDONS)


 class GitRepoCustom(GitRepo):
     """Custom addons repository."""

-    def __init__(self, config, loop, url):
+    def __init__(self, coresys, url):
         """Initialize git hassio addon repository."""
-        path = Path(config.path_addons_git, get_hash_from_repository(url))
+        path = Path(
+            coresys.config.path_addons_git,
+            get_hash_from_repository(url))

-        super().__init__(config, loop, path, url)
+        super().__init__(coresys, path, url)

     def remove(self):
         """Remove a custom addon."""
```
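`GitRepo` serializes load, clone, and pull behind a single `asyncio.Lock` and runs the blocking GitPython calls in the executor. A sketch of the same guard using current asyncio (the `loop=` keywords in the hunk reflect the Python 3.6 era and have since been removed from asyncio):

```python
import asyncio
import functools as ft

import git  # GitPython


class RepoMirror:
    """Sketch: one lock so clone and pull never run concurrently."""

    def __init__(self, path, url):
        self.path = path
        self.url = url
        self.repo = None
        self.lock = asyncio.Lock()

    async def clone(self):
        async with self.lock:
            loop = asyncio.get_running_loop()
            # git.Repo.clone_from blocks, so run it in a worker thread.
            self.repo = await loop.run_in_executor(
                None, ft.partial(
                    git.Repo.clone_from, self.url, str(self.path)))

    async def pull(self):
        if self.lock.locked():
            return False  # another git task is already in progress
        async with self.lock:
            loop = asyncio.get_running_loop()
            await loop.run_in_executor(None, self.repo.remotes.origin.pull)
        return True
```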
```diff
@@ -1,18 +1,19 @@
 """Represent a HassIO repository."""
 from .git import GitRepoHassIO, GitRepoCustom
-from .util import get_hash_from_repository
+from .utils import get_hash_from_repository
 from ..const import (
     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)
+from ..coresys import CoreSysAttributes

 UNKNOWN = 'unknown'


-class Repository(object):
+class Repository(CoreSysAttributes):
     """Repository in HassIO."""

-    def __init__(self, config, loop, data, repository):
+    def __init__(self, coresys, repository):
         """Initialize repository object."""
-        self.data = data
+        self.coresys = coresys
         self.source = None
         self.git = None

@@ -20,16 +21,16 @@ class Repository(object):
             self._id = repository
         elif repository == REPOSITORY_CORE:
             self._id = repository
-            self.git = GitRepoHassIO(config, loop)
+            self.git = GitRepoHassIO(coresys)
         else:
             self._id = get_hash_from_repository(repository)
-            self.git = GitRepoCustom(config, loop, repository)
+            self.git = GitRepoCustom(coresys, repository)
             self.source = repository

     @property
     def _mesh(self):
         """Return data struct repository."""
-        return self.data.repositories.get(self._id, {})
+        return self._addons.data.repositories.get(self._id, {})

     @property
     def slug(self):
```
```diff
@@ -1,6 +1,7 @@
 """Validate addons options schema."""
 import logging
 import re
+import uuid

 import voluptuous as vol

@@ -12,10 +13,11 @@ from ..const import (
     ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
     ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
     ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
-    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
-    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT,
+    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK, ATTR_UUID,
+    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
     ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
-    ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY)
+    ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
+    ATTR_HOST_DBUS, ATTR_AUTO_UART)
 from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL

 _LOGGER = logging.getLogger(__name__)
@@ -55,7 +57,9 @@ STARTUP_ALL = [
 PRIVILEGED_ALL = [
     "NET_ADMIN",
     "SYS_ADMIN",
-    "SYS_RAWIO"
+    "SYS_RAWIO",
+    "SYS_TIME",
+    "SYS_NICE"
 ]

 BASE_IMAGE = {
@@ -91,10 +95,13 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
     vol.Optional(ATTR_WEBUI):
         vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
     vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
     vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
+    vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
     vol.Optional(ATTR_TMPFS):
         vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
-    vol.Optional(ATTR_MAP, default=[]): [vol.Match(RE_VOLUME)],
+    vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
     vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
     vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
     vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
@@ -116,7 +123,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
     }), False),
     vol.Optional(ATTR_IMAGE): vol.Match(r"^[\w{}]+/[\-\w{}]+$"),
     vol.Optional(ATTR_TIMEOUT, default=10):
-        vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
+        vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
 }, extra=vol.REMOVE_EXTRA)


@@ -134,23 +141,25 @@ SCHEMA_BUILD_CONFIG = vol.Schema({
     vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
     }),
     vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
-    vol.Optional(ATTR_ARGS, default={}): vol.Schema({
+    vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
         vol.Coerce(str): vol.Coerce(str)
     }),
-})
+}, extra=vol.REMOVE_EXTRA)


 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_USER = vol.Schema({
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-    vol.Optional(ATTR_OPTIONS, default={}): dict,
+    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
+        vol.Match(r"^[0-9a-f]{32}$"),
+    vol.Optional(ATTR_OPTIONS, default=dict): dict,
     vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
     vol.Optional(ATTR_BOOT):
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
     vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
     vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
-})
+}, extra=vol.REMOVE_EXTRA)


 SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
@@ -160,10 +169,10 @@ SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({


 SCHEMA_ADDON_FILE = vol.Schema({
-    vol.Optional(ATTR_USER, default={}): {
+    vol.Optional(ATTR_USER, default=dict): {
         vol.Coerce(str): SCHEMA_ADDON_USER,
     },
-    vol.Optional(ATTR_SYSTEM, default={}): {
+    vol.Optional(ATTR_SYSTEM, default=dict): {
         vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
     }
 })
@@ -174,7 +183,7 @@ SCHEMA_ADDON_SNAPSHOT = vol.Schema({
     vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
     vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-})
+}, extra=vol.REMOVE_EXTRA)


 def validate_options(raw_schema):
@@ -202,8 +211,7 @@ def validate_options(raw_schema):
             # normal value
             options[key] = _single_validate(typ, value, key)
         except (IndexError, KeyError):
-            raise vol.Invalid(
-                "Type error for {}.".format(key)) from None
+            raise vol.Invalid(f"Type error for {key}") from None

     _check_missing_options(raw_schema, options, 'root')
     return options
@@ -212,11 +220,12 @@ def validate_options(raw_schema):


 # pylint: disable=no-value-for-parameter
+# pylint: disable=inconsistent-return-statements
 def _single_validate(typ, value, key):
     """Validate a single element."""
     # if required argument
     if value is None:
-        raise vol.Invalid("Missing required option '{}'.".format(key))
+        raise vol.Invalid(f"Missing required option '{key}'")

     # parse extend data from type
     match = RE_SCHEMA_ELEMENT.match(typ)
@@ -245,7 +254,7 @@ def _single_validate(typ, value, key):
     elif typ.startswith(V_MATCH):
         return vol.Match(match.group('match'))(str(value))

-    raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
+    raise vol.Invalid(f"Fatal error for {key} type {typ}")


 def _nested_validate_list(typ, data_list, key):
@@ -291,5 +300,4 @@ def _check_missing_options(origin, exists, root):
         if isinstance(origin[miss_opt], str) and \
                 origin[miss_opt].endswith("?"):
             continue
-        raise vol.Invalid(
-            "Missing option {} in {}".format(miss_opt, root))
+        raise vol.Invalid(f"Missing option {miss_opt} in {root}")
```
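A subtle but important change in these schemas: mutable defaults move from literals (`default={}`, `default=[]`) to callables (`default=dict`, `default=list`), and the new `uuid` field defaults to a lambda. voluptuous invokes a callable default on every validation run, so each validated document gets a fresh object instead of one shared instance. A small demonstration with illustrative key names:

```python
import uuid

import voluptuous as vol

SCHEMA_USER = vol.Schema({
    vol.Required("version"): vol.Coerce(str),
    # Callable defaults are evaluated per validation run.
    vol.Optional("uuid", default=lambda: uuid.uuid4().hex):
        vol.Match(r"^[0-9a-f]{32}$"),
    vol.Optional("options", default=dict): dict,
}, extra=vol.REMOVE_EXTRA)

first = SCHEMA_USER({"version": "1.0"})
second = SCHEMA_USER({"version": "1.0"})

assert first["uuid"] != second["uuid"]            # fresh UUID each time
assert first["options"] is not second["options"]  # no shared dict
```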
```diff
@@ -8,29 +8,41 @@ from .addons import APIAddons
 from .homeassistant import APIHomeAssistant
 from .host import APIHost
 from .network import APINetwork
+from .proxy import APIProxy
 from .supervisor import APISupervisor
-from .security import APISecurity
 from .snapshots import APISnapshots
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)


-class RestAPI(object):
+class RestAPI(CoreSysAttributes):
     """Handle rest api for hassio."""

-    def __init__(self, config, loop):
+    def __init__(self, coresys):
         """Initialize docker base wrapper."""
-        self.config = config
-        self.loop = loop
-        self.webapp = web.Application(loop=self.loop)
+        self.coresys = coresys
+        self.webapp = web.Application(loop=self._loop)

         # service stuff
         self._handler = None
         self.server = None

-    def register_host(self, host_control, hardware):
+    async def load(self):
+        """Register REST API Calls."""
+        self._register_supervisor()
+        self._register_host()
+        self._register_homeassistant()
+        self._register_proxy()
+        self._register_panel()
+        self._register_addons()
+        self._register_snapshots()
+        self._register_network()
+
+    def _register_host(self):
         """Register hostcontrol function."""
-        api_host = APIHost(self.config, self.loop, host_control, hardware)
+        api_host = APIHost()
+        api_host.coresys = self.coresys

         self.webapp.router.add_get('/host/info', api_host.info)
         self.webapp.router.add_get('/host/hardware', api_host.hardware)
@@ -38,23 +50,24 @@ class RestAPI(object):
         self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
         self.webapp.router.add_post('/host/update', api_host.update)
         self.webapp.router.add_post('/host/options', api_host.options)
+        self.webapp.router.add_post('/host/reload', api_host.reload)

-    def register_network(self, host_control):
+    def _register_network(self):
         """Register network function."""
-        api_net = APINetwork(self.config, self.loop, host_control)
+        api_net = APINetwork()
+        api_net.coresys = self.coresys

         self.webapp.router.add_get('/network/info', api_net.info)
         self.webapp.router.add_post('/network/options', api_net.options)

-    def register_supervisor(self, supervisor, snapshots, addons, host_control,
-                            updater):
+    def _register_supervisor(self):
         """Register supervisor function."""
-        api_supervisor = APISupervisor(
-            self.config, self.loop, supervisor, snapshots, addons,
-            host_control, updater)
+        api_supervisor = APISupervisor()
+        api_supervisor.coresys = self.coresys

         self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
         self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
+        self.webapp.router.add_get('/supervisor/stats', api_supervisor.stats)
         self.webapp.router.add_post(
             '/supervisor/update', api_supervisor.update)
         self.webapp.router.add_post(
@@ -63,30 +76,46 @@ class RestAPI(object):
             '/supervisor/options', api_supervisor.options)
         self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)

-    def register_homeassistant(self, dock_homeassistant):
+    def _register_homeassistant(self):
         """Register homeassistant function."""
-        api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)
+        api_hass = APIHomeAssistant()
+        api_hass.coresys = self.coresys

         self.webapp.router.add_get('/homeassistant/info', api_hass.info)
         self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
+        self.webapp.router.add_get('/homeassistant/stats', api_hass.stats)
         self.webapp.router.add_post('/homeassistant/options', api_hass.options)
         self.webapp.router.add_post('/homeassistant/update', api_hass.update)
         self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
         self.webapp.router.add_post('/homeassistant/stop', api_hass.stop)
         self.webapp.router.add_post('/homeassistant/start', api_hass.start)
         self.webapp.router.add_post('/homeassistant/check', api_hass.check)
-        self.webapp.router.add_post(
-            '/homeassistant/api/{path:.+}', api_hass.api)
-        self.webapp.router.add_get(
-            '/homeassistant/api/{path:.+}', api_hass.api)

-    def register_addons(self, addons):
+    def _register_proxy(self):
+        """Register HomeAssistant API Proxy."""
+        api_proxy = APIProxy()
+        api_proxy.coresys = self.coresys
+
+        self.webapp.router.add_get(
+            '/homeassistant/api/websocket', api_proxy.websocket)
+        self.webapp.router.add_get(
+            '/homeassistant/websocket', api_proxy.websocket)
+        self.webapp.router.add_get(
+            '/homeassistant/api/stream', api_proxy.stream)
+        self.webapp.router.add_post(
+            '/homeassistant/api/{path:.+}', api_proxy.api)
+        self.webapp.router.add_get(
+            '/homeassistant/api/{path:.+}', api_proxy.api)
+        self.webapp.router.add_get(
+            '/homeassistant/api/', api_proxy.api)
+
+    def _register_addons(self):
         """Register homeassistant function."""
-        api_addons = APIAddons(self.config, self.loop, addons)
+        api_addons = APIAddons()
+        api_addons.coresys = self.coresys

         self.webapp.router.add_get('/addons', api_addons.list)
         self.webapp.router.add_post('/addons/reload', api_addons.reload)

         self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
         self.webapp.router.add_post(
             '/addons/{addon}/install', api_addons.install)
@@ -103,21 +132,17 @@ class RestAPI(object):
         self.webapp.router.add_post(
             '/addons/{addon}/rebuild', api_addons.rebuild)
         self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
+        self.webapp.router.add_get('/addons/{addon}/icon', api_addons.icon)
         self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
+        self.webapp.router.add_get(
+            '/addons/{addon}/changelog', api_addons.changelog)
         self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)
+        self.webapp.router.add_get('/addons/{addon}/stats', api_addons.stats)

-    def register_security(self):
-        """Register security function."""
-        api_security = APISecurity(self.config, self.loop)
-
-        self.webapp.router.add_get('/security/info', api_security.info)
-        self.webapp.router.add_post('/security/options', api_security.options)
-        self.webapp.router.add_post('/security/totp', api_security.totp)
-        self.webapp.router.add_post('/security/session', api_security.session)
-
-    def register_snapshots(self, snapshots):
+    def _register_snapshots(self):
         """Register snapshots function."""
-        api_snapshots = APISnapshots(self.config, self.loop, snapshots)
+        api_snapshots = APISnapshots()
+        api_snapshots.coresys = self.coresys

         self.webapp.router.add_get('/snapshots', api_snapshots.list)
         self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)
@@ -137,22 +162,36 @@ class RestAPI(object):
             '/snapshots/{snapshot}/restore/partial',
             api_snapshots.restore_partial)

-    def register_panel(self):
+    def _register_panel(self):
         """Register panel for homeassistant."""
-        panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html')
+        def create_panel_response(build_type):
+            """Create a function to generate a response."""
+            path = Path(__file__).parent.joinpath(
+                f"panel/{build_type}.html")
+            return lambda request: web.FileResponse(path)

-        def get_panel(request):
-            """Return file response with panel."""
-            return web.FileResponse(panel)
-
-        self.webapp.router.add_get('/panel', get_panel)
+        # This route is for backwards compatibility with HA < 0.58
+        self.webapp.router.add_get(
+            '/panel', create_panel_response('hassio-main-es5'))
+
+        # This route is for backwards compatibility with HA 0.58 - 0.61
+        self.webapp.router.add_get(
+            '/panel_es5', create_panel_response('hassio-main-es5'))
+        self.webapp.router.add_get(
+            '/panel_latest', create_panel_response('hassio-main-latest'))
+
+        # This route is for HA > 0.61
+        self.webapp.router.add_get(
+            '/app-es5/index.html', create_panel_response('index'))
+        self.webapp.router.add_get(
+            '/app-es5/hassio-app.html', create_panel_response('hassio-app'))

     async def start(self):
         """Run rest api webserver."""
-        self._handler = self.webapp.make_handler(loop=self.loop)
+        self._handler = self.webapp.make_handler(loop=self._loop)

         try:
-            self.server = await self.loop.create_server(
+            self.server = await self._loop.create_server(
                 self._handler, "0.0.0.0", "80")
         except OSError as err:
             _LOGGER.fatal(
```
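`_register_panel` swaps a single module-level handler for a small factory: `create_panel_response` resolves the file path once, at registration time, and returns a lambda that serves it on every request. A sketch of the factory wired into an aiohttp app, assuming the panel HTML files exist next to the module:

```python
from pathlib import Path

from aiohttp import web


def create_panel_response(build_type):
    """Bind the resolved path now; serve it on each request later."""
    path = Path(__file__).parent.joinpath(f"panel/{build_type}.html")
    return lambda request: web.FileResponse(path)


app = web.Application()
# Old Home Assistant frontends keep working through the legacy routes.
app.router.add_get('/panel', create_panel_response('hassio-main-es5'))
app.router.add_get('/panel_latest', create_panel_response('hassio-main-latest'))
```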
```diff
@@ -5,7 +5,7 @@ import logging
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from .util import api_process, api_process_raw, api_validate
+from .utils import api_process, api_process_raw, api_validate
 from ..const import (
     ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
     ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
@@ -14,7 +14,11 @@ from ..const import (
     ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
     ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
     ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
-    CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
+    ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
+    ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
+    ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON,
+    CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT)
+from ..coresys import CoreSysAttributes
 from ..validate import DOCKER_PORTS

 _LOGGER = logging.getLogger(__name__)
@@ -31,18 +35,12 @@ SCHEMA_OPTIONS = vol.Schema({
 })


-class APIAddons(object):
+class APIAddons(CoreSysAttributes):
     """Handle rest api for addons functions."""

-    def __init__(self, config, loop, addons):
-        """Initialize homeassistant rest api part."""
-        self.config = config
-        self.loop = loop
-        self.addons = addons
-
     def _extract_addon(self, request, check_installed=True):
         """Return addon and if not exists trow a exception."""
-        addon = self.addons.get(request.match_info.get('addon'))
+        addon = self._addons.get(request.match_info.get('addon'))
         if not addon:
             raise RuntimeError("Addon not exists")

@@ -56,14 +54,14 @@ class APIAddons(object):
         """Return a simplified device list."""
         dev_list = addon.devices
         if not dev_list:
-            return
+            return None
         return [row.split(':')[0] for row in dev_list]

     @api_process
     async def list(self, request):
         """Return all addons / repositories ."""
         data_addons = []
-        for addon in self.addons.list_addons:
+        for addon in self._addons.list_addons:
             data_addons.append({
                 ATTR_NAME: addon.name,
                 ATTR_SLUG: addon.slug,
@@ -74,19 +72,13 @@ class APIAddons(object):
                 ATTR_DETACHED: addon.is_detached,
                 ATTR_REPOSITORY: addon.repository,
                 ATTR_BUILD: addon.need_build,
-                ATTR_PRIVILEGED: addon.privileged,
-                ATTR_DEVICES: self._pretty_devices(addon),
                 ATTR_URL: addon.url,
+                ATTR_ICON: addon.with_icon,
                 ATTR_LOGO: addon.with_logo,
-                ATTR_STDIN: addon.with_stdin,
-                ATTR_HASSIO_API: addon.access_hassio_api,
-                ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
-                ATTR_AUDIO: addon.with_audio,
-                ATTR_GPIO: addon.with_gpio,
             })

         data_repositories = []
-        for repository in self.addons.list_repositories:
+        for repository in self._addons.list_repositories:
             data_repositories.append({
                 ATTR_SLUG: repository.slug,
                 ATTR_NAME: repository.name,
@@ -103,7 +95,7 @@ class APIAddons(object):
     @api_process
     async def reload(self, request):
         """Reload all addons data."""
-        await asyncio.shield(self.addons.reload(), loop=self.loop)
+        await asyncio.shield(self._addons.reload(), loop=self._loop)
         return True

     @api_process
@@ -114,6 +106,7 @@ class APIAddons(object):
         return {
             ATTR_NAME: addon.name,
             ATTR_DESCRIPTON: addon.description,
+            ATTR_LONG_DESCRIPTION: addon.long_description,
             ATTR_VERSION: addon.version_installed,
             ATTR_AUTO_UPDATE: addon.auto_update,
             ATTR_REPOSITORY: addon.repository,
@@ -126,9 +119,13 @@ class APIAddons(object):
             ATTR_BUILD: addon.need_build,
             ATTR_NETWORK: addon.ports,
             ATTR_HOST_NETWORK: addon.host_network,
+            ATTR_HOST_IPC: addon.host_ipc,
+            ATTR_HOST_DBUS: addon.host_dbus,
             ATTR_PRIVILEGED: addon.privileged,
             ATTR_DEVICES: self._pretty_devices(addon),
+            ATTR_ICON: addon.with_icon,
             ATTR_LOGO: addon.with_logo,
+            ATTR_CHANGELOG: addon.with_changelog,
             ATTR_WEBUI: addon.webui,
             ATTR_STDIN: addon.with_stdin,
             ATTR_HASSIO_API: addon.access_hassio_api,
@@ -145,7 +142,7 @@ class APIAddons(object):
         addon = self._extract_addon(request)

         addon_schema = SCHEMA_OPTIONS.extend({
-            vol.Optional(ATTR_OPTIONS): addon.schema,
+            vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
         })

         body = await api_validate(addon_schema, request)
@@ -163,19 +160,39 @@ class APIAddons(object):
         if ATTR_AUDIO_OUTPUT in body:
             addon.audio_output = body[ATTR_AUDIO_OUTPUT]

+        addon.save_data()
         return True

+    @api_process
+    async def stats(self, request):
+        """Return resource information."""
+        addon = self._extract_addon(request)
+        stats = await addon.stats()
+
+        if not stats:
+            raise RuntimeError("No stats available")
+
+        return {
+            ATTR_CPU_PERCENT: stats.cpu_percent,
+            ATTR_MEMORY_USAGE: stats.memory_usage,
+            ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_NETWORK_RX: stats.network_rx,
+            ATTR_NETWORK_TX: stats.network_tx,
+            ATTR_BLK_READ: stats.blk_read,
+            ATTR_BLK_WRITE: stats.blk_write,
+        }
+
     @api_process
     def install(self, request):
         """Install addon."""
         addon = self._extract_addon(request, check_installed=False)
-        return asyncio.shield(addon.install(), loop=self.loop)
+        return asyncio.shield(addon.install(), loop=self._loop)

     @api_process
     def uninstall(self, request):
         """Uninstall addon."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.uninstall(), loop=self.loop)
+        return asyncio.shield(addon.uninstall(), loop=self._loop)

     @api_process
     def start(self, request):
@@ -189,13 +206,13 @@ class APIAddons(object):
         except vol.Invalid as ex:
             raise RuntimeError(humanize_error(options, ex)) from None

-        return asyncio.shield(addon.start(), loop=self.loop)
+        return asyncio.shield(addon.start(), loop=self._loop)

     @api_process
     def stop(self, request):
         """Stop addon."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.stop(), loop=self.loop)
+        return asyncio.shield(addon.stop(), loop=self._loop)

     @api_process
     def update(self, request):
@@ -205,13 +222,13 @@ class APIAddons(object):
         if addon.last_version == addon.version_installed:
             raise RuntimeError("No update available!")

-        return asyncio.shield(addon.update(), loop=self.loop)
+        return asyncio.shield(addon.update(), loop=self._loop)

     @api_process
     def restart(self, request):
         """Restart addon."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.restart(), loop=self.loop)
+        return asyncio.shield(addon.restart(), loop=self._loop)

     @api_process
     def rebuild(self, request):
@@ -220,7 +237,7 @@ class APIAddons(object):
         if not addon.need_build:
             raise RuntimeError("Only local build addons are supported")

-        return asyncio.shield(addon.rebuild(), loop=self.loop)
+        return asyncio.shield(addon.rebuild(), loop=self._loop)

     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
@@ -228,22 +245,42 @@ class APIAddons(object):
         addon = self._extract_addon(request)
         return addon.logs()

+    @api_process_raw(CONTENT_TYPE_PNG)
+    async def icon(self, request):
+        """Return icon from addon."""
+        addon = self._extract_addon(request, check_installed=False)
+        if not addon.with_icon:
+            raise RuntimeError("No icon found!")
+
+        with addon.path_icon.open('rb') as png:
+            return png.read()
+
     @api_process_raw(CONTENT_TYPE_PNG)
     async def logo(self, request):
         """Return logo from addon."""
         addon = self._extract_addon(request, check_installed=False)
         if not addon.with_logo:
-            raise RuntimeError("No image found!")
+            raise RuntimeError("No logo found!")

         with addon.path_logo.open('rb') as png:
             return png.read()

+    @api_process_raw(CONTENT_TYPE_TEXT)
+    async def changelog(self, request):
+        """Return changelog from addon."""
+        addon = self._extract_addon(request, check_installed=False)
+        if not addon.with_changelog:
+            raise RuntimeError("No changelog found!")
+
+        with addon.path_changelog.open('r') as changelog:
+            return changelog.read()
+
     @api_process
     async def stdin(self, request):
         """Write to stdin of addon."""
         addon = self._extract_addon(request)
         if not addon.with_stdin:
-            raise RuntimeError("STDIN not supported by addons")
+            raise RuntimeError("STDIN not supported by addon")

         data = await request.read()
-        return await asyncio.shield(addon.write_stdin(data), loop=self.loop)
+        return await asyncio.shield(addon.write_stdin(data), loop=self._loop)
```
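Every mutating endpoint in this handler awaits its coroutine through `asyncio.shield`, so an install or update keeps running even if the HTTP client disconnects and aiohttp cancels the request task. A compact illustration (the `loop=` argument in the hunk is pre-3.8 asyncio and is gone from current Python):

```python
import asyncio


async def install_addon():
    """Stand-in for a long-running job such as pulling a Docker image."""
    await asyncio.sleep(5)
    return True


async def handler():
    # Without shield(), cancelling the handler would abort the install
    # halfway through; with it, the inner task runs to completion.
    return await asyncio.shield(install_addon())
```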
|
@@ -2,30 +2,27 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
from aiohttp import web
|
|
||||||
from aiohttp.web_exceptions import HTTPBadGateway
|
|
||||||
from aiohttp.hdrs import CONTENT_TYPE
|
|
||||||
import async_timeout
|
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from .util import api_process, api_process_raw, api_validate
|
from .utils import api_process, api_process_raw, api_validate
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
|
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
|
||||||
ATTR_BOOT, ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG,
|
ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
|
||||||
CONTENT_TYPE_BINARY, HEADER_HA_ACCESS)
|
ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
|
||||||
from ..validate import HASS_DEVICES, NETWORK_PORT
|
ATTR_BLK_READ, ATTR_BLK_WRITE, CONTENT_TYPE_BINARY)
|
||||||
|
from ..coresys import CoreSysAttributes
|
||||||
|
from ..validate import NETWORK_PORT, DOCKER_IMAGE
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
# pylint: disable=no-value-for-parameter
|
||||||
SCHEMA_OPTIONS = vol.Schema({
|
SCHEMA_OPTIONS = vol.Schema({
|
||||||
vol.Optional(ATTR_DEVICES): HASS_DEVICES,
|
|
||||||
vol.Optional(ATTR_BOOT): vol.Boolean(),
|
vol.Optional(ATTR_BOOT): vol.Boolean(),
|
||||||
vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
|
vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
|
||||||
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
|
|
||||||
vol.Any(None, vol.Coerce(str)),
|
vol.Any(None, vol.Coerce(str)),
|
||||||
|
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
|
||||||
|
vol.Any(None, DOCKER_IMAGE),
|
||||||
vol.Optional(ATTR_PORT): NETWORK_PORT,
|
vol.Optional(ATTR_PORT): NETWORK_PORT,
|
||||||
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
||||||
vol.Optional(ATTR_SSL): vol.Boolean(),
|
vol.Optional(ATTR_SSL): vol.Boolean(),
|
||||||
@@ -37,67 +34,21 @@ SCHEMA_VERSION = vol.Schema({
|
|||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
class APIHomeAssistant(object):
|
class APIHomeAssistant(CoreSysAttributes):
|
||||||
"""Handle rest api for homeassistant functions."""
|
"""Handle rest api for homeassistant functions."""
|
||||||
|
|
||||||
def __init__(self, config, loop, homeassistant):
|
|
||||||
"""Initialize homeassistant rest api part."""
|
|
||||||
self.config = config
|
|
||||||
self.loop = loop
|
|
||||||
self.homeassistant = homeassistant
|
|
||||||
|
|
||||||
async def homeassistant_proxy(self, path, request):
|
|
||||||
"""Return a client request with proxy origin for Home-Assistant."""
|
|
||||||
url = "{}/api/{}".format(self.homeassistant.api_url, path)
|
|
||||||
|
|
||||||
try:
|
|
||||||
data = None
|
|
||||||
headers = {}
|
|
||||||
method = getattr(
|
|
||||||
self.homeassistant.websession, request.method.lower())
|
|
||||||
|
|
||||||
# read data
|
|
||||||
with async_timeout.timeout(10, loop=self.loop):
|
|
||||||
data = await request.read()
|
|
||||||
|
|
||||||
if data:
|
|
||||||
headers.update({CONTENT_TYPE: request.content_type})
|
|
||||||
|
|
||||||
# need api password?
|
|
||||||
if self.homeassistant.api_password:
|
|
||||||
headers = {HEADER_HA_ACCESS: self.homeassistant.api_password}
|
|
||||||
|
|
||||||
# reset headers
|
|
||||||
if not headers:
|
|
||||||
headers = None
|
|
||||||
|
|
||||||
client = await method(
|
|
||||||
url, data=data, headers=headers, timeout=300
|
|
||||||
)
|
|
||||||
|
|
||||||
return client
|
|
||||||
|
|
||||||
except aiohttp.ClientError as err:
|
|
||||||
_LOGGER.error("Client error on api %s request %s.", path, err)
|
|
||||||
|
|
||||||
except asyncio.TimeoutError:
|
|
||||||
_LOGGER.error("Client timeout error on api request %s.", path)
|
|
||||||
|
|
||||||
raise HTTPBadGateway()
|
|
||||||
|
|
||||||
     @api_process
     async def info(self, request):
         """Return host information."""
         return {
-            ATTR_VERSION: self.homeassistant.version,
-            ATTR_LAST_VERSION: self.homeassistant.last_version,
-            ATTR_IMAGE: self.homeassistant.image,
-            ATTR_DEVICES: self.homeassistant.devices,
-            ATTR_CUSTOM: self.homeassistant.is_custom_image,
-            ATTR_BOOT: self.homeassistant.boot,
-            ATTR_PORT: self.homeassistant.api_port,
-            ATTR_SSL: self.homeassistant.api_ssl,
-            ATTR_WATCHDOG: self.homeassistant.watchdog,
+            ATTR_VERSION: self._homeassistant.version,
+            ATTR_LAST_VERSION: self._homeassistant.last_version,
+            ATTR_IMAGE: self._homeassistant.image,
+            ATTR_CUSTOM: self._homeassistant.is_custom_image,
+            ATTR_BOOT: self._homeassistant.boot,
+            ATTR_PORT: self._homeassistant.api_port,
+            ATTR_SSL: self._homeassistant.api_ssl,
+            ATTR_WATCHDOG: self._homeassistant.watchdog,
         }

     @api_process
@@ -105,78 +56,82 @@ class APIHomeAssistant(object):
         """Set homeassistant options."""
         body = await api_validate(SCHEMA_OPTIONS, request)

-        if ATTR_DEVICES in body:
-            self.homeassistant.devices = body[ATTR_DEVICES]
-
-        if ATTR_IMAGE in body:
-            self.homeassistant.set_custom(
-                body[ATTR_IMAGE], body[ATTR_LAST_VERSION])
+        if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
+            self._homeassistant.image = body[ATTR_IMAGE]
+            self._homeassistant.last_version = body[ATTR_LAST_VERSION]

         if ATTR_BOOT in body:
-            self.homeassistant.boot = body[ATTR_BOOT]
+            self._homeassistant.boot = body[ATTR_BOOT]

         if ATTR_PORT in body:
-            self.homeassistant.api_port = body[ATTR_PORT]
+            self._homeassistant.api_port = body[ATTR_PORT]

         if ATTR_PASSWORD in body:
-            self.homeassistant.api_password = body[ATTR_PASSWORD]
+            self._homeassistant.api_password = body[ATTR_PASSWORD]

         if ATTR_SSL in body:
-            self.homeassistant.api_ssl = body[ATTR_SSL]
+            self._homeassistant.api_ssl = body[ATTR_SSL]

         if ATTR_WATCHDOG in body:
-            self.homeassistant.watchdog = body[ATTR_WATCHDOG]
+            self._homeassistant.watchdog = body[ATTR_WATCHDOG]

+        self._homeassistant.save_data()
         return True

+    @api_process
+    async def stats(self, request):
+        """Return resource information."""
+        stats = await self._homeassistant.stats()
+        if not stats:
+            raise RuntimeError("No stats available")
+
+        return {
+            ATTR_CPU_PERCENT: stats.cpu_percent,
+            ATTR_MEMORY_USAGE: stats.memory_usage,
+            ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_NETWORK_RX: stats.network_rx,
+            ATTR_NETWORK_TX: stats.network_tx,
+            ATTR_BLK_READ: stats.blk_read,
+            ATTR_BLK_WRITE: stats.blk_write,
+        }
+
     @api_process
     async def update(self, request):
         """Update homeassistant."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.homeassistant.last_version)
+        version = body.get(ATTR_VERSION, self._homeassistant.last_version)

-        if version == self.homeassistant.version:
+        if version == self._homeassistant.version:
             raise RuntimeError("Version {} is already in use".format(version))

         return await asyncio.shield(
-            self.homeassistant.update(version), loop=self.loop)
+            self._homeassistant.update(version), loop=self._loop)

     @api_process
     def stop(self, request):
         """Stop homeassistant."""
-        return asyncio.shield(self.homeassistant.stop(), loop=self.loop)
+        return asyncio.shield(self._homeassistant.stop(), loop=self._loop)

     @api_process
     def start(self, request):
         """Start homeassistant."""
-        return asyncio.shield(self.homeassistant.run(), loop=self.loop)
+        return asyncio.shield(self._homeassistant.run(), loop=self._loop)

     @api_process
     def restart(self, request):
         """Restart homeassistant."""
-        return asyncio.shield(self.homeassistant.restart(), loop=self.loop)
+        return asyncio.shield(self._homeassistant.restart(), loop=self._loop)

     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
         """Return homeassistant docker logs."""
-        return self.homeassistant.logs()
+        return self._homeassistant.logs()

     @api_process
     async def check(self, request):
         """Check config of homeassistant."""
-        code, message = await self.homeassistant.check_config()
+        code, message = await self._homeassistant.check_config()
         if not code:
             raise RuntimeError(message)

         return True
-
-    async def api(self, request):
-        """Proxy API request to Home-Assistant."""
-        path = request.match_info.get('path')
-
-        client = await self.homeassistant_proxy(path, request)
-        return web.Response(
-            body=await client.read(),
-            status=client.status,
-            content_type=client.content_type
-        )
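The new `stats` handler above returns container resource metrics for the Home Assistant container, wrapped in the usual success envelope. A minimal sketch of querying it; the `/homeassistant/stats` route, the internal `hassio` hostname, and the token header are assumptions inferred from the rest of the API, not shown in this diff:

```python
# Sketch only: endpoint path, hostname, and auth header are assumptions.
import os
import requests

resp = requests.get(
    "http://hassio/homeassistant/stats",
    headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
)
resp.raise_for_status()
data = resp.json()["data"]
print(data["cpu_percent"], data["memory_usage"], data["memory_limit"])
```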
@@ -4,11 +4,12 @@ import logging

 import voluptuous as vol

-from .util import api_process_hostcontrol, api_process, api_validate
+from .utils import api_process_hostcontrol, api_process, api_validate
 from ..const import (
     ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
     ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
     ATTR_AUDIO_OUTPUT, ATTR_GPIO)
+from ..coresys import CoreSysAttributes
 from ..validate import ALSA_CHANNEL

 _LOGGER = logging.getLogger(__name__)
@@ -23,26 +24,19 @@ SCHEMA_OPTIONS = vol.Schema({
 })


-class APIHost(object):
+class APIHost(CoreSysAttributes):
     """Handle rest api for host functions."""

-    def __init__(self, config, loop, host_control, hardware):
-        """Initialize host rest api part."""
-        self.config = config
-        self.loop = loop
-        self.host_control = host_control
-        self.local_hw = hardware
-
     @api_process
     async def info(self, request):
         """Return host information."""
         return {
-            ATTR_TYPE: self.host_control.type,
-            ATTR_VERSION: self.host_control.version,
-            ATTR_LAST_VERSION: self.host_control.last_version,
-            ATTR_FEATURES: self.host_control.features,
-            ATTR_HOSTNAME: self.host_control.hostname,
-            ATTR_OS: self.host_control.os_info,
+            ATTR_TYPE: self._host_control.type,
+            ATTR_VERSION: self._host_control.version,
+            ATTR_LAST_VERSION: self._host_control.last_version,
+            ATTR_FEATURES: self._host_control.features,
+            ATTR_HOSTNAME: self._host_control.hostname,
+            ATTR_OS: self._host_control.os_info,
         }

     @api_process
@@ -51,41 +45,48 @@ class APIHost(object):
         body = await api_validate(SCHEMA_OPTIONS, request)

         if ATTR_AUDIO_OUTPUT in body:
-            self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
+            self._config.audio_output = body[ATTR_AUDIO_OUTPUT]
         if ATTR_AUDIO_INPUT in body:
-            self.config.audio_input = body[ATTR_AUDIO_INPUT]
+            self._config.audio_input = body[ATTR_AUDIO_INPUT]

+        self._config.save_data()
         return True

     @api_process_hostcontrol
     def reboot(self, request):
         """Reboot host."""
-        return self.host_control.reboot()
+        return self._host_control.reboot()

     @api_process_hostcontrol
     def shutdown(self, request):
         """Poweroff host."""
-        return self.host_control.shutdown()
+        return self._host_control.shutdown()

+    @api_process_hostcontrol
+    async def reload(self, request):
+        """Reload host data."""
+        await self._host_control.load()
+        return True
+
     @api_process_hostcontrol
     async def update(self, request):
         """Update host OS."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.host_control.last_version)
+        version = body.get(ATTR_VERSION, self._host_control.last_version)

-        if version == self.host_control.version:
-            raise RuntimeError("Version {} is already in use".format(version))
+        if version == self._host_control.version:
+            raise RuntimeError(f"Version {version} is already in use")

         return await asyncio.shield(
-            self.host_control.update(version=version), loop=self.loop)
+            self._host_control.update(version=version), loop=self._loop)

     @api_process
     async def hardware(self, request):
         """Return local hardware infos."""
         return {
-            ATTR_SERIAL: list(self.local_hw.serial_devices),
-            ATTR_INPUT: list(self.local_hw.input_devices),
-            ATTR_DISK: list(self.local_hw.disk_devices),
-            ATTR_GPIO: list(self.local_hw.gpio_devices),
-            ATTR_AUDIO: self.local_hw.audio_devices,
+            ATTR_SERIAL: list(self._hardware.serial_devices),
+            ATTR_INPUT: list(self._hardware.input_devices),
+            ATTR_DISK: list(self._hardware.disk_devices),
+            ATTR_GPIO: list(self._hardware.gpio_devices),
+            ATTR_AUDIO: self._hardware.audio_devices,
         }
@@ -3,8 +3,9 @@ import logging

 import voluptuous as vol

-from .util import api_process, api_process_hostcontrol, api_validate
+from .utils import api_process, api_process_hostcontrol, api_validate
 from ..const import ATTR_HOSTNAME
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)

@@ -14,20 +15,14 @@ SCHEMA_OPTIONS = vol.Schema({
 })


-class APINetwork(object):
+class APINetwork(CoreSysAttributes):
     """Handle rest api for network functions."""

-    def __init__(self, config, loop, host_control):
-        """Initialize network rest api part."""
-        self.config = config
-        self.loop = loop
-        self.host_control = host_control
-
     @api_process
     async def info(self, request):
         """Show network settings."""
         return {
-            ATTR_HOSTNAME: self.host_control.hostname,
+            ATTR_HOSTNAME: self._host_control.hostname,
         }

     @api_process_hostcontrol
@@ -37,7 +32,7 @@ class APINetwork(object):

         # hostname
         if ATTR_HOSTNAME in body:
-            if self.host_control.hostname != body[ATTR_HOSTNAME]:
-                await self.host_control.set_hostname(body[ATTR_HOSTNAME])
+            if self._host_control.hostname != body[ATTR_HOSTNAME]:
+                await self._host_control.set_hostname(body[ATTR_HOSTNAME])

         return True
 78  hassio/api/panel/hassio-app.html (new file; diff suppressed, lines too long)
 BIN hassio/api/panel/hassio-app.html.gz (new binary file, not shown)
 72  hassio/api/panel/hassio-main-es5.html (new file; diff suppressed, lines too long)
 BIN hassio/api/panel/hassio-main-es5.html.gz (new binary file, not shown)
 72  hassio/api/panel/hassio-main-latest.html (new file; diff suppressed, lines too long)
 BIN hassio/api/panel/hassio-main-latest.html.gz (new binary file, not shown)
 37  hassio/api/panel/index.html (new file)
@@ -0,0 +1,37 @@
+<!doctype html>
+<html>
+<head>
+  <meta charset="utf-8">
+  <title>Hass.io</title>
+  <meta name='viewport' content='width=device-width, user-scalable=no'>
+  <style>
+    body {
+      height: 100vh;
+      margin: 0;
+      padding: 0;
+    }
+  </style>
+</head>
+<body>
+  <hassio-app></hassio-app>
+  <script>
+    function addScript(src) {
+      var e = document.createElement('script');
+      e.src = src;
+      document.head.appendChild(e);
+    }
+    if (!window.parent.HASS_DEV) {
+      addScript('/frontend_es5/custom-elements-es5-adapter.js');
+    }
+    var webComponentsSupported = (
+      'customElements' in window &&
+      'import' in document.createElement('link') &&
+      'content' in document.createElement('template'));
+    if (!webComponentsSupported) {
+      addScript('/static/webcomponents-lite.js');
+    }
+  </script>
+  <link rel='import' href='./hassio-app.html'>
+  <link rel='import' href='/static/mdi.html' async>
+</body>
+</html>
 BIN hassio/api/panel/index.html.gz (new binary file, not shown)
 200 hassio/api/proxy.py (new file)
@@ -0,0 +1,200 @@
+"""Utils for HomeAssistant Proxy."""
+import asyncio
+import logging
+
+import aiohttp
+from aiohttp import web
+from aiohttp.web_exceptions import HTTPBadGateway, HTTPInternalServerError
+from aiohttp.hdrs import CONTENT_TYPE
+import async_timeout
+
+from ..const import HEADER_HA_ACCESS
+from ..coresys import CoreSysAttributes
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class APIProxy(CoreSysAttributes):
+    """API Proxy for Home-Assistant."""
+
+    async def _api_client(self, request, path, timeout=300):
+        """Return a client request with proxy origin for Home-Assistant."""
+        url = f"{self._homeassistant.api_url}/api/{path}"
+
+        try:
+            data = None
+            headers = {}
+            method = getattr(self._websession_ssl, request.method.lower())
+            params = request.query or None
+
+            # read data
+            with async_timeout.timeout(30, loop=self._loop):
+                data = await request.read()
+
+            if data:
+                headers.update({CONTENT_TYPE: request.content_type})
+
+            # need api password?
+            if self._homeassistant.api_password:
+                headers = {HEADER_HA_ACCESS: self._homeassistant.api_password}
+
+            # reset headers
+            if not headers:
+                headers = None
+
+            client = await method(
+                url, data=data, headers=headers, timeout=timeout,
+                params=params
+            )
+
+            return client
+
+        except aiohttp.ClientError as err:
+            _LOGGER.error("Client error on API %s request %s.", path, err)
+
+        except asyncio.TimeoutError:
+            _LOGGER.error("Client timeout error on API request %s.", path)
+
+        raise HTTPBadGateway()
+
+    async def stream(self, request):
+        """Proxy HomeAssistant EventStream Requests."""
+        _LOGGER.info("Home-Assistant EventStream start")
+        client = await self._api_client(request, 'stream', timeout=None)
+
+        response = web.StreamResponse()
+        response.content_type = request.headers.get(CONTENT_TYPE)
+        try:
+            await response.prepare(request)
+            while True:
+                data = await client.content.read(10)
+                if not data:
+                    await response.write_eof()
+                    break
+                response.write(data)
+
+        except aiohttp.ClientError:
+            await response.write_eof()
+
+        except asyncio.CancelledError:
+            pass
+
+        finally:
+            client.close()
+            _LOGGER.info("Home-Assistant EventStream close")
+
+    async def api(self, request):
+        """Proxy HomeAssistant API Requests."""
+        path = request.match_info.get('path', '')
+
+        # Normal request
+        _LOGGER.info("Home-Assistant /api/%s request", path)
+        client = await self._api_client(request, path)
+
+        data = await client.read()
+        return web.Response(
+            body=data,
+            status=client.status,
+            content_type=client.content_type
+        )
+
+    async def _websocket_client(self):
+        """Initialize a websocket api connection."""
+        url = f"{self._homeassistant.api_url}/api/websocket"
+
+        try:
+            client = await self._websession_ssl.ws_connect(
+                url, heartbeat=60, verify_ssl=False)
+
+            # handle authentication
+            for _ in range(2):
+                data = await client.receive_json()
+                if data.get('type') == 'auth_ok':
+                    return client
+                elif data.get('type') == 'auth_required':
+                    await client.send_json({
+                        'type': 'auth',
+                        'api_password': self._homeassistant.api_password,
+                    })
+
+            _LOGGER.error("Authentication to Home-Assistant websocket")
+
+        except (aiohttp.ClientError, RuntimeError) as err:
+            _LOGGER.error("Client error on websocket API %s.", err)
+
+        raise HTTPBadGateway()
+
+    async def websocket(self, request):
+        """Initialize a websocket api connection."""
+        _LOGGER.info("Home-Assistant Websocket API request initialze")
+
+        # init server
+        server = web.WebSocketResponse(heartbeat=60)
+        await server.prepare(request)
+
+        # handle authentication
+        try:
+            await server.send_json({
+                'type': 'auth_required',
+                'ha_version': self._homeassistant.version,
+            })
+            await server.receive_json()  # get internal token
+            await server.send_json({
+                'type': 'auth_ok',
+                'ha_version': self._homeassistant.version,
+            })
+        except (RuntimeError, ValueError) as err:
+            _LOGGER.error("Can't initialize handshake: %s", err)
+            raise HTTPInternalServerError() from None
+
+        # init connection to hass
+        client = await self._websocket_client()
+
+        _LOGGER.info("Home-Assistant Websocket API request running")
+        try:
+            client_read = None
+            server_read = None
+            while not server.closed and not client.closed:
+                if not client_read:
+                    client_read = asyncio.ensure_future(
+                        client.receive_str(), loop=self._loop)
+                if not server_read:
+                    server_read = asyncio.ensure_future(
+                        server.receive_str(), loop=self._loop)
+
+                # wait until data need to be processed
+                await asyncio.wait(
+                    [client_read, server_read],
+                    loop=self._loop, return_when=asyncio.FIRST_COMPLETED
+                )
+
+                # server
+                if server_read.done() and not client.closed:
+                    server_read.exception()
+                    await client.send_str(server_read.result())
+                    server_read = None
+
+                # client
+                if client_read.done() and not server.closed:
+                    client_read.exception()
+                    await server.send_str(client_read.result())
+                    client_read = None
+
+        except asyncio.CancelledError:
+            pass
+
+        except RuntimeError as err:
+            _LOGGER.info("Home-Assistant Websocket API error: %s", err)
+
+        finally:
+            if client_read:
+                client_read.cancel()
+            if server_read:
+                server_read.cancel()
+
+            # close connections
+            await client.close()
+            await server.close()
+
+        _LOGGER.info("Home-Assistant Websocket API connection is closed")
+        return server
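The `websocket()` handler above pumps messages in both directions by keeping one pending read per side, racing them with `asyncio.wait(..., return_when=FIRST_COMPLETED)`, and re-arming only the side that completed. A self-contained sketch of that pattern, separate from the commit; the callable names are hypothetical:

```python
import asyncio

async def pump(a_recv, a_send, b_recv, b_send):
    """Forward messages both ways until either side returns None.

    a_recv/b_recv are async callables returning the next message (or None
    on close); a_send/b_send are async callables that deliver a message.
    """
    read_a = read_b = None
    try:
        while True:
            if read_a is None:
                read_a = asyncio.ensure_future(a_recv())
            if read_b is None:
                read_b = asyncio.ensure_future(b_recv())

            # Sleep until at least one side has data ready.
            await asyncio.wait(
                [read_a, read_b], return_when=asyncio.FIRST_COMPLETED)

            if read_a.done():
                msg, read_a = read_a.result(), None
                if msg is None:
                    break
                await b_send(msg)
            if read_b.done():
                msg, read_b = read_b.result(), None
                if msg is None:
                    break
                await a_send(msg)
    finally:
        # Mirror the handler's cleanup: cancel whichever read is pending.
        for fut in (read_a, read_b):
            if fut is not None:
                fut.cancel()
```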
@@ -1,102 +0,0 @@
-"""Init file for HassIO security rest api."""
-from datetime import datetime, timedelta
-import io
-import logging
-import hashlib
-import os
-
-from aiohttp import web
-import voluptuous as vol
-import pyotp
-import pyqrcode
-
-from .util import api_process, api_validate, hash_password
-from ..const import ATTR_INITIALIZE, ATTR_PASSWORD, ATTR_TOTP, ATTR_SESSION
-
-_LOGGER = logging.getLogger(__name__)
-
-SCHEMA_PASSWORD = vol.Schema({
-    vol.Required(ATTR_PASSWORD): vol.Coerce(str),
-})
-
-SCHEMA_SESSION = SCHEMA_PASSWORD.extend({
-    vol.Optional(ATTR_TOTP, default=None): vol.Coerce(str),
-})
-
-
-class APISecurity(object):
-    """Handle rest api for security functions."""
-
-    def __init__(self, config, loop):
-        """Initialize security rest api part."""
-        self.config = config
-        self.loop = loop
-
-    def _check_password(self, body):
-        """Check if password is valid and security is initialize."""
-        if not self.config.security_initialize:
-            raise RuntimeError("First set a password")
-
-        password = hash_password(body[ATTR_PASSWORD])
-        if password != self.config.security_password:
-            raise RuntimeError("Wrong password")
-
-    @api_process
-    async def info(self, request):
-        """Return host information."""
-        return {
-            ATTR_INITIALIZE: self.config.security_initialize,
-            ATTR_TOTP: self.config.security_totp is not None,
-        }
-
-    @api_process
-    async def options(self, request):
-        """Set options / password."""
-        body = await api_validate(SCHEMA_PASSWORD, request)
-
-        if self.config.security_initialize:
-            raise RuntimeError("Password is already set!")
-
-        self.config.security_password = hash_password(body[ATTR_PASSWORD])
-        self.config.security_initialize = True
-        return True
-
-    @api_process
-    async def totp(self, request):
-        """Set and initialze TOTP."""
-        body = await api_validate(SCHEMA_PASSWORD, request)
-        self._check_password(body)
-
-        # generate TOTP
-        totp_init_key = pyotp.random_base32()
-        totp = pyotp.TOTP(totp_init_key)
-
-        # init qrcode
-        buff = io.BytesIO()
-
-        qrcode = pyqrcode.create(totp.provisioning_uri("Hass.IO"))
-        qrcode.svg(buff)
-
-        # finish
-        self.config.security_totp = totp_init_key
-        return web.Response(body=buff.getvalue(), content_type='image/svg+xml')
-
-    @api_process
-    async def session(self, request):
-        """Set and initialze session."""
-        body = await api_validate(SCHEMA_SESSION, request)
-        self._check_password(body)
-
-        # check TOTP
-        if self.config.security_totp:
-            totp = pyotp.TOTP(self.config.security_totp)
-            if body[ATTR_TOTP] != totp.now():
-                raise RuntimeError("Invalid TOTP token!")
-
-        # create session
-        valid_until = datetime.now() + timedelta(days=1)
-        session = hashlib.sha256(os.urandom(54)).hexdigest()
-
-        # store session
-        self.config.add_security_session(session, valid_until)
-        return {ATTR_SESSION: session}
@@ -4,12 +4,13 @@ import logging

 import voluptuous as vol

-from .util import api_process, api_validate
+from .utils import api_process, api_validate
 from ..snapshots.validate import ALL_FOLDERS
 from ..const import (
     ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
     ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
-    ATTR_DEVICES, ATTR_SNAPSHOTS)
+    ATTR_SNAPSHOTS)
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)

@@ -17,8 +18,10 @@ _LOGGER = logging.getLogger(__name__)
 # pylint: disable=no-value-for-parameter
 SCHEMA_RESTORE_PARTIAL = vol.Schema({
     vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
-    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
-    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
+    vol.Optional(ATTR_ADDONS):
+        vol.All([vol.Coerce(str)], vol.Unique()),
+    vol.Optional(ATTR_FOLDERS):
+        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
 })

 SCHEMA_SNAPSHOT_FULL = vol.Schema({
@@ -26,23 +29,19 @@ SCHEMA_SNAPSHOT_FULL = vol.Schema({
 })

 SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
-    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
-    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
+    vol.Optional(ATTR_ADDONS):
+        vol.All([vol.Coerce(str)], vol.Unique()),
+    vol.Optional(ATTR_FOLDERS):
+        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
 })


-class APISnapshots(object):
+class APISnapshots(CoreSysAttributes):
     """Handle rest api for snapshot functions."""

-    def __init__(self, config, loop, snapshots):
-        """Initialize network rest api part."""
-        self.config = config
-        self.loop = loop
-        self.snapshots = snapshots
-
     def _extract_snapshot(self, request):
         """Return addon and if not exists trow a exception."""
-        snapshot = self.snapshots.get(request.match_info.get('snapshot'))
+        snapshot = self._snapshots.get(request.match_info.get('snapshot'))
         if not snapshot:
             raise RuntimeError("Snapshot not exists")
         return snapshot
@@ -51,11 +50,12 @@ class APISnapshots(object):
     async def list(self, request):
         """Return snapshot list."""
         data_snapshots = []
-        for snapshot in self.snapshots.list_snapshots:
+        for snapshot in self._snapshots.list_snapshots:
             data_snapshots.append({
                 ATTR_SLUG: snapshot.slug,
                 ATTR_NAME: snapshot.name,
                 ATTR_DATE: snapshot.date,
+                ATTR_TYPE: snapshot.sys_type,
             })

         return {
@@ -65,7 +65,7 @@ class APISnapshots(object):
     @api_process
     async def reload(self, request):
         """Reload snapshot list."""
-        await asyncio.shield(self.snapshots.reload(), loop=self.loop)
+        await asyncio.shield(self._snapshots.reload(), loop=self._loop)
         return True

     @api_process
@@ -87,10 +87,7 @@ class APISnapshots(object):
             ATTR_NAME: snapshot.name,
             ATTR_DATE: snapshot.date,
             ATTR_SIZE: snapshot.size,
-            ATTR_HOMEASSISTANT: {
-                ATTR_VERSION: snapshot.homeassistant_version,
-                ATTR_DEVICES: snapshot.homeassistant_devices,
-            },
+            ATTR_HOMEASSISTANT: snapshot.homeassistant_version,
             ATTR_ADDONS: data_addons,
             ATTR_REPOSITORIES: snapshot.repositories,
             ATTR_FOLDERS: snapshot.folders,
@@ -101,21 +98,21 @@ class APISnapshots(object):
         """Full-Snapshot a snapshot."""
         body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
         return await asyncio.shield(
-            self.snapshots.do_snapshot_full(**body), loop=self.loop)
+            self._snapshots.do_snapshot_full(**body), loop=self._loop)

     @api_process
     async def snapshot_partial(self, request):
         """Partial-Snapshot a snapshot."""
         body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
         return await asyncio.shield(
-            self.snapshots.do_snapshot_partial(**body), loop=self.loop)
+            self._snapshots.do_snapshot_partial(**body), loop=self._loop)

     @api_process
     def restore_full(self, request):
         """Full-Restore a snapshot."""
         snapshot = self._extract_snapshot(request)
         return asyncio.shield(
-            self.snapshots.do_restore_full(snapshot), loop=self.loop)
+            self._snapshots.do_restore_full(snapshot), loop=self._loop)

     @api_process
     async def restore_partial(self, request):
@@ -124,12 +121,12 @@ class APISnapshots(object):
         body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)

         return await asyncio.shield(
-            self.snapshots.do_restore_partial(snapshot, **body),
-            loop=self.loop
+            self._snapshots.do_restore_partial(snapshot, **body),
+            loop=self._loop
         )

     @api_process
     async def remove(self, request):
         """Remove a snapshot."""
         snapshot = self._extract_snapshot(request)
-        return self.snapshots.remove(snapshot)
+        return self._snapshots.remove(snapshot)
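The tightened snapshot schemas above wrap the add-on and folder lists in `vol.All([...], vol.Unique())`, so duplicate entries are rejected at validation time instead of being passed through silently. A quick check of the voluptuous behavior; the add-on slugs are illustrative:

```python
import voluptuous as vol

schema = vol.All([vol.Coerce(str)], vol.Unique())

print(schema(["core_ssh", "core_samba"]))  # passes unchanged

try:
    schema(["core_ssh", "core_ssh"])
except vol.Invalid as err:
    print(err)  # reports the duplicate items
```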
@@ -4,21 +4,25 @@ import logging

 import voluptuous as vol

-from .util import api_process, api_process_raw, api_validate
+from .utils import api_process, api_process_raw, api_validate
 from ..const import (
     ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
     HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
     ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
-    ATTR_STATE, CONTENT_TYPE_BINARY)
-from ..validate import validate_timezone
+    ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
+    ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
+    ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
+from ..coresys import CoreSysAttributes
+from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES

 _LOGGER = logging.getLogger(__name__)

 SCHEMA_OPTIONS = vol.Schema({
     # pylint: disable=no-value-for-parameter
     vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
-    vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
+    vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
     vol.Optional(ATTR_TIMEZONE): validate_timezone,
+    vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
 })

 SCHEMA_VERSION = vol.Schema({
@@ -26,20 +30,9 @@ SCHEMA_VERSION = vol.Schema({
 })


-class APISupervisor(object):
+class APISupervisor(CoreSysAttributes):
     """Handle rest api for supervisor functions."""

-    def __init__(self, config, loop, supervisor, snapshots, addons,
-                 host_control, updater):
-        """Initialize supervisor rest api part."""
-        self.config = config
-        self.loop = loop
-        self.supervisor = supervisor
-        self.addons = addons
-        self.snapshots = snapshots
-        self.host_control = host_control
-        self.updater = updater
-
     @api_process
     async def ping(self, request):
         """Return ok for signal that the api is ready."""
@@ -49,7 +42,7 @@ class APISupervisor(object):
     async def info(self, request):
         """Return host information."""
         list_addons = []
-        for addon in self.addons.list_addons:
+        for addon in self._addons.list_addons:
             if addon.is_installed:
                 list_addons.append({
                     ATTR_NAME: addon.name,
@@ -59,17 +52,19 @@ class APISupervisor(object):
                     ATTR_VERSION: addon.last_version,
                     ATTR_INSTALLED: addon.version_installed,
                     ATTR_REPOSITORY: addon.repository,
+                    ATTR_ICON: addon.with_icon,
                     ATTR_LOGO: addon.with_logo,
                 })

         return {
             ATTR_VERSION: HASSIO_VERSION,
-            ATTR_LAST_VERSION: self.updater.version_hassio,
-            ATTR_BETA_CHANNEL: self.updater.beta_channel,
-            ATTR_ARCH: self.config.arch,
-            ATTR_TIMEZONE: self.config.timezone,
+            ATTR_LAST_VERSION: self._updater.version_hassio,
+            ATTR_BETA_CHANNEL: self._updater.beta_channel,
+            ATTR_ARCH: self._arch,
+            ATTR_WAIT_BOOT: self._config.wait_boot,
+            ATTR_TIMEZONE: self._config.timezone,
             ATTR_ADDONS: list_addons,
-            ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
+            ATTR_ADDONS_REPOSITORIES: self._config.addons_repositories,
         }

     @api_process
@@ -78,40 +73,59 @@ class APISupervisor(object):
         body = await api_validate(SCHEMA_OPTIONS, request)

         if ATTR_BETA_CHANNEL in body:
-            self.updater.beta_channel = body[ATTR_BETA_CHANNEL]
+            self._updater.beta_channel = body[ATTR_BETA_CHANNEL]

         if ATTR_TIMEZONE in body:
-            self.config.timezone = body[ATTR_TIMEZONE]
+            self._config.timezone = body[ATTR_TIMEZONE]

+        if ATTR_WAIT_BOOT in body:
+            self._config.wait_boot = body[ATTR_WAIT_BOOT]
+
         if ATTR_ADDONS_REPOSITORIES in body:
             new = set(body[ATTR_ADDONS_REPOSITORIES])
-            await asyncio.shield(self.addons.load_repositories(new))
+            await asyncio.shield(self._addons.load_repositories(new))

+        self._updater.save_data()
+        self._config.save_data()
         return True

+    @api_process
+    async def stats(self, request):
+        """Return resource information."""
+        stats = await self._supervisor.stats()
+        if not stats:
+            raise RuntimeError("No stats available")
+
+        return {
+            ATTR_CPU_PERCENT: stats.cpu_percent,
+            ATTR_MEMORY_USAGE: stats.memory_usage,
+            ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_NETWORK_RX: stats.network_rx,
+            ATTR_NETWORK_TX: stats.network_tx,
+            ATTR_BLK_READ: stats.blk_read,
+            ATTR_BLK_WRITE: stats.blk_write,
+        }
+
     @api_process
     async def update(self, request):
         """Update supervisor OS."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.updater.version_hassio)
+        version = body.get(ATTR_VERSION, self._updater.version_hassio)

-        if version == self.supervisor.version:
+        if version == self._supervisor.version:
             raise RuntimeError("Version {} is already in use".format(version))

         return await asyncio.shield(
-            self.supervisor.update(version), loop=self.loop)
+            self._supervisor.update(version), loop=self._loop)

     @api_process
     async def reload(self, request):
         """Reload addons, config ect."""
         tasks = [
-            self.addons.reload(),
-            self.snapshots.reload(),
-            self.updater.fetch_data(),
-            self.host_control.load()
+            self._updater.reload(),
         ]
         results, _ = await asyncio.shield(
-            asyncio.wait(tasks, loop=self.loop), loop=self.loop)
+            asyncio.wait(tasks, loop=self._loop), loop=self._loop)

         for result in results:
             if result.exception() is not None:
@@ -122,4 +136,4 @@ class APISupervisor(object):
     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
         """Return supervisor docker logs."""
-        return self.supervisor.logs()
+        return self._supervisor.logs()
@@ -17,10 +17,12 @@ _LOGGER = logging.getLogger(__name__)

 def json_loads(data):
     """Extract json from string with support for '' and None."""
+    if not data:
+        return {}
     try:
         return json.loads(data)
     except json.JSONDecodeError:
-        return {}
+        raise RuntimeError("Invalid json")


 def api_process(method):
@@ -47,7 +49,8 @@ def api_process_hostcontrol(method):
     """Wrap HostControl calls to rest api."""
     async def wrap_hostcontrol(api, *args, **kwargs):
         """Return host information."""
-        if not api.host_control.active:
+        # pylint: disable=protected-access
+        if not api._host_control.active:
             raise HTTPServiceUnavailable()

         try:
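The `json_loads` change above alters the failure mode: empty input (`''` or `None`) still yields an empty dict, but malformed JSON now raises instead of being silently swallowed, so callers surface a proper error. A quick illustration of the new behavior:

```python
import json

def json_loads(data):
    """Extract json from string with support for '' and None."""
    if not data:
        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
        raise RuntimeError("Invalid json")

assert json_loads(None) == {}
assert json_loads("") == {}
assert json_loads('{"lock": true}') == {"lock": True}

try:
    json_loads("{broken")
except RuntimeError as err:
    print(err)  # Invalid json (previously this returned {} silently)
```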
@@ -7,15 +7,41 @@ from pathlib import Path

 from colorlog import ColoredFormatter

+from .addons import AddonManager
+from .api import RestAPI
 from .const import SOCKET_DOCKER
-from .config import CoreConfig
+from .coresys import CoreSys
+from .supervisor import Supervisor
+from .homeassistant import HomeAssistant
+from .snapshots import SnapshotsManager
+from .tasks import Tasks
+from .updater import Updater

 _LOGGER = logging.getLogger(__name__)


-def initialize_system_data():
+def initialize_coresys(loop):
+    """Initialize HassIO coresys/objects."""
+    coresys = CoreSys(loop)
+
+    # Initialize core objects
+    coresys.updater = Updater(coresys)
+    coresys.api = RestAPI(coresys)
+    coresys.supervisor = Supervisor(coresys)
+    coresys.homeassistant = HomeAssistant(coresys)
+    coresys.addons = AddonManager(coresys)
+    coresys.snapshots = SnapshotsManager(coresys)
+    coresys.tasks = Tasks(coresys)
+
+    # bootstrap config
+    initialize_system_data(coresys)
+
+    return coresys
+
+
+def initialize_system_data(coresys):
     """Setup default config and create folders."""
-    config = CoreConfig()
+    config = coresys.config

     # homeassistant config folder
     if not config.path_config.is_dir():
@@ -62,8 +88,9 @@ def initialize_system_data():
     return config


-def migrate_system_env(config):
+def migrate_system_env(coresys):
     """Cleanup some stuff after update."""
+    config = coresys.config

     # hass.io 0.37 -> 0.38
     old_build = Path(config.path_hassio, "addons/build")
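With the old per-object constructor wiring replaced by a single `CoreSys` hub, startup collapses to building the hub once and handing it to every subsystem. A sketch of the intended call sequence, inferred from this diff and the `core.py` changes below rather than from any shown entry point:

```python
# Sketch only; module paths and the exact startup order are assumptions
# inferred from the imports in this diff.
import asyncio

from hassio.bootstrap import initialize_coresys
from hassio.core import HassIO

loop = asyncio.get_event_loop()
coresys = initialize_coresys(loop)  # wires Updater, RestAPI, Supervisor, ...
hassio = HassIO(coresys)            # HassIO now only stores the hub
loop.run_until_complete(hassio.setup())
```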
@@ -5,10 +5,10 @@ import os
 from pathlib import Path, PurePath

 from .const import (
-    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
-    ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
-    ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT)
-from .tools import JsonConfig, parse_datetime
+    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
+    ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
+from .utils.dt import parse_datetime
+from .utils.json import JsonConfig
 from .validate import SCHEMA_HASSIO_CONFIG

 _LOGGER = logging.getLogger(__name__)
@@ -35,7 +35,6 @@ class CoreConfig(JsonConfig):
     def __init__(self):
         """Initialize config object."""
         super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
-        self.arch = None

     @property
     def timezone(self):
@@ -46,7 +45,16 @@ class CoreConfig(JsonConfig):
     def timezone(self, value):
         """Set system timezone."""
         self._data[ATTR_TIMEZONE] = value
-        self.save()
+
+    @property
+    def wait_boot(self):
+        """Return wait time for auto boot stages."""
+        return self._data[ATTR_WAIT_BOOT]
+
+    @wait_boot.setter
+    def wait_boot(self, value):
+        """Set wait boot time."""
+        self._data[ATTR_WAIT_BOOT] = value

     @property
     def last_boot(self):
@@ -62,7 +70,6 @@ class CoreConfig(JsonConfig):
     def last_boot(self, value):
         """Set last boot datetime."""
         self._data[ATTR_LAST_BOOT] = value.isoformat()
-        self.save()

     @property
     def path_hassio(self):
@@ -160,7 +167,6 @@ class CoreConfig(JsonConfig):
             return

         self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
-        self.save()

     def drop_addon_repository(self, repo):
         """Remove a custom repository from list."""
@@ -168,60 +174,6 @@ class CoreConfig(JsonConfig):
             return

         self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
-        self.save()
-
-    @property
-    def security_initialize(self):
-        """Return is security was initialize."""
-        return self._data[ATTR_SECURITY]
-
-    @security_initialize.setter
-    def security_initialize(self, value):
-        """Set is security initialize."""
-        self._data[ATTR_SECURITY] = value
-        self.save()
-
-    @property
-    def security_totp(self):
-        """Return the TOTP key."""
-        return self._data.get(ATTR_TOTP)
-
-    @security_totp.setter
-    def security_totp(self, value):
-        """Set the TOTP key."""
-        self._data[ATTR_TOTP] = value
-        self.save()
-
-    @property
-    def security_password(self):
-        """Return the password key."""
-        return self._data.get(ATTR_PASSWORD)
-
-    @security_password.setter
-    def security_password(self, value):
-        """Set the password key."""
-        self._data[ATTR_PASSWORD] = value
-        self.save()
-
-    @property
-    def security_sessions(self):
-        """Return api sessions."""
-        return {
-            session: parse_datetime(until) for
-            session, until in self._data[ATTR_SESSIONS].items()
-        }
-
-    def add_security_session(self, session, valid):
-        """Set the a new session."""
-        self._data[ATTR_SESSIONS].update(
-            {session: valid.isoformat()}
-        )
-        self.save()
-
-    def drop_security_session(self, session):
-        """Delete the a session."""
-        self._data[ATTR_SESSIONS].pop(session, None)
-        self.save()

     @property
     def audio_output(self):
@@ -232,7 +184,6 @@ class CoreConfig(JsonConfig):
     def audio_output(self, value):
         """Set ALSA audio output card,dev."""
         self._data[ATTR_AUDIO_OUTPUT] = value
-        self.save()

     @property
     def audio_input(self):
@@ -243,4 +194,3 @@ class CoreConfig(JsonConfig):
     def audio_input(self, value):
         """Set ALSA audio input card,dev."""
         self._data[ATTR_AUDIO_INPUT] = value
-        self.save()
@@ -2,7 +2,7 @@
 from pathlib import Path
 from ipaddress import ip_network

-HASSIO_VERSION = '0.73'
+HASSIO_VERSION = '0.85'

 URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                       'hassio/{}/version.json')
@@ -11,15 +11,6 @@ URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'

 HASSIO_DATA = Path("/data")

-RUN_UPDATE_INFO_TASKS = 28800
-RUN_UPDATE_SUPERVISOR_TASKS = 29100
-RUN_UPDATE_ADDONS_TASKS = 57600
-RUN_RELOAD_ADDONS_TASKS = 28800
-RUN_RELOAD_SNAPSHOTS_TASKS = 72000
-RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
-RUN_WATCHDOG_HOMEASSISTANT_API = 300
-RUN_CLEANUP_API_SESSIONS = 900
-
 FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
 FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
 FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
@@ -50,11 +41,15 @@ RESULT_OK = 'ok'
 CONTENT_TYPE_BINARY = 'application/octet-stream'
 CONTENT_TYPE_PNG = 'image/png'
 CONTENT_TYPE_JSON = 'application/json'
+CONTENT_TYPE_TEXT = 'text/plain'
 HEADER_HA_ACCESS = 'x-ha-access'

+ATTR_WAIT_BOOT = 'wait_boot'
 ATTR_WATCHDOG = 'watchdog'
+ATTR_CHANGELOG = 'changelog'
 ATTR_DATE = 'date'
 ATTR_ARCH = 'arch'
+ATTR_LONG_DESCRIPTION = 'long_description'
 ATTR_HOSTNAME = 'hostname'
 ATTR_TIMEZONE = 'timezone'
 ATTR_ARGS = 'args'
@@ -64,6 +59,7 @@ ATTR_SOURCE = 'source'
 ATTR_FEATURES = 'features'
 ATTR_ADDONS = 'addons'
 ATTR_VERSION = 'version'
+ATTR_AUTO_UART = 'auto_uart'
 ATTR_LAST_BOOT = 'last_boot'
 ATTR_LAST_VERSION = 'last_version'
 ATTR_BETA_CHANNEL = 'beta_channel'
@@ -83,6 +79,7 @@ ATTR_DETACHED = 'detached'
 ATTR_STATE = 'state'
 ATTR_SCHEMA = 'schema'
 ATTR_IMAGE = 'image'
+ATTR_ICON = 'icon'
 ATTR_LOGO = 'logo'
 ATTR_STDIN = 'stdin'
 ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
@@ -100,6 +97,8 @@ ATTR_BUILD = 'build'
 ATTR_DEVICES = 'devices'
 ATTR_ENVIRONMENT = 'environment'
 ATTR_HOST_NETWORK = 'host_network'
+ATTR_HOST_IPC = 'host_ipc'
+ATTR_HOST_DBUS = 'host_dbus'
 ATTR_NETWORK = 'network'
 ATTR_TMPFS = 'tmpfs'
 ATTR_PRIVILEGED = 'privileged'
@@ -110,6 +109,7 @@ ATTR_HOMEASSISTANT = 'homeassistant'
 ATTR_HASSIO = 'hassio'
 ATTR_HASSIO_API = 'hassio_api'
 ATTR_HOMEASSISTANT_API = 'homeassistant_api'
+ATTR_UUID = 'uuid'
 ATTR_FOLDERS = 'folders'
 ATTR_SIZE = 'size'
 ATTR_TYPE = 'type'
@@ -127,8 +127,15 @@ ATTR_SECURITY = 'security'
 ATTR_BUILD_FROM = 'build_from'
 ATTR_SQUASH = 'squash'
 ATTR_GPIO = 'gpio'
-ATTR_LEGACY = 'ATTR_LEGACY'
+ATTR_LEGACY = 'legacy'
 ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'
+ATTR_CPU_PERCENT = 'cpu_percent'
+ATTR_NETWORK_RX = 'network_rx'
+ATTR_NETWORK_TX = 'network_tx'
+ATTR_MEMORY_LIMIT = 'memory_limit'
+ATTR_MEMORY_USAGE = 'memory_usage'
+ATTR_BLK_READ = 'blk_read'
+ATTR_BLK_WRITE = 'blk_write'

 STARTUP_INITIALIZE = 'initialize'
 STARTUP_SYSTEM = 'system'
176
hassio/core.py
176
hassio/core.py
@@ -2,189 +2,109 @@
 import asyncio
 import logging
 
-import aiohttp
-
-from .addons import AddonManager
-from .api import RestAPI
-from .host_control import HostControl
+from .coresys import CoreSysAttributes
 from .const import (
-    RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
-    RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT_DOCKER,
-    RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
-    STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
-    RUN_UPDATE_ADDONS_TASKS)
-from .hardware import Hardware
-from .homeassistant import HomeAssistant
-from .scheduler import Scheduler
-from .dock import DockerAPI
-from .dock.supervisor import DockerSupervisor
-from .dns import DNSForward
-from .snapshots import SnapshotsManager
-from .updater import Updater
-from .tasks import (
-    hassio_update, homeassistant_watchdog_docker, api_sessions_cleanup,
-    addons_update)
-from .tools import fetch_timezone
+    STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE)
+from .utils.dt import fetch_timezone
 
 _LOGGER = logging.getLogger(__name__)
 
 
-class HassIO(object):
+class HassIO(CoreSysAttributes):
     """Main object of hassio."""
 
-    def __init__(self, loop, config):
+    def __init__(self, coresys):
         """Initialize hassio object."""
-        self.exit_code = 0
-        self.loop = loop
-        self.config = config
-        self.websession = aiohttp.ClientSession(loop=loop)
-        self.updater = Updater(config, loop, self.websession)
-        self.scheduler = Scheduler(loop)
-        self.api = RestAPI(config, loop)
-        self.hardware = Hardware()
-        self.docker = DockerAPI()
-        self.dns = DNSForward()
-
-        # init basic docker container
-        self.supervisor = DockerSupervisor(
-            config, loop, self.docker, self.stop)
-
-        # init homeassistant
-        self.homeassistant = HomeAssistant(
-            config, loop, self.docker, self.updater)
-
-        # init HostControl
-        self.host_control = HostControl(loop)
-
-        # init addon system
-        self.addons = AddonManager(config, loop, self.docker)
-
-        # init snapshot system
-        self.snapshots = SnapshotsManager(
-            config, loop, self.scheduler, self.addons, self.homeassistant)
+        self.coresys = coresys
 
     async def setup(self):
         """Setup HassIO orchestration."""
-        # supervisor
-        if not await self.supervisor.attach():
-            _LOGGER.fatal("Can't setup supervisor docker container!")
-        await self.supervisor.cleanup()
-
-        # set running arch
-        self.config.arch = self.supervisor.arch
-
         # update timezone
-        if self.config.timezone == 'UTC':
-            self.config.timezone = await fetch_timezone(self.websession)
+        if self._config.timezone == 'UTC':
+            self._config.timezone = await fetch_timezone(self._websession)
+
+        # supervisor
+        await self._supervisor.load()
 
         # hostcontrol
-        await self.host_control.load()
-
-        # schedule update info tasks
-        self.scheduler.register_task(
-            self.host_control.load, RUN_UPDATE_INFO_TASKS)
-
-        # rest api views
-        self.api.register_host(self.host_control, self.hardware)
-        self.api.register_network(self.host_control)
-        self.api.register_supervisor(
-            self.supervisor, self.snapshots, self.addons, self.host_control,
-            self.updater)
-        self.api.register_homeassistant(self.homeassistant)
-        self.api.register_addons(self.addons)
-        self.api.register_security()
-        self.api.register_snapshots(self.snapshots)
-        self.api.register_panel()
-
-        # schedule api session cleanup
-        self.scheduler.register_task(
-            api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
-            now=True)
+        await self._host_control.load()
 
         # Load homeassistant
-        await self.homeassistant.prepare()
+        await self._homeassistant.load()
 
         # Load addons
-        await self.addons.prepare()
+        await self._addons.load()
 
-        # schedule addon update task
-        self.scheduler.register_task(
-            self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
-        self.scheduler.register_task(
-            addons_update(self.loop, self.addons), RUN_UPDATE_ADDONS_TASKS)
+        # rest api views
+        await self._api.load()
 
-        # schedule self update task
-        self.scheduler.register_task(
-            hassio_update(self.supervisor, self.updater),
-            RUN_UPDATE_SUPERVISOR_TASKS)
+        # load last available data
+        await self._updater.load()
 
-        # schedule snapshot update tasks
-        self.scheduler.register_task(
-            self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
+        # load last available data
+        await self._snapshots.load()
 
         # start dns forwarding
-        self.loop.create_task(self.dns.start())
+        self._loop.create_task(self._dns.start())
 
         # start addon mark as initialize
-        await self.addons.auto_boot(STARTUP_INITIALIZE)
+        await self._addons.auto_boot(STARTUP_INITIALIZE)
 
     async def start(self):
         """Start HassIO orchestration."""
         # on release channel, try update itself
         # on beta channel, only read new versions
-        await asyncio.wait(
-            [hassio_update(self.supervisor, self.updater)()],
-            loop=self.loop
-        )
+        if not self._updater.beta_channel and self._supervisor.need_update:
+            if await self._supervisor.update():
+                return
+        else:
+            _LOGGER.info("Ignore Hass.io auto updates on beta mode")
 
         # start api
-        await self.api.start()
-        _LOGGER.info("Start hassio api on %s", self.docker.network.supervisor)
+        await self._api.start()
+        _LOGGER.info("Start API on %s", self._docker.network.supervisor)
 
         try:
             # HomeAssistant is already running / supervisor has only rebooted
-            if self.hardware.last_boot == self.config.last_boot:
-                _LOGGER.info("HassIO reboot detected")
+            if self._hardware.last_boot == self._config.last_boot:
+                _LOGGER.info("Hass.io reboot detected")
                 return
 
             # start addon mark as system
-            await self.addons.auto_boot(STARTUP_SYSTEM)
+            await self._addons.auto_boot(STARTUP_SYSTEM)
 
             # start addon mark as services
-            await self.addons.auto_boot(STARTUP_SERVICES)
+            await self._addons.auto_boot(STARTUP_SERVICES)
 
             # run HomeAssistant
-            if self.homeassistant.boot:
-                await self.homeassistant.run()
+            if self._homeassistant.boot:
+                await self._homeassistant.run()
 
             # start addon mark as application
-            await self.addons.auto_boot(STARTUP_APPLICATION)
+            await self._addons.auto_boot(STARTUP_APPLICATION)
 
             # store new last boot
-            self.config.last_boot = self.hardware.last_boot
+            self._config.last_boot = self._hardware.last_boot
 
         finally:
-            # schedule homeassistant watchdog
-            self.scheduler.register_task(
-                homeassistant_watchdog_docker(self.loop, self.homeassistant),
-                RUN_WATCHDOG_HOMEASSISTANT_DOCKER)
-
-            # self.scheduler.register_task(
-            #     homeassistant_watchdog_api(self.loop, self.homeassistant),
-            #     RUN_WATCHDOG_HOMEASSISTANT_API)
+            # Add core tasks into scheduler
+            await self._tasks.load()
 
             # If landingpage / run upgrade in background
-            if self.homeassistant.version == 'landingpage':
-                self.loop.create_task(self.homeassistant.install())
+            if self._homeassistant.version == 'landingpage':
+                self._loop.create_task(self._homeassistant.install())
+
+            _LOGGER.info("Hass.io is up and running")
 
     async def stop(self):
         """Stop a running orchestration."""
         # don't process scheduler anymore
-        self.scheduler.suspend = True
+        self._scheduler.suspend = True
 
         # process stop tasks
-        self.websession.close()
-        self.homeassistant.websession.close()
+        self._websession.close()
+        self._websession_ssl.close()
 
         # process async stop tasks
-        await asyncio.wait([self.api.stop(), self.dns.stop()], loop=self.loop)
+        await asyncio.wait(
+            [self._api.stop(), self._dns.stop()], loop=self._loop)
190 hassio/coresys.py (new file)
@@ -0,0 +1,190 @@
+"""Handle core shared data."""
+
+import aiohttp
+
+from .config import CoreConfig
+from .docker import DockerAPI
+from .misc.dns import DNSForward
+from .misc.hardware import Hardware
+from .misc.host_control import HostControl
+from .misc.scheduler import Scheduler
+
+
+class CoreSys(object):
+    """Class that handle all shared data."""
+
+    def __init__(self, loop):
+        """Initialize coresys."""
+        # Static attributes
+        self.exit_code = 0
+
+        # External objects
+        self._loop = loop
+        self._websession = aiohttp.ClientSession(loop=loop)
+        self._websession_ssl = aiohttp.ClientSession(
+            connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)
+
+        # Global objects
+        self._config = CoreConfig()
+        self._hardware = Hardware()
+        self._docker = DockerAPI()
+        self._scheduler = Scheduler(loop=loop)
+        self._dns = DNSForward(loop=loop)
+        self._host_control = HostControl(loop=loop)
+
+        # Internal objects pointers
+        self._homeassistant = None
+        self._supervisor = None
+        self._addons = None
+        self._api = None
+        self._updater = None
+        self._snapshots = None
+        self._tasks = None
+
+    @property
+    def arch(self):
+        """Return running arch of hass.io system."""
+        if self._supervisor:
+            return self._supervisor.arch
+        return None
+
+    @property
+    def loop(self):
+        """Return loop object."""
+        return self._loop
+
+    @property
+    def websession(self):
+        """Return websession object."""
+        return self._websession
+
+    @property
+    def websession_ssl(self):
+        """Return websession object with disabled SSL."""
+        return self._websession_ssl
+
+    @property
+    def config(self):
+        """Return CoreConfig object."""
+        return self._config
+
+    @property
+    def hardware(self):
+        """Return Hardware object."""
+        return self._hardware
+
+    @property
+    def docker(self):
+        """Return DockerAPI object."""
+        return self._docker
+
+    @property
+    def scheduler(self):
+        """Return Scheduler object."""
+        return self._scheduler
+
+    @property
+    def dns(self):
+        """Return DNSForward object."""
+        return self._dns
+
+    @property
+    def host_control(self):
+        """Return HostControl object."""
+        return self._host_control
+
+    @property
+    def homeassistant(self):
+        """Return HomeAssistant object."""
+        return self._homeassistant
+
+    @homeassistant.setter
+    def homeassistant(self, value):
+        """Set a HomeAssistant object."""
+        if self._homeassistant:
+            raise RuntimeError("HomeAssistant already set!")
+        self._homeassistant = value
+
+    @property
+    def supervisor(self):
+        """Return Supervisor object."""
+        return self._supervisor
+
+    @supervisor.setter
+    def supervisor(self, value):
+        """Set a Supervisor object."""
+        if self._supervisor:
+            raise RuntimeError("Supervisor already set!")
+        self._supervisor = value
+
+    @property
+    def api(self):
+        """Return API object."""
+        return self._api
+
+    @api.setter
+    def api(self, value):
+        """Set an API object."""
+        if self._api:
+            raise RuntimeError("API already set!")
+        self._api = value
+
+    @property
+    def updater(self):
+        """Return Updater object."""
+        return self._updater
+
+    @updater.setter
+    def updater(self, value):
+        """Set an Updater object."""
+        if self._updater:
+            raise RuntimeError("Updater already set!")
+        self._updater = value
+
+    @property
+    def addons(self):
+        """Return AddonManager object."""
+        return self._addons
+
+    @addons.setter
+    def addons(self, value):
+        """Set an AddonManager object."""
+        if self._addons:
+            raise RuntimeError("AddonManager already set!")
+        self._addons = value
+
+    @property
+    def snapshots(self):
+        """Return SnapshotsManager object."""
+        return self._snapshots
+
+    @snapshots.setter
+    def snapshots(self, value):
+        """Set a SnapshotsManager object."""
+        if self._snapshots:
+            raise RuntimeError("SnapshotsManager already set!")
+        self._snapshots = value
+
+    @property
+    def tasks(self):
+        """Return Tasks object."""
+        return self._tasks
+
+    @tasks.setter
+    def tasks(self, value):
+        """Set a Tasks object."""
+        if self._tasks:
+            raise RuntimeError("Tasks already set!")
+        self._tasks = value
+
+
+class CoreSysAttributes(object):
+    """Inherit basic CoreSysAttributes."""
+
+    coresys = None
+
+    def __getattr__(self, name):
+        """Mapping to coresys."""
+        if hasattr(self.coresys, name[1:]):
+            return getattr(self.coresys, name[1:])
+        raise AttributeError(f"Can't find {name} on {self.__class__}")
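The `__getattr__` hook above is the heart of this refactor: looking up an underscore-prefixed attribute such as `self._config` on any class that inherits `CoreSysAttributes` is forwarded to the matching property on the shared `CoreSys` instance. A minimal, self-contained sketch of the pattern (the `Component` class and the placeholder values are illustrative, not part of the diff):

```python
class CoreSys:
    """Stand-in for the shared-data object (values illustrative)."""

    def __init__(self):
        self.config = "config-object"
        self.loop = "loop-object"


class CoreSysAttributes:
    """Forward `self._name` lookups to `self.coresys.name`."""

    coresys = None

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails.
        if hasattr(self.coresys, name[1:]):
            return getattr(self.coresys, name[1:])
        raise AttributeError(f"Can't find {name} on {self.__class__}")


class Component(CoreSysAttributes):
    def __init__(self, coresys):
        self.coresys = coresys


comp = Component(CoreSys())
print(comp._config)  # -> "config-object", resolved via coresys.config
```

Because `__getattr__` only fires when normal lookup fails, real attributes set on a subclass (like `_meta` in the Docker classes below) still shadow the forwarded ones.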
@@ -1,77 +0,0 @@
-"""Init file for HassIO docker object."""
-import logging
-import os
-
-import docker
-
-from .interface import DockerInterface
-from .util import docker_process
-
-_LOGGER = logging.getLogger(__name__)
-
-
-class DockerSupervisor(DockerInterface):
-    """Docker hassio wrapper for HomeAssistant."""
-
-    def __init__(self, config, loop, api, stop_callback, image=None):
-        """Initialize docker base wrapper."""
-        super().__init__(config, loop, api, image=image)
-        self.stop_callback = stop_callback
-
-    @property
-    def name(self):
-        """Return name of docker container."""
-        return os.environ['SUPERVISOR_NAME']
-
-    def _attach(self):
-        """Attach to running docker container.
-
-        Need run inside executor.
-        """
-        try:
-            container = self.docker.containers.get(self.name)
-        except docker.errors.DockerException:
-            return False
-
-        self.process_metadata(container.attrs)
-        _LOGGER.info("Attach to supervisor %s with version %s",
-                     self.image, self.version)
-
-        # if already attach
-        if container in self.docker.network.containers:
-            return True
-
-        # attach to network
-        return self.docker.network.attach_container(
-            container, alias=['hassio'], ipv4=self.docker.network.supervisor)
-
-    @docker_process
-    async def update(self, tag):
-        """Update a supervisor docker image."""
-        _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
-
-        if await self.loop.run_in_executor(None, self._install, tag):
-            self.loop.call_later(1, self.loop.stop)
-            return True
-
-        return False
-
-    async def run(self):
-        """Run docker image."""
-        raise RuntimeError("Not support on supervisor docker container!")
-
-    async def install(self, tag):
-        """Pull docker image."""
-        raise RuntimeError("Not support on supervisor docker container!")
-
-    async def stop(self):
-        """Stop/remove docker container."""
-        raise RuntimeError("Not support on supervisor docker container!")
-
-    async def remove(self):
-        """Remove docker image."""
-        raise RuntimeError("Not support on supervisor docker container!")
-
-    async def restart(self):
-        """Restart docker container."""
-        raise RuntimeError("Not support on supervisor docker container!")
@@ -47,8 +47,10 @@ class DockerAPI(object):
         hostname = kwargs.get('hostname')
 
         # setup network
+        kwargs['dns_search'] = ["."]
         if network_mode:
             kwargs['dns'] = [str(self.network.supervisor)]
+            kwargs['dns_opt'] = ["ndots:0"]
         else:
             kwargs['network'] = None
 
@@ -56,7 +58,7 @@ class DockerAPI(object):
         try:
             container = self.docker.containers.create(image, **kwargs)
         except docker.errors.DockerException as err:
-            _LOGGER.error("Can't create container from %s -> %s", name, err)
+            _LOGGER.error("Can't create container from %s: %s", name, err)
             return False
 
         # attach network
@@ -71,7 +73,7 @@ class DockerAPI(object):
         try:
             container.start()
         except docker.errors.DockerException as err:
-            _LOGGER.error("Can't start %s -> %s", name, err)
+            _LOGGER.error("Can't start %s: %s", name, err)
            return False
 
         return True
@@ -98,7 +100,7 @@ class DockerAPI(object):
             output = container.logs(stdout=stdout, stderr=stderr)
 
         except docker.errors.DockerException as err:
-            _LOGGER.error("Can't execute command -> %s", err)
+            _LOGGER.error("Can't execute command: %s", err)
             return (None, b"")
 
         # cleanup container
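With these defaults, every container gets `dns` pointing at the supervisor, `dns_search` set to `"."`, and `ndots:0`, so single-label hostnames such as `hassio` are resolved as absolute names by the supervisor's DNS forwarder instead of being expanded through a search domain. A hedged sketch of roughly what `DockerAPI.run` ends up passing to docker-py (image name and supervisor IP are illustrative, not from the diff):

```python
import docker

client = docker.from_env()

# Roughly the kwargs assembled for a container on the hassio bridge network.
container = client.containers.create(
    "example/addon-image:latest",   # illustrative image
    detach=True,
    dns=["172.30.32.2"],            # illustrative supervisor address
    dns_search=["."],               # suppress search-domain expansion
    dns_opt=["ndots:0"],            # treat every name as absolute
)
```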
@@ -6,7 +6,7 @@ import docker
 import requests
 
 from .interface import DockerInterface
-from .util import docker_process
+from .utils import docker_process
 from ..addons.build import AddonBuild
 from ..const import (
     MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
@@ -19,32 +19,52 @@ AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
 class DockerAddon(DockerInterface):
     """Docker hassio wrapper for HomeAssistant."""
 
-    def __init__(self, config, loop, api, addon):
+    def __init__(self, coresys, slug):
         """Initialize docker homeassistant wrapper."""
-        super().__init__(
-            config, loop, api, image=addon.image, timeout=addon.timeout)
-        self.addon = addon
+        super().__init__(coresys)
+        self._id = slug
 
-    def process_metadata(self, metadata, force=False):
-        """Use addon data instead meta data with legacy."""
-        if not self.addon.legacy:
-            return super().process_metadata(metadata, force=force)
+    @property
+    def addon(self):
+        """Return addon of docker image."""
+        return self._addons.get(self._id)
 
-        # set meta data
-        if not self.version or force:
-            if force:  # called on install/update/build
-                self.version = self.addon.last_version
-            else:
-                self.version = self.addon.version_installed
+    @property
+    def image(self):
+        """Return name of docker image."""
+        return self.addon.image
 
-        if not self.arch:
-            self.arch = self.config.arch
+    @property
+    def timeout(self):
+        """Return timeout for docker actions."""
+        return self.addon.timeout
+
+    @property
+    def version(self):
+        """Return version of docker image."""
+        if not self.addon.legacy:
+            return super().version
+        return self.addon.version_installed
+
+    @property
+    def arch(self):
+        """Return arch of docker image."""
+        if not self.addon.legacy:
+            return super().arch
+        return self._arch
 
     @property
     def name(self):
         """Return name of docker container."""
         return "addon_{}".format(self.addon.slug)
 
+    @property
+    def ipc(self):
+        """Return the IPC namespace."""
+        if self.addon.host_ipc:
+            return 'host'
+        return None
+
     @property
     def hostname(self):
         """Return slug/id of addon."""
@@ -60,9 +80,13 @@ class DockerAddon(DockerInterface):
             'ALSA_INPUT': self.addon.audio_input,
         })
 
+        # Set api token if any API access is needed
+        if self.addon.access_hassio_api or self.addon.access_homeassistant_api:
+            addon_env['HASSIO_TOKEN'] = self.addon.uuid
+
         return {
             **addon_env,
-            'TZ': self.config.timezone,
+            'TZ': self._config.timezone,
         }
 
     @property
@@ -70,14 +94,17 @@ class DockerAddon(DockerInterface):
         """Return needed devices."""
         devices = self.addon.devices or []
 
-        # use audio devices
+        # Use audio devices
         if self.addon.with_audio and AUDIO_DEVICE not in devices:
             devices.append(AUDIO_DEVICE)
 
+        # Auto mapping UART devices
+        if self.addon.auto_uart:
+            for device in self._hardware.serial_devices:
+                devices.append(f"{device}:{device}:rwm")
+
         # Return None if no devices is present
-        if devices:
-            return devices
-        return None
+        return devices or None
 
     @property
     def ports(self):
@@ -91,20 +118,32 @@ class DockerAddon(DockerInterface):
             if host_port
         }
 
+    @property
+    def security_opt(self):
+        """Controlling security opt."""
+        privileged = self.addon.privileged or []
+
+        # Disable AppArmor since it makes trouble with SYS_ADMIN
+        if 'SYS_ADMIN' in privileged:
+            return [
+                "apparmor:unconfined",
+            ]
+        return None
+
     @property
     def tmpfs(self):
         """Return tmpfs for docker add-on."""
         options = self.addon.tmpfs
         if options:
-            return {"/tmpfs": "{}".format(options)}
+            return {"/tmpfs": f"{options}"}
         return None
 
     @property
     def network_mapping(self):
         """Return hosts mapping."""
         return {
-            'homeassistant': self.docker.network.gateway,
-            'hassio': self.docker.network.supervisor,
+            'homeassistant': self._docker.network.gateway,
+            'hassio': self._docker.network.supervisor,
         }
 
     @property
@@ -119,7 +158,7 @@ class DockerAddon(DockerInterface):
         """Generate volumes for mappings."""
         volumes = {
             str(self.addon.path_extern_data): {
-                'bind': '/data', 'mode': 'rw'
+                'bind': "/data", 'mode': 'rw'
             }}
 
         addon_mapping = self.addon.map_volumes
@@ -127,45 +166,52 @@ class DockerAddon(DockerInterface):
         # setup config mappings
         if MAP_CONFIG in addon_mapping:
             volumes.update({
-                str(self.config.path_extern_config): {
-                    'bind': '/config', 'mode': addon_mapping[MAP_CONFIG]
+                str(self._config.path_extern_config): {
+                    'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
                 }})
 
         if MAP_SSL in addon_mapping:
             volumes.update({
-                str(self.config.path_extern_ssl): {
-                    'bind': '/ssl', 'mode': addon_mapping[MAP_SSL]
+                str(self._config.path_extern_ssl): {
+                    'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
                 }})
 
         if MAP_ADDONS in addon_mapping:
             volumes.update({
-                str(self.config.path_extern_addons_local): {
-                    'bind': '/addons', 'mode': addon_mapping[MAP_ADDONS]
+                str(self._config.path_extern_addons_local): {
+                    'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
                 }})
 
         if MAP_BACKUP in addon_mapping:
             volumes.update({
-                str(self.config.path_extern_backup): {
-                    'bind': '/backup', 'mode': addon_mapping[MAP_BACKUP]
+                str(self._config.path_extern_backup): {
+                    'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
                 }})
 
         if MAP_SHARE in addon_mapping:
             volumes.update({
-                str(self.config.path_extern_share): {
-                    'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
+                str(self._config.path_extern_share): {
+                    'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
                 }})
 
         # init other hardware mappings
         if self.addon.with_gpio:
             volumes.update({
-                '/sys/class/gpio': {
-                    'bind': '/sys/class/gpio', 'mode': "rw"
+                "/sys/class/gpio": {
+                    'bind': "/sys/class/gpio", 'mode': 'rw'
                 },
-                '/sys/devices/platform/soc': {
-                    'bind': '/sys/devices/platform/soc', 'mode': "rw"
+                "/sys/devices/platform/soc": {
+                    'bind': "/sys/devices/platform/soc", 'mode': 'rw'
                 },
             })
 
+        # host dbus system
+        if self.addon.host_dbus:
+            volumes.update({
+                "/var/run/dbus": {
+                    'bind': "/var/run/dbus", 'mode': 'rw'
+                }})
+
         return volumes
 
     def _run(self):
@@ -183,17 +229,20 @@ class DockerAddon(DockerInterface):
         if not self.addon.write_options():
             return False
 
-        ret = self.docker.run(
+        ret = self._docker.run(
             self.image,
             name=self.name,
             hostname=self.hostname,
             detach=True,
+            init=True,
+            ipc_mode=self.ipc,
             stdin_open=self.addon.with_stdin,
             network_mode=self.network_mode,
             ports=self.ports,
             extra_hosts=self.network_mapping,
             devices=self.devices,
             cap_add=self.addon.privileged,
+            security_opt=self.security_opt,
             environment=self.environment,
             volumes=self.volumes,
             tmpfs=self.tmpfs
@@ -220,17 +269,17 @@ class DockerAddon(DockerInterface):
 
         Need run inside executor.
         """
-        build_env = AddonBuild(self.config, self.addon)
+        build_env = AddonBuild(self.coresys, self._id)
 
         _LOGGER.info("Start build %s:%s", self.image, tag)
         try:
-            image = self.docker.images.build(**build_env.get_docker_args(tag))
+            image = self._docker.images.build(**build_env.get_docker_args(tag))
 
             image.tag(self.image, tag='latest')
-            self.process_metadata(image.attrs, force=True)
+            self._meta = image.attrs
 
         except (docker.errors.DockerException) as err:
-            _LOGGER.error("Can't build %s:%s -> %s", self.image, tag, err)
+            _LOGGER.error("Can't build %s:%s: %s", self.image, tag, err)
             return False
 
         _LOGGER.info("Build %s:%s done", self.image, tag)
@@ -239,7 +288,7 @@ class DockerAddon(DockerInterface):
     @docker_process
     def export_image(self, path):
         """Export current images into a tar file."""
-        return self.loop.run_in_executor(None, self._export_image, path)
+        return self._loop.run_in_executor(None, self._export_image, path)
 
     def _export_image(self, tar_file):
         """Export current images into a tar file.
@@ -247,9 +296,9 @@ class DockerAddon(DockerInterface):
         Need run inside executor.
         """
         try:
-            image = self.docker.api.get_image(self.image)
+            image = self._docker.api.get_image(self.image)
         except docker.errors.DockerException as err:
-            _LOGGER.error("Can't fetch image %s -> %s", self.image, err)
+            _LOGGER.error("Can't fetch image %s: %s", self.image, err)
             return False
 
         try:
@@ -257,7 +306,7 @@ class DockerAddon(DockerInterface):
             for chunk in image.stream():
                 write_tar.write(chunk)
         except (OSError, requests.exceptions.ReadTimeout) as err:
-            _LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
+            _LOGGER.error("Can't write tar file %s: %s", tar_file, err)
             return False
 
         _LOGGER.info("Export image %s to %s", self.image, tar_file)
@@ -266,7 +315,7 @@ class DockerAddon(DockerInterface):
     @docker_process
     def import_image(self, path, tag):
         """Import a tar file as image."""
-        return self.loop.run_in_executor(None, self._import_image, path, tag)
+        return self._loop.run_in_executor(None, self._import_image, path, tag)
 
     def _import_image(self, tar_file, tag):
         """Import a tar file as image.
@@ -275,16 +324,16 @@ class DockerAddon(DockerInterface):
         """
         try:
             with tar_file.open("rb") as read_tar:
-                self.docker.api.load_image(read_tar)
+                self._docker.api.load_image(read_tar, quiet=True)
 
-            image = self.docker.images.get(self.image)
+            image = self._docker.images.get(self.image)
             image.tag(self.image, tag=tag)
         except (docker.errors.DockerException, OSError) as err:
-            _LOGGER.error("Can't import image %s -> %s", self.image, err)
+            _LOGGER.error("Can't import image %s: %s", self.image, err)
             return False
 
         _LOGGER.info("Import image %s and tag %s", tar_file, tag)
-        self.process_metadata(image.attrs, force=True)
+        self._meta = image.attrs
         self._cleanup()
         return True
 
@@ -300,7 +349,7 @@ class DockerAddon(DockerInterface):
     @docker_process
     def write_stdin(self, data):
         """Write to add-on stdin."""
-        return self.loop.run_in_executor(None, self._write_stdin, data)
+        return self._loop.run_in_executor(None, self._write_stdin, data)
 
     def _write_stdin(self, data):
         """Write to add-on stdin.
@@ -312,10 +361,10 @@ class DockerAddon(DockerInterface):
 
         try:
             # load needed docker objects
-            container = self.docker.containers.get(self.name)
+            container = self._docker.containers.get(self.name)
             socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
         except docker.errors.DockerException as err:
-            _LOGGER.error("Can't attach to %s stdin -> %s", self.name, err)
+            _LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
             return False
 
         try:
@@ -324,7 +373,7 @@ class DockerAddon(DockerInterface):
             os.write(socket.fileno(), data)
             socket.close()
         except OSError as err:
-            _LOGGER.error("Can't write to %s stdin -> %s", self.name, err)
+            _LOGGER.error("Can't write to %s stdin: %s", self.name, err)
             return False
 
         return True
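The device entries built in the `devices` property follow Docker's `<host>:<container>:<permissions>` triple; `auto_uart` simply maps every serial device the Hardware helper reports 1:1 into the container. A short sketch of the string construction (the serial-device list and flags are illustrative):

```python
# Illustrative serial devices as the Hardware helper might report them.
serial_devices = ["/dev/ttyUSB0", "/dev/ttyAMA0"]

AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"

devices = []
with_audio = True
auto_uart = True

# Use audio devices
if with_audio and AUDIO_DEVICE not in devices:
    devices.append(AUDIO_DEVICE)

# Auto mapping UART devices: host path == container path, rwm permissions
if auto_uart:
    for device in serial_devices:
        devices.append(f"{device}:{device}:rwm")

print(devices or None)
# ['/dev/snd:/dev/snd:rwm', '/dev/ttyUSB0:/dev/ttyUSB0:rwm',
#  '/dev/ttyAMA0:/dev/ttyAMA0:rwm']
```

The `devices or None` idiom at the end keeps docker-py happy: an empty list is collapsed to `None` so no device cgroup rules are created at all.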
@@ -13,10 +13,10 @@ HASS_DOCKER_NAME = 'homeassistant'
 class DockerHomeAssistant(DockerInterface):
     """Docker hassio wrapper for HomeAssistant."""
 
-    def __init__(self, config, loop, api, data):
-        """Initialize docker homeassistant wrapper."""
-        super().__init__(config, loop, api, image=data.image)
-        self.data = data
+    @property
+    def image(self):
+        """Return name of docker image."""
+        return self._homeassistant.image
 
     @property
     def name(self):
@@ -26,14 +26,10 @@ class DockerHomeAssistant(DockerInterface):
     @property
     def devices(self):
         """Create list of special device to map into docker."""
-        if not self.data.devices:
-            return
-
         devices = []
-        for device in self.data.devices:
-            devices.append("/dev/{0}:/dev/{0}:rwm".format(device))
-
-        return devices
+        for device in self._hardware.serial_devices:
+            devices.append(f"{device}:{device}:rwm")
+        return devices or None
 
     def _run(self):
         """Run docker image.
@@ -41,29 +37,31 @@ class DockerHomeAssistant(DockerInterface):
         Need run inside executor.
         """
         if self._is_running():
-            return
+            return False
 
         # cleanup
         self._stop()
 
-        ret = self.docker.run(
+        ret = self._docker.run(
             self.image,
             name=self.name,
             hostname=self.name,
             detach=True,
             privileged=True,
+            init=True,
             devices=self.devices,
             network_mode='host',
             environment={
-                'HASSIO': self.docker.network.supervisor,
-                'TZ': self.config.timezone,
+                'HASSIO': self._docker.network.supervisor,
+                'TZ': self._config.timezone,
+                'HASSIO_TOKEN': self._homeassistant.uuid,
             },
             volumes={
-                str(self.config.path_extern_config):
+                str(self._config.path_extern_config):
                     {'bind': '/config', 'mode': 'rw'},
-                str(self.config.path_extern_ssl):
+                str(self._config.path_extern_ssl):
                     {'bind': '/ssl', 'mode': 'ro'},
-                str(self.config.path_extern_share):
+                str(self._config.path_extern_share):
                     {'bind': '/share', 'mode': 'rw'},
             }
         )
@@ -79,26 +77,26 @@ class DockerHomeAssistant(DockerInterface):
 
         Need run inside executor.
         """
-        return self.docker.run_command(
+        return self._docker.run_command(
             self.image,
             command,
             detach=True,
             stdout=True,
             stderr=True,
             environment={
-                'TZ': self.config.timezone,
+                'TZ': self._config.timezone,
             },
             volumes={
-                str(self.config.path_extern_config):
+                str(self._config.path_extern_config):
                     {'bind': '/config', 'mode': 'ro'},
-                str(self.config.path_extern_ssl):
+                str(self._config.path_extern_ssl):
                     {'bind': '/ssl', 'mode': 'ro'},
             }
         )
 
     def is_initialize(self):
         """Return True if docker container exists."""
-        return self.loop.run_in_executor(None, self._is_initialize)
+        return self._loop.run_in_executor(None, self._is_initialize)
 
     def _is_initialize(self):
         """Return True if docker container exists.
@@ -106,7 +104,7 @@ class DockerHomeAssistant(DockerInterface):
         Need run inside executor.
         """
         try:
-            self.docker.containers.get(self.name)
+            self._docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False
 
@@ -5,59 +5,63 @@ import logging
 
 import docker
 
-from .util import docker_process
+from .utils import docker_process
+from .stats import DockerStats
 from ..const import LABEL_VERSION, LABEL_ARCH
+from ..coresys import CoreSysAttributes
 
 _LOGGER = logging.getLogger(__name__)
 
 
-class DockerInterface(object):
+class DockerInterface(CoreSysAttributes):
     """Docker hassio interface."""
 
-    def __init__(self, config, loop, api, image=None, timeout=30):
+    def __init__(self, coresys):
         """Initialize docker base wrapper."""
-        self.config = config
-        self.loop = loop
-        self.docker = api
-
-        self.image = image
-        self.timeout = timeout
-        self.version = None
-        self.arch = None
-        self._lock = asyncio.Lock(loop=loop)
+        self.coresys = coresys
+        self._meta = None
+        self.lock = asyncio.Lock(loop=self._loop)
+
+    @property
+    def timeout(self):
+        """Return timeout for docker actions."""
+        return 30
 
     @property
     def name(self):
         """Return name of docker container."""
         return None
 
+    @property
+    def image(self):
+        """Return name of docker image."""
+        if not self._meta:
+            return None
+        return self._meta['Config']['Image']
+
+    @property
+    def version(self):
+        """Return version of docker image."""
+        if self._meta and LABEL_VERSION in self._meta['Config']['Labels']:
+            return self._meta['Config']['Labels'][LABEL_VERSION]
+        return None
+
+    @property
+    def arch(self):
+        """Return arch of docker image."""
+        if self._meta and LABEL_ARCH in self._meta['Config']['Labels']:
+            return self._meta['Config']['Labels'][LABEL_ARCH]
+        return None
+
     @property
     def in_progress(self):
         """Return True if a task is in progress."""
-        return self._lock.locked()
-
-    def process_metadata(self, metadata, force=False):
-        """Read metadata and set it to object."""
-        # read image
-        if not self.image:
-            self.image = metadata['Config']['Image']
-
-        # read version
-        need_version = force or not self.version
-        if need_version and LABEL_VERSION in metadata['Config']['Labels']:
-            self.version = metadata['Config']['Labels'][LABEL_VERSION]
-        elif need_version:
-            _LOGGER.warning("Can't read version from %s", self.name)
-
-        # read arch
-        need_arch = force or not self.arch
-        if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
-            self.arch = metadata['Config']['Labels'][LABEL_ARCH]
+        return self.lock.locked()
 
     @docker_process
     def install(self, tag):
         """Pull docker image."""
-        return self.loop.run_in_executor(None, self._install, tag)
+        return self._loop.run_in_executor(None, self._install, tag)
 
     def _install(self, tag):
         """Pull docker image.
@@ -66,10 +70,10 @@ class DockerInterface(object):
         """
         try:
             _LOGGER.info("Pull image %s tag %s.", self.image, tag)
-            image = self.docker.images.pull("{}:{}".format(self.image, tag))
+            image = self._docker.images.pull(f"{self.image}:{tag}")
 
             image.tag(self.image, tag='latest')
-            self.process_metadata(image.attrs, force=True)
+            self._meta = image.attrs
         except docker.errors.APIError as err:
             _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
             return False
@@ -79,7 +83,7 @@ class DockerInterface(object):
 
     def exists(self):
         """Return True if docker image exists in local repo."""
-        return self.loop.run_in_executor(None, self._exists)
+        return self._loop.run_in_executor(None, self._exists)
 
     def _exists(self):
         """Return True if docker image exists in local repo.
@@ -87,8 +91,9 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            self.docker.images.get(self.image)
-        except docker.errors.DockerException:
+            image = self._docker.images.get(self.image)
+            assert f"{self.image}:{self.version}" in image.tags
+        except (docker.errors.DockerException, AssertionError):
             return False
 
         return True
@@ -98,7 +103,7 @@ class DockerInterface(object):
 
         Return a Future.
         """
-        return self.loop.run_in_executor(None, self._is_running)
+        return self._loop.run_in_executor(None, self._is_running)
 
     def _is_running(self):
         """Return True if docker is Running.
@@ -106,8 +111,8 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            container = self.docker.containers.get(self.name)
-            image = self.docker.images.get(self.image)
+            container = self._docker.containers.get(self.name)
+            image = self._docker.images.get(self.image)
         except docker.errors.DockerException:
             return False
 
@@ -124,7 +129,7 @@ class DockerInterface(object):
     @docker_process
     def attach(self):
         """Attach to running docker container."""
-        return self.loop.run_in_executor(None, self._attach)
+        return self._loop.run_in_executor(None, self._attach)
 
     def _attach(self):
         """Attach to running docker container.
@@ -133,13 +138,12 @@ class DockerInterface(object):
         """
         try:
             if self.image:
-                obj_data = self.docker.images.get(self.image).attrs
+                self._meta = self._docker.images.get(self.image).attrs
             else:
-                obj_data = self.docker.containers.get(self.name).attrs
+                self._meta = self._docker.containers.get(self.name).attrs
         except docker.errors.DockerException:
             return False
 
-        self.process_metadata(obj_data)
         _LOGGER.info(
             "Attach to image %s with version %s", self.image, self.version)
 
@@ -148,7 +152,7 @@ class DockerInterface(object):
     @docker_process
     def run(self):
         """Run docker image."""
-        return self.loop.run_in_executor(None, self._run)
+        return self._loop.run_in_executor(None, self._run)
 
     def _run(self):
         """Run docker image.
@@ -160,7 +164,7 @@ class DockerInterface(object):
     @docker_process
     def stop(self):
         """Stop/remove docker container."""
-        return self.loop.run_in_executor(None, self._stop)
+        return self._loop.run_in_executor(None, self._stop)
 
     def _stop(self):
         """Stop and remove docker container.
@@ -168,7 +172,7 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            container = self.docker.containers.get(self.name)
+            container = self._docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False
 
@@ -186,7 +190,7 @@ class DockerInterface(object):
     @docker_process
     def remove(self):
         """Remove docker images."""
-        return self.loop.run_in_executor(None, self._remove)
+        return self._loop.run_in_executor(None, self._remove)
 
     def _remove(self):
         """remove docker images.
@@ -201,27 +205,24 @@ class DockerInterface(object):
 
         try:
             with suppress(docker.errors.ImageNotFound):
-                self.docker.images.remove(
-                    image="{}:latest".format(self.image), force=True)
+                self._docker.images.remove(
+                    image=f"{self.image}:latest", force=True)
 
             with suppress(docker.errors.ImageNotFound):
-                self.docker.images.remove(
-                    image="{}:{}".format(self.image, self.version), force=True)
+                self._docker.images.remove(
+                    image=f"{self.image}:{self.version}", force=True)
 
         except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't remove image %s -> %s", self.image, err)
+            _LOGGER.warning("Can't remove image %s: %s", self.image, err)
             return False
 
-        # clean metadata
-        self.version = None
-        self.arch = None
+        self._meta = None
 
         return True
 
     @docker_process
     def update(self, tag):
         """Update a docker image."""
-        return self.loop.run_in_executor(None, self._update, tag)
+        return self._loop.run_in_executor(None, self._update, tag)
 
     def _update(self, tag):
         """Update a docker image.
@@ -246,7 +247,7 @@ class DockerInterface(object):
 
         Return a Future.
         """
-        return self.loop.run_in_executor(None, self._logs)
+        return self._loop.run_in_executor(None, self._logs)
 
     def _logs(self):
         """Return docker logs of container.
@@ -254,19 +255,19 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            container = self.docker.containers.get(self.name)
+            container = self._docker.containers.get(self.name)
         except docker.errors.DockerException:
             return b""
 
         try:
             return container.logs(tail=100, stdout=True, stderr=True)
         except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't grab logs from %s -> %s", self.image, err)
+            _LOGGER.warning("Can't grab logs from %s: %s", self.image, err)
 
     @docker_process
     def restart(self):
         """Restart docker container."""
-        return self.loop.run_in_executor(None, self._restart)
+        return self._loop.run_in_executor(None, self._restart)
 
     def _restart(self):
         """Restart docker container.
@@ -274,7 +275,7 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            container = self.docker.containers.get(self.name)
+            container = self._docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False
 
@@ -283,7 +284,7 @@ class DockerInterface(object):
         try:
             container.restart(timeout=self.timeout)
         except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't restart %s -> %s", self.image, err)
+            _LOGGER.warning("Can't restart %s: %s", self.image, err)
             return False
 
         return True
@@ -291,7 +292,7 @@ class DockerInterface(object):
     @docker_process
     def cleanup(self):
         """Check if old version exists and cleanup."""
-        return self.loop.run_in_executor(None, self._cleanup)
+        return self._loop.run_in_executor(None, self._cleanup)
 
     def _cleanup(self):
         """Check if old version exists and cleanup.
@@ -299,25 +300,25 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            latest = self.docker.images.get(self.image)
+            latest = self._docker.images.get(self.image)
         except docker.errors.DockerException:
             _LOGGER.warning("Can't find %s for cleanup", self.image)
             return False
 
-        for image in self.docker.images.list(name=self.image):
+        for image in self._docker.images.list(name=self.image):
             if latest.id == image.id:
                 continue
 
             with suppress(docker.errors.DockerException):
                 _LOGGER.info("Cleanup docker images: %s", image.tags)
-                self.docker.images.remove(image.id, force=True)
+                self._docker.images.remove(image.id, force=True)
 
         return True
 
     @docker_process
     def execute_command(self, command):
         """Create a temporary container and run command."""
-        return self.loop.run_in_executor(None, self._execute_command, command)
+        return self._loop.run_in_executor(None, self._execute_command, command)
 
     def _execute_command(self, command):
         """Create a temporary container and run command.
@@ -325,3 +326,24 @@ class DockerInterface(object):
         Need run inside executor.
         """
         raise NotImplementedError()
+
+    def stats(self):
+        """Read and return stats from container."""
+        return self._loop.run_in_executor(None, self._stats)
+
+    def _stats(self):
+        """Read and return stats from container.
+
+        Need run inside executor.
+        """
+        try:
+            container = self._docker.containers.get(self.name)
+        except docker.errors.DockerException:
+            return None
+
+        try:
+            stats = container.stats(stream=False)
+            return DockerStats(stats)
+        except docker.errors.DockerException as err:
+            _LOGGER.error("Can't read stats from %s: %s", self.name, err)
+            return None
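The refactored interface derives `image`, `version`, and `arch` lazily from the raw attrs dict that docker-py returns, instead of copying them into instance attributes via the removed `process_metadata`. A minimal sketch of that lookup against a stubbed attrs payload (the label keys mirror `LABEL_VERSION`/`LABEL_ARCH` from hassio.const; the concrete values are illustrative):

```python
LABEL_VERSION = 'io.hass.version'
LABEL_ARCH = 'io.hass.arch'

# Shape of the attrs dict docker-py returns for an image or container.
meta = {
    'Config': {
        'Image': 'example/amd64-addon',          # illustrative
        'Labels': {LABEL_VERSION: '1.0', LABEL_ARCH: 'amd64'},
    }
}

def version_of(meta):
    """Mirror DockerInterface.version: None until metadata is attached."""
    if meta and LABEL_VERSION in meta['Config']['Labels']:
        return meta['Config']['Labels'][LABEL_VERSION]
    return None

print(version_of(meta))   # '1.0'
print(version_of(None))   # None, i.e. not yet attached
```

Keeping only `_meta` means a single `attach()` or `install()` refreshes everything at once, and there is no stale `version`/`arch` state to reset beyond `self._meta = None`.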
@@ -9,7 +9,10 @@ _LOGGER = logging.getLogger(__name__)


 class DockerNetwork(object):
-    """Internal HassIO Network."""
+    """Internal HassIO Network.
+
+    This class is not AsyncIO safe!
+    """

     def __init__(self, dock):
         """Initialize internal hassio network."""
@@ -52,7 +55,8 @@ class DockerNetwork(object):
         ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

         return self.docker.networks.create(
-            DOCKER_NETWORK, driver='bridge', ipam=ipam_config, options={
+            DOCKER_NETWORK, driver='bridge', ipam=ipam_config,
+            enable_ipv6=False, options={
                 "com.docker.network.bridge.name": DOCKER_NETWORK,
             })
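A standalone sketch of the docker-py call this hunk extends with `enable_ipv6=False`; the network name, subnet, and gateway values below are hypothetical, not taken from the diff:

```python
# Sketch: create a bridge network with a fixed IPAM pool and IPv6 disabled.
import docker

client = docker.from_env()
ipam_pool = docker.types.IPAMPool(subnet="172.30.32.0/23",
                                  gateway="172.30.32.1")
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

network = client.networks.create(
    "hassio", driver="bridge", ipam=ipam_config, enable_ipv6=False,
    options={"com.docker.network.bridge.name": "hassio"})
```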
@@ -66,7 +70,7 @@ class DockerNetwork(object):
         try:
             self.network.connect(container, aliases=alias, ipv4_address=ipv4)
         except docker.errors.APIError as err:
-            _LOGGER.error("Can't link container to hassio-net -> %s", err)
+            _LOGGER.error("Can't link container to hassio-net: %s", err)
             return False

         self.network.reload()
@@ -86,4 +90,4 @@ class DockerNetwork(object):

         except docker.errors.APIError as err:
             _LOGGER.warning(
-                "Can't disconnect container from default -> %s", err)
+                "Can't disconnect container from default: %s", err)
90  hassio/docker/stats.py  Normal file
@@ -0,0 +1,90 @@
+"""Calc & represent docker stats data."""
+from contextlib import suppress
+
+
+class DockerStats(object):
+    """Hold stats data from container inside."""
+
+    def __init__(self, stats):
+        """Initialize docker stats."""
+        self._cpu = 0.0
+        self._network_rx = 0
+        self._network_tx = 0
+        self._blk_read = 0
+        self._blk_write = 0
+
+        try:
+            self._memory_usage = stats['memory_stats']['usage']
+            self._memory_limit = stats['memory_stats']['limit']
+        except KeyError:
+            self._memory_usage = 0
+            self._memory_limit = 0
+
+        with suppress(KeyError):
+            self._calc_cpu_percent(stats)
+
+        with suppress(KeyError):
+            self._calc_network(stats['networks'])
+
+        with suppress(KeyError):
+            self._calc_block_io(stats['blkio_stats'])
+
+    def _calc_cpu_percent(self, stats):
+        """Calculate CPU percent."""
+        cpu_delta = stats['cpu_stats']['cpu_usage']['total_usage'] - \
+            stats['precpu_stats']['cpu_usage']['total_usage']
+        system_delta = stats['cpu_stats']['system_cpu_usage'] - \
+            stats['precpu_stats']['system_cpu_usage']
+
+        if system_delta > 0.0 and cpu_delta > 0.0:
+            self._cpu = (cpu_delta / system_delta) * \
+                len(stats['cpu_stats']['cpu_usage']['percpu_usage']) * 100.0
+
+    def _calc_network(self, networks):
+        """Calculate Network IO stats."""
+        for _, stats in networks.items():
+            self._network_rx += stats['rx_bytes']
+            self._network_tx += stats['tx_bytes']
+
+    def _calc_block_io(self, blkio):
+        """Calculate block IO stats."""
+        for stats in blkio['io_service_bytes_recursive']:
+            if stats['op'] == 'Read':
+                self._blk_read += stats['value']
+            elif stats['op'] == 'Write':
+                self._blk_write += stats['value']
+
+    @property
+    def cpu_percent(self):
+        """Return CPU percent."""
+        return self._cpu
+
+    @property
+    def memory_usage(self):
+        """Return memory usage."""
+        return self._memory_usage
+
+    @property
+    def memory_limit(self):
+        """Return memory limit."""
+        return self._memory_limit
+
+    @property
+    def network_rx(self):
+        """Return network rx stats."""
+        return self._network_rx
+
+    @property
+    def network_tx(self):
+        """Return network tx stats."""
+        return self._network_tx
+
+    @property
+    def blk_read(self):
+        """Return block IO read stats."""
+        return self._blk_read
+
+    @property
+    def blk_write(self):
+        """Return block IO write stats."""
+        return self._blk_write
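A small usage sketch for the new `DockerStats` class. The payload below is hand-written in the shape the Docker stats API returns; every number is made up so the CPU arithmetic is easy to follow:

```python
stats = {
    'memory_stats': {'usage': 52_428_800, 'limit': 1_073_741_824},
    'cpu_stats': {
        'cpu_usage': {'total_usage': 400_000,
                      'percpu_usage': [0, 0, 0, 0]},  # 4 CPUs
        'system_cpu_usage': 2_000_000,
    },
    'precpu_stats': {
        'cpu_usage': {'total_usage': 200_000},
        'system_cpu_usage': 1_000_000,
    },
    'networks': {'eth0': {'rx_bytes': 1024, 'tx_bytes': 2048}},
    'blkio_stats': {'io_service_bytes_recursive': [
        {'op': 'Read', 'value': 4096},
        {'op': 'Write', 'value': 8192},
    ]},
}

data = DockerStats(stats)
# cpu_delta=200000, system_delta=1000000 -> 0.2 * 4 CPUs * 100 = 80.0
assert data.cpu_percent == 80.0
assert data.network_rx == 1024 and data.blk_write == 8192
```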
41  hassio/docker/supervisor.py  Normal file
@@ -0,0 +1,41 @@
+"""Init file for HassIO docker object."""
+import logging
+import os
+
+import docker
+
+from .interface import DockerInterface
+from ..coresys import CoreSysAttributes
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class DockerSupervisor(DockerInterface, CoreSysAttributes):
+    """Docker hassio wrapper for HomeAssistant."""
+
+    @property
+    def name(self):
+        """Return name of docker container."""
+        return os.environ['SUPERVISOR_NAME']
+
+    def _attach(self):
+        """Attach to running docker container.
+
+        Need run inside executor.
+        """
+        try:
+            container = self._docker.containers.get(self.name)
+        except docker.errors.DockerException:
+            return False
+
+        self._meta = container.attrs
+        _LOGGER.info("Attach to supervisor %s with version %s",
+                     self.image, self.version)
+
+        # if already attach
+        if container in self._docker.network.containers:
+            return True
+
+        # attach to network
+        return self._docker.network.attach_container(
+            container, alias=['hassio'], ipv4=self._docker.network.supervisor)
@@ -9,12 +9,12 @@ def docker_process(method):
     """Wrap function with only run once."""
     async def wrap_api(api, *args, **kwargs):
         """Return api wrapper."""
-        if api._lock.locked():
+        if api.lock.locked():
             _LOGGER.error(
                 "Can't execute %s while a task is in progress", method.__name__)
             return False

-        async with api._lock:
+        async with api.lock:
             return await method(api, *args, **kwargs)

     return wrap_api
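The rename from `api._lock` to `api.lock` matters because the decorator reaches into the wrapped object from outside its class. A self-contained sketch of the same guard pattern, with hypothetical names:

```python
import asyncio
import functools


def single_process(method):
    """Reject a call outright if another guarded call is still running."""
    @functools.wraps(method)
    async def wrapper(obj, *args, **kwargs):
        if obj.lock.locked():
            return False                 # busy: refuse instead of queueing
        async with obj.lock:
            return await method(obj, *args, **kwargs)
    return wrapper


class Worker:
    def __init__(self):
        self.lock = asyncio.Lock()

    @single_process
    async def run(self):
        await asyncio.sleep(0.1)
        return True
```

The design choice here is deliberate: a busy object returns `False` immediately rather than queueing callers behind the lock.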
@@ -6,14 +6,15 @@ import re

 import aiohttp
 from aiohttp.hdrs import CONTENT_TYPE
-import async_timeout

 from .const import (
-    FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
-    ATTR_VERSION, ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
+    FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_UUID,
+    ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
     HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
-from .dock.homeassistant import DockerHomeAssistant
-from .tools import JsonConfig, convert_to_ascii
+from .coresys import CoreSysAttributes
+from .docker.homeassistant import DockerHomeAssistant
+from .utils import convert_to_ascii
+from .utils.json import JsonConfig
 from .validate import SCHEMA_HASS_CONFIG

 _LOGGER = logging.getLogger(__name__)
@@ -21,30 +22,27 @@ _LOGGER = logging.getLogger(__name__)
 RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")


-class HomeAssistant(JsonConfig):
+class HomeAssistant(JsonConfig, CoreSysAttributes):
     """Hass core object for handle it."""

-    def __init__(self, config, loop, docker, updater):
+    def __init__(self, coresys):
         """Initialize hass object."""
         super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
-        self.config = config
-        self.loop = loop
-        self.updater = updater
-        self.docker = DockerHomeAssistant(config, loop, docker, self)
-        self.api_ip = docker.network.gateway
-        self.websession = aiohttp.ClientSession(
-            connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)
+        self.coresys = coresys
+        self.instance = DockerHomeAssistant(coresys)

-    async def prepare(self):
+    async def load(self):
         """Prepare HomeAssistant object."""
-        if not await self.docker.exists():
-            _LOGGER.info("No HomeAssistant docker %s found.", self.image)
-            if self.is_custom_image:
-                await self.install()
-            else:
-                await self.install_landingpage()
-        else:
-            await self.docker.attach()
+        if await self.instance.attach():
+            return
+
+        _LOGGER.info("No HomeAssistant docker %s found.", self.image)
+        await self.install_landingpage()
+
+    @property
+    def api_ip(self):
+        """Return IP of HomeAssistant instance."""
+        return self._docker.network.gateway

     @property
     def api_port(self):
@@ -55,7 +53,6 @@ class HomeAssistant(JsonConfig):
     def api_port(self, value):
         """Set network port for home-assistant instance."""
         self._data[ATTR_PORT] = value
-        self.save()

     @property
     def api_password(self):
@@ -66,7 +63,6 @@ class HomeAssistant(JsonConfig):
     def api_password(self, value):
         """Set password for home-assistant instance."""
         self._data[ATTR_PASSWORD] = value
-        self.save()

     @property
     def api_ssl(self):
@@ -77,7 +73,6 @@ class HomeAssistant(JsonConfig):
     def api_ssl(self, value):
         """Set SSL for home-assistant instance."""
         self._data[ATTR_SSL] = value
-        self.save()

     @property
     def api_url(self):
@@ -95,42 +90,47 @@ class HomeAssistant(JsonConfig):
     def watchdog(self, value):
         """Return True if the watchdog should protect Home-Assistant."""
         self._data[ATTR_WATCHDOG] = value
-        self.save()

     @property
     def version(self):
         """Return version of running homeassistant."""
-        return self.docker.version
+        return self.instance.version

     @property
     def last_version(self):
         """Return last available version of homeassistant."""
         if self.is_custom_image:
             return self._data.get(ATTR_LAST_VERSION)
-        return self.updater.version_homeassistant
+        return self._updater.version_homeassistant
+
+    @last_version.setter
+    def last_version(self, value):
+        """Set last available version of homeassistant."""
+        if value:
+            self._data[ATTR_LAST_VERSION] = value
+        else:
+            self._data.pop(ATTR_LAST_VERSION, None)

     @property
     def image(self):
         """Return image name of hass container."""
-        if ATTR_IMAGE in self._data:
+        if self._data.get(ATTR_IMAGE):
             return self._data[ATTR_IMAGE]
         return os.environ['HOMEASSISTANT_REPOSITORY']

+    @image.setter
+    def image(self, value):
+        """Set image name of hass container."""
+        if value:
+            self._data[ATTR_IMAGE] = value
+        else:
+            self._data.pop(ATTR_IMAGE, None)
+
     @property
     def is_custom_image(self):
         """Return True if a custom image is used."""
-        return ATTR_IMAGE in self._data
-
-    @property
-    def devices(self):
-        """Return extend device mapping."""
-        return self._data[ATTR_DEVICES]
-
-    @devices.setter
-    def devices(self, value):
-        """Set extend device mapping."""
-        self._data[ATTR_DEVICES] = value
-        self.save()
+        return all(attr in self._data for attr in
+                   (ATTR_IMAGE, ATTR_LAST_VERSION))

     @property
     def boot(self):
@@ -141,35 +141,23 @@ class HomeAssistant(JsonConfig):
     def boot(self, value):
         """Set home-assistant boot options."""
         self._data[ATTR_BOOT] = value
-        self.save()

-    def set_custom(self, image, version):
-        """Set a custom image for homeassistant."""
-        # reset
-        if image is None and version is None:
-            self._data.pop(ATTR_IMAGE, None)
-            self._data.pop(ATTR_VERSION, None)
-
-            self.docker.image = self.image
-        else:
-            if image:
-                self._data[ATTR_IMAGE] = image
-                self.docker.image = image
-            if version:
-                self._data[ATTR_VERSION] = version
-        self.save()
+    @property
+    def uuid(self):
+        """Return a UUID of this HomeAssistant."""
+        return self._data[ATTR_UUID]

     async def install_landingpage(self):
         """Install a landingpage."""
         _LOGGER.info("Setup HomeAssistant landingpage")
         while True:
-            if await self.docker.install('landingpage'):
+            if await self.instance.install('landingpage'):
                 break
             _LOGGER.warning("Fails install landingpage, retry after 60sec")
-            await asyncio.sleep(60, loop=self.loop)
+            await asyncio.sleep(60, loop=self._loop)

         # run landingpage after installation
-        await self.docker.run()
+        await self.instance.run()

     async def install(self):
         """Install HomeAssistant."""
@@ -177,85 +165,93 @@ class HomeAssistant(JsonConfig):
         while True:
             # read homeassistant tag and install it
             if not self.last_version:
-                await self.updater.fetch_data()
+                await self._updater.reload()

             tag = self.last_version
-            if tag and await self.docker.install(tag):
+            if tag and await self.instance.install(tag):
                 break
             _LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
-            await asyncio.sleep(60, loop=self.loop)
+            await asyncio.sleep(60, loop=self._loop)

         # finishing
         _LOGGER.info("HomeAssistant docker now installed")
         if self.boot:
-            await self.docker.run()
-        await self.docker.cleanup()
+            await self.instance.run()
+        await self.instance.cleanup()

     async def update(self, version=None):
         """Update HomeAssistant version."""
         version = version or self.last_version
-        running = await self.docker.is_running()
+        running = await self.instance.is_running()
+        exists = await self.instance.exists()

-        if version == self.docker.version:
-            _LOGGER.warning("Version %s is already installed", version)
+        if exists and version == self.instance.version:
+            _LOGGER.info("Version %s is already installed", version)
             return False

         try:
-            return await self.docker.update(version)
+            return await self.instance.update(version)
         finally:
             if running:
-                await self.docker.run()
+                await self.instance.run()

     def run(self):
         """Run HomeAssistant docker.

         Return a coroutine.
         """
-        return self.docker.run()
+        return self.instance.run()

     def stop(self):
         """Stop HomeAssistant docker.

         Return a coroutine.
         """
-        return self.docker.stop()
+        return self.instance.stop()

     def restart(self):
         """Restart HomeAssistant docker.

         Return a coroutine.
         """
-        return self.docker.restart()
+        return self.instance.restart()

     def logs(self):
         """Get HomeAssistant docker logs.

         Return a coroutine.
         """
-        return self.docker.logs()
+        return self.instance.logs()
+
+    def stats(self):
+        """Return stats of HomeAssistant.
+
+        Return a coroutine.
+        """
+        return self.instance.stats()

     def is_running(self):
         """Return True if docker container is running.

         Return a coroutine.
         """
-        return self.docker.is_running()
+        return self.instance.is_running()

     def is_initialize(self):
         """Return True if a docker container exists.

         Return a coroutine.
         """
-        return self.docker.is_initialize()
+        return self.instance.is_initialize()

     @property
     def in_progress(self):
         """Return True if a task is in progress."""
-        return self.docker.in_progress
+        return self.instance.in_progress

     async def check_config(self):
         """Run homeassistant config check."""
-        exit_code, log = await self.docker.execute_command(
+        exit_code, log = await self.instance.execute_command(
             "python3 -m homeassistant -c /config --script check_config"
         )
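One detail worth calling out in the `update()` rewrite above: the running state is captured before the update, and the `finally` block restarts the container no matter how the update ended. A reduced sketch of that idiom, with hypothetical names:

```python
async def update_and_restore(instance, version):
    """Update, then restart the container iff it was running before."""
    running = await instance.is_running()
    try:
        return await instance.update(version)
    finally:
        if running:
            await instance.run()  # runs on success *and* on failure
```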
@@ -271,15 +267,16 @@ class HomeAssistant(JsonConfig):

     async def check_api_state(self):
         """Check if Home-Assistant up and running."""
-        url = "{}/api/".format(self.api_url)
+        url = f"{self.api_url}/api/"
         header = {CONTENT_TYPE: CONTENT_TYPE_JSON}

         if self.api_password:
             header.update({HEADER_HA_ACCESS: self.api_password})

         try:
-            async with async_timeout.timeout(30, loop=self.loop):
-                async with self.websession.get(url, headers=header) as request:
-                    status = request.status
+            # pylint: disable=bad-continuation
+            async with self._websession_ssl.get(
+                    url, headers=header, timeout=30) as request:
+                status = request.status

         except (asyncio.TimeoutError, aiohttp.ClientError):
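A standalone sketch of the health probe this hunk rewrites. The URL and password are placeholders, and the sketch uses `aiohttp.ClientTimeout` rather than the bare `timeout=30` of aiohttp releases from that era; `X-HA-Access` is the legacy API-password header the `HEADER_HA_ACCESS` constant refers to:

```python
import asyncio
import aiohttp


async def api_alive(url, password=None):
    """Return True if the Home Assistant API answers within 30 s."""
    headers = {"Content-Type": "application/json"}
    if password:
        headers["X-HA-Access"] = password  # legacy API-password header

    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(
                    url, headers=headers,
                    timeout=aiohttp.ClientTimeout(total=30)) as resp:
                return resp.status == 200
    except (asyncio.TimeoutError, aiohttp.ClientError):
        return False
```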
1  hassio/misc/__init__.py  Normal file
@@ -0,0 +1 @@
+"""Special object and tools for Hass.io."""
@@ -11,8 +11,9 @@ COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
 class DNSForward(object):
     """Manage DNS forwarding to internal DNS."""

-    def __init__(self):
+    def __init__(self, loop):
         """Initialize DNS forwarding."""
+        self.loop = loop
         self.proc = None

     async def start(self):
@@ -23,9 +24,10 @@ class DNSForward(object):
                 stdin=asyncio.subprocess.DEVNULL,
                 stdout=asyncio.subprocess.DEVNULL,
                 stderr=asyncio.subprocess.DEVNULL,
+                loop=self.loop
             )
         except OSError as err:
-            _LOGGER.error("Can't start DNS forwarding -> %s", err)
+            _LOGGER.error("Can't start DNS forwarding: %s", err)
         else:
             _LOGGER.info("Start DNS port forwarding for host add-ons")
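A minimal sketch of the forwarder spawn; note the diff threads the event loop explicitly (idiomatic for the Python of that era), while modern asyncio infers the loop, so the sketch omits the `loop=` argument. `socat` must be installed on the host:

```python
import asyncio

COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"


async def start_dns_forward():
    """Spawn socat to forward host DNS queries to Docker's resolver."""
    try:
        proc = await asyncio.create_subprocess_shell(
            COMMAND,
            stdin=asyncio.subprocess.DEVNULL,
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.DEVNULL,
        )
    except OSError as err:
        print(f"Can't start DNS forwarding: {err}")
        return None
    return proc
```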
@@ -6,7 +6,7 @@ import re

 import pyudev

-from .const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES
+from ..const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES

 _LOGGER = logging.getLogger(__name__)

@@ -20,6 +20,7 @@ PROC_STAT = Path("/proc/stat")
 RE_BOOT_TIME = re.compile(r"btime (\d+)")

 GPIO_DEVICES = Path("/sys/class/gpio")
+RE_TTY = re.compile(r"tty[A-Z]+")


 class Hardware(object):
@@ -34,7 +35,7 @@ class Hardware(object):
         """Return all serial and connected devices."""
         dev_list = set()
         for device in self.context.list_devices(subsystem='tty'):
-            if 'ID_VENDOR' in device:
+            if 'ID_VENDOR' in device or RE_TTY.search(device.device_node):
                 dev_list.add(device.device_node)

         return dev_list
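A standalone sketch of the pyudev enumeration this hunk broadens. USB serial adapters carry an `ID_VENDOR` property; raw UARTs (`ttyAMA0`, `ttyS0`, ...) do not, which is exactly the gap the `RE_TTY` fallback closes. The `or ''` guard is a defensive addition of this sketch, not part of the diff:

```python
import re
import pyudev

RE_TTY = re.compile(r"tty[A-Z]+")

context = pyudev.Context()
dev_list = set()
for device in context.list_devices(subsystem='tty'):
    # accept USB adapters (ID_VENDOR) and raw UART device nodes
    if 'ID_VENDOR' in device or RE_TTY.search(device.device_node or ''):
        dev_list.add(device.device_node)

print(sorted(dev_list))
```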
@@ -68,8 +69,8 @@ class Hardware(object):
             with ASOUND_DEVICES.open('r') as devices_file:
                 devices = devices_file.read()
         except OSError as err:
-            _LOGGER.error("Can't read asound data -> %s", err)
-            return
+            _LOGGER.error("Can't read asound data: %s", err)
+            return {}

         audio_list = {}

@@ -108,13 +109,13 @@ class Hardware(object):
             with PROC_STAT.open("r") as stat_file:
                 stats = stat_file.read()
         except OSError as err:
-            _LOGGER.error("Can't read stat data -> %s", err)
-            return
+            _LOGGER.error("Can't read stat data: %s", err)
+            return None

         # parse stat file
         found = RE_BOOT_TIME.search(stats)
         if not found:
             _LOGGER.error("Can't find last boot time!")
-            return
+            return None

         return datetime.utcfromtimestamp(int(found.group(1)))
@@ -5,7 +5,7 @@ import logging

 import async_timeout

-from .const import (
+from ..const import (
     SOCKET_HC, ATTR_LAST_VERSION, ATTR_VERSION, ATTR_TYPE, ATTR_FEATURES,
     ATTR_HOSTNAME, ATTR_OS)

75  hassio/misc/scheduler.py  Normal file
@@ -0,0 +1,75 @@
+"""Schedule for HassIO."""
+import logging
+from datetime import date, datetime, time, timedelta
+
+_LOGGER = logging.getLogger(__name__)
+
+INTERVAL = 'interval'
+REPEAT = 'repeat'
+CALL = 'callback'
+TASK = 'task'
+
+
+class Scheduler(object):
+    """Schedule task inside HassIO."""
+
+    def __init__(self, loop):
+        """Initialize task schedule."""
+        self.loop = loop
+        self._data = {}
+        self.suspend = False
+
+    def register_task(self, coro_callback, interval, repeat=True):
+        """Schedule a coroutine.
+
+        The coroutine needs to be a callback without arguments.
+        """
+        task_id = hash(coro_callback)
+
+        # generate data
+        opts = {
+            CALL: coro_callback,
+            INTERVAL: interval,
+            REPEAT: repeat,
+        }
+
+        # schedule task
+        self._data[task_id] = opts
+        self._schedule_task(interval, task_id)
+
+        return task_id
+
+    def _run_task(self, task_id):
+        """Run a scheduled task."""
+        data = self._data[task_id]
+
+        if not self.suspend:
+            self.loop.create_task(data[CALL]())
+
+        if data[REPEAT]:
+            self._schedule_task(data[INTERVAL], task_id)
+        else:
+            self._data.pop(task_id)
+
+    def _schedule_task(self, interval, task_id):
+        """Schedule a task on loop."""
+        if isinstance(interval, (int, float)):
+            job = self.loop.call_later(interval, self._run_task, task_id)
+        elif isinstance(interval, time):
+            today = datetime.combine(date.today(), interval)
+            tomorrow = datetime.combine(
+                date.today() + timedelta(days=1), interval)
+
+            # check if we run it today or next day
+            if today > datetime.today():
+                calc = today
+            else:
+                calc = tomorrow
+
+            job = self.loop.call_at(calc.timestamp(), self._run_task, task_id)
+        else:
+            _LOGGER.fatal("Unknown interval %s (type: %s) for scheduler %s",
+                          interval, type(interval), task_id)
+
+        # Store job
+        self._data[task_id][TASK] = job
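A usage sketch of how the new interval forms are meant to be combined; the callbacks and intervals are hypothetical. One caveat worth noting: `loop.call_at()` expects timestamps on the loop's monotonic clock, while `calc.timestamp()` is epoch-based, so the wall-clock branch may need a conversion in practice:

```python
import asyncio
from datetime import time

loop = asyncio.get_event_loop()
scheduler = Scheduler(loop)


async def refresh_updates():
    print("refresh update information")


async def nightly_cleanup():
    print("run nightly cleanup")

# hash(callback) is the task id, so each coroutine registers once
scheduler.register_task(refresh_updates, 300)            # every 300 s
scheduler.register_task(nightly_cleanup, time(hour=2))   # daily at 02:00
loop.run_forever()
```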
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1,56 +0,0 @@
-"""Schedule for HassIO."""
-import logging
-
-_LOGGER = logging.getLogger(__name__)
-
-SEC = 'seconds'
-REPEAT = 'repeat'
-CALL = 'callback'
-TASK = 'task'
-
-
-class Scheduler(object):
-    """Schedule task inside HassIO."""
-
-    def __init__(self, loop):
-        """Initialize task schedule."""
-        self.loop = loop
-        self._data = {}
-        self.suspend = False
-
-    def register_task(self, coro_callback, seconds, repeat=True,
-                      now=False):
-        """Schedule a coroutine.
-
-        The coroutien need to be a callback without arguments.
-        """
-        idx = hash(coro_callback)
-
-        # generate data
-        opts = {
-            CALL: coro_callback,
-            SEC: seconds,
-            REPEAT: repeat,
-        }
-        self._data[idx] = opts
-
-        # schedule task
-        if now:
-            self._run_task(idx)
-        else:
-            task = self.loop.call_later(seconds, self._run_task, idx)
-            self._data[idx][TASK] = task
-
-        return idx
-
-    def _run_task(self, idx):
-        """Run a scheduled task."""
-        data = self._data.pop(idx)
-
-        if not self.suspend:
-            self.loop.create_task(data[CALL]())
-
-        if data[REPEAT]:
-            task = self.loop.call_later(data[SEC], self._run_task, idx)
-            data[TASK] = task
-            self._data[idx] = data
@@ -6,95 +6,99 @@ from pathlib import Path
 import tarfile

 from .snapshot import Snapshot
-from .util import create_slug
+from .utils import create_slug
 from ..const import (
     ATTR_SLUG, FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)


-class SnapshotsManager(object):
+class SnapshotsManager(CoreSysAttributes):
     """Manage snapshots."""

-    def __init__(self, config, loop, sheduler, addons, homeassistant):
+    def __init__(self, coresys):
         """Initialize a snapshot manager."""
-        self.config = config
-        self.loop = loop
-        self.sheduler = sheduler
-        self.addons = addons
-        self.homeassistant = homeassistant
-        self.snapshots = {}
-        self._lock = asyncio.Lock(loop=loop)
+        self.coresys = coresys
+        self.snapshots_obj = {}
+        self.lock = asyncio.Lock(loop=coresys.loop)

     @property
     def list_snapshots(self):
         """Return a list of all snapshot object."""
-        return set(self.snapshots.values())
+        return set(self.snapshots_obj.values())

     def get(self, slug):
         """Return snapshot object."""
-        return self.snapshots.get(slug)
+        return self.snapshots_obj.get(slug)

     def _create_snapshot(self, name, sys_type):
         """Initialize a new snapshot object from name."""
         date_str = datetime.utcnow().isoformat()
         slug = create_slug(name, date_str)
-        tar_file = Path(self.config.path_backup, "{}.tar".format(slug))
+        tar_file = Path(self._config.path_backup, "{}.tar".format(slug))

         # init object
-        snapshot = Snapshot(self.config, self.loop, tar_file)
+        snapshot = Snapshot(self.coresys, tar_file)
         snapshot.create(slug, name, date_str, sys_type)

         # set general data
-        snapshot.snapshot_homeassistant(self.homeassistant)
-        snapshot.repositories = self.config.addons_repositories
+        snapshot.store_homeassistant()
+        snapshot.store_repositories()

         return snapshot

+    def load(self):
+        """Load existing snapshots data.
+
+        Return a coroutine.
+        """
+        return self.reload()
+
     async def reload(self):
         """Load existing backups."""
-        self.snapshots = {}
+        self.snapshots_obj = {}

         async def _load_snapshot(tar_file):
             """Internal function to load snapshot."""
-            snapshot = Snapshot(self.config, self.loop, tar_file)
+            snapshot = Snapshot(self.coresys, tar_file)
             if await snapshot.load():
-                self.snapshots[snapshot.slug] = snapshot
+                self.snapshots_obj[snapshot.slug] = snapshot

         tasks = [_load_snapshot(tar_file) for tar_file in
-                 self.config.path_backup.glob("*.tar")]
+                 self._config.path_backup.glob("*.tar")]

         _LOGGER.info("Found %d snapshot files", len(tasks))
         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks, loop=self._loop)

     def remove(self, snapshot):
         """Remove a snapshot."""
         try:
             snapshot.tar_file.unlink()
-            self.snapshots.pop(snapshot.slug, None)
+            self.snapshots_obj.pop(snapshot.slug, None)
         except OSError as err:
-            _LOGGER.error("Can't remove snapshot %s -> %s", snapshot.slug, err)
+            _LOGGER.error("Can't remove snapshot %s: %s", snapshot.slug, err)
             return False

         return True

     async def do_snapshot_full(self, name=""):
         """Create a full snapshot."""
-        if self._lock.locked():
+        if self.lock.locked():
             _LOGGER.error("A snapshot/restore process is already running")
             return False

         snapshot = self._create_snapshot(name, SNAPSHOT_FULL)
         _LOGGER.info("Full-Snapshot %s start", snapshot.slug)
         try:
-            self.sheduler.suspend = True
-            await self._lock.acquire()
+            self._scheduler.suspend = True
+            await self.lock.acquire()

             async with snapshot:
                 # snapshot addons
                 tasks = []
-                for addon in self.addons.list_addons:
+                for addon in self._addons.list_addons:
                     if not addon.is_installed:
                         continue
                     tasks.append(snapshot.import_addon(addon))
@@ -102,27 +106,28 @@ class SnapshotsManager(object):
             if tasks:
                 _LOGGER.info("Full-Snapshot %s run %d addons",
                              snapshot.slug, len(tasks))
-                await asyncio.wait(tasks, loop=self.loop)
+                await asyncio.wait(tasks, loop=self._loop)

                 # snapshot folders
                 _LOGGER.info("Full-Snapshot %s store folders", snapshot.slug)
                 await snapshot.store_folders()

-                _LOGGER.info("Full-Snapshot %s done", snapshot.slug)
-                self.snapshots[snapshot.slug] = snapshot
-                return True
-
         except (OSError, ValueError, tarfile.TarError) as err:
-            _LOGGER.info("Full-Snapshot %s error -> %s", snapshot.slug, err)
+            _LOGGER.info("Full-Snapshot %s error: %s", snapshot.slug, err)
             return False

+        else:
+            _LOGGER.info("Full-Snapshot %s done", snapshot.slug)
+            self.snapshots_obj[snapshot.slug] = snapshot
+            return True
+
         finally:
-            self.sheduler.suspend = False
-            self._lock.release()
+            self._scheduler.suspend = False
+            self.lock.release()

     async def do_snapshot_partial(self, name="", addons=None, folders=None):
         """Create a partial snapshot."""
-        if self._lock.locked():
+        if self.lock.locked():
             _LOGGER.error("A snapshot/restore process is already running")
             return False
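The structural change in this and the following snapshot/restore hunks is the same everywhere: the success bookkeeping moves out of the `try` body into an `else` clause. A minimal illustration of why that matters, with hypothetical function names:

```python
def guarded_snapshot(work, cleanup):
    """try/except/else/finally: only real success reaches the else."""
    try:
        work()
    except OSError as err:
        print(f"snapshot error: {err}")
        return False
    else:
        # Reached only when work() raised nothing, so a late failure
        # can never be recorded as success.
        return True
    finally:
        cleanup()  # runs on every path
```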
@@ -132,42 +137,43 @@ class SnapshotsManager(object):

         _LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
         try:
-            self.sheduler.suspend = True
-            await self._lock.acquire()
+            self._scheduler.suspend = True
+            await self.lock.acquire()

             async with snapshot:
                 # snapshot addons
                 tasks = []
                 for slug in addons:
-                    addon = self.addons.get(slug)
+                    addon = self._addons.get(slug)
                     if addon.is_installed:
                         tasks.append(snapshot.import_addon(addon))

                 if tasks:
                     _LOGGER.info("Partial-Snapshot %s run %d addons",
                                  snapshot.slug, len(tasks))
-                    await asyncio.wait(tasks, loop=self.loop)
+                    await asyncio.wait(tasks, loop=self._loop)

                 # snapshot folders
                 _LOGGER.info("Partial-Snapshot %s store folders %s",
                              snapshot.slug, folders)
                 await snapshot.store_folders(folders)

-                _LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
-                self.snapshots[snapshot.slug] = snapshot
-                return True
-
         except (OSError, ValueError, tarfile.TarError) as err:
-            _LOGGER.info("Partial-Snapshot %s error -> %s", snapshot.slug, err)
+            _LOGGER.info("Partial-Snapshot %s error: %s", snapshot.slug, err)
             return False

+        else:
+            _LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
+            self.snapshots_obj[snapshot.slug] = snapshot
+            return True
+
         finally:
-            self.sheduler.suspend = False
-            self._lock.release()
+            self._scheduler.suspend = False
+            self.lock.release()

     async def do_restore_full(self, snapshot):
         """Restore a snapshot."""
-        if self._lock.locked():
+        if self.lock.locked():
             _LOGGER.error("A snapshot/restore process is already running")
             return False
@@ -178,19 +184,19 @@ class SnapshotsManager(object):

         _LOGGER.info("Full-Restore %s start", snapshot.slug)
         try:
-            self.sheduler.suspend = True
-            await self._lock.acquire()
+            self._scheduler.suspend = True
+            await self.lock.acquire()

             async with snapshot:
                 # stop system
                 tasks = []
-                tasks.append(self.homeassistant.stop())
+                tasks.append(self._homeassistant.stop())

-                for addon in self.addons.list_addons:
+                for addon in self._addons.list_addons:
                     if addon.is_installed:
                         tasks.append(addon.stop())

-                await asyncio.wait(tasks, loop=self.loop)
+                await asyncio.wait(tasks, loop=self._loop)

                 # restore folders
                 _LOGGER.info("Full-Restore %s restore folders", snapshot.slug)
@@ -199,17 +205,19 @@ class SnapshotsManager(object):
                 # start homeassistant restore
                 _LOGGER.info("Full-Restore %s restore Home-Assistant",
                              snapshot.slug)
-                snapshot.restore_homeassistant(self.homeassistant)
-                task_hass = self.loop.create_task(
-                    self.homeassistant.update(snapshot.homeassistant_version))
+                snapshot.restore_homeassistant()
+                task_hass = self._loop.create_task(
+                    self._homeassistant.update(snapshot.homeassistant_version))

                 # restore repositories
-                await self.addons.load_repositories(snapshot.repositories)
+                _LOGGER.info("Full-Restore %s restore Repositories",
+                             snapshot.slug)
+                await snapshot.restore_repositories()

                 # restore addons
                 tasks = []
                 actual_addons = \
-                    set(addon.slug for addon in self.addons.list_addons
+                    set(addon.slug for addon in self._addons.list_addons
                         if addon.is_installed)
                 restore_addons = \
                     set(data[ATTR_SLUG] for data in snapshot.addons)
@@ -219,14 +227,14 @@ class SnapshotsManager(object):
                              snapshot.slug, restore_addons, remove_addons)

                 for slug in remove_addons:
-                    addon = self.addons.get(slug)
+                    addon = self._addons.get(slug)
                     if addon:
                         tasks.append(addon.uninstall())
                     else:
-                        _LOGGER.warning("Can't remove addon %s", slug)
+                        _LOGGER.warning("Can't remove addon %s", snapshot.slug)

                 for slug in restore_addons:
-                    addon = self.addons.get(slug)
+                    addon = self._addons.get(slug)
                     if addon:
                         tasks.append(snapshot.export_addon(addon))
                     else:
@@ -235,29 +243,30 @@ class SnapshotsManager(object):
                 if tasks:
                     _LOGGER.info("Full-Restore %s restore addons tasks %d",
                                  snapshot.slug, len(tasks))
-                    await asyncio.wait(tasks, loop=self.loop)
+                    await asyncio.wait(tasks, loop=self._loop)

                 # finish homeassistant task
                 _LOGGER.info("Full-Restore %s wait until homeassistant ready",
                              snapshot.slug)
                 await task_hass
-                await self.homeassistant.run()
+                await self._homeassistant.run()

-            _LOGGER.info("Full-Restore %s done", snapshot.slug)
-            return True
-
         except (OSError, ValueError, tarfile.TarError) as err:
-            _LOGGER.info("Full-Restore %s error -> %s", slug, err)
+            _LOGGER.info("Full-Restore %s error: %s", snapshot.slug, err)
             return False

+        else:
+            _LOGGER.info("Full-Restore %s done", snapshot.slug)
+            return True
+
         finally:
-            self.sheduler.suspend = False
-            self._lock.release()
+            self._scheduler.suspend = False
+            self.lock.release()

     async def do_restore_partial(self, snapshot, homeassistant=False,
                                  addons=None, folders=None):
         """Restore a snapshot."""
-        if self._lock.locked():
+        if self.lock.locked():
             _LOGGER.error("A snapshot/restore process is already running")
             return False
@@ -266,14 +275,14 @@ class SnapshotsManager(object):

         _LOGGER.info("Partial-Restore %s start", snapshot.slug)
         try:
-            self.sheduler.suspend = True
-            await self._lock.acquire()
+            self._scheduler.suspend = True
+            await self.lock.acquire()

             async with snapshot:
                 tasks = []

                 if FOLDER_HOMEASSISTANT in folders:
-                    await self.homeassistant.stop()
+                    await self._homeassistant.stop()

                 if folders:
                     _LOGGER.info("Partial-Restore %s restore folders %s",
@@ -283,32 +292,34 @@ class SnapshotsManager(object):
                 if homeassistant:
                     _LOGGER.info("Partial-Restore %s restore Home-Assistant",
                                  snapshot.slug)
-                    snapshot.restore_homeassistant(self.homeassistant)
-                    tasks.append(self.homeassistant.update(
+                    snapshot.restore_homeassistant()
+                    tasks.append(self._homeassistant.update(
                         snapshot.homeassistant_version))

                 for slug in addons:
-                    addon = self.addons.get(slug)
+                    addon = self._addons.get(slug)
                     if addon:
                         tasks.append(snapshot.export_addon(addon))
                     else:
-                        _LOGGER.warning("Can't restore addon %s", slug)
+                        _LOGGER.warning("Can't restore addon %s",
+                                        snapshot.slug)

                 if tasks:
                     _LOGGER.info("Partial-Restore %s run %d tasks",
                                  snapshot.slug, len(tasks))
-                    await asyncio.wait(tasks, loop=self.loop)
+                    await asyncio.wait(tasks, loop=self._loop)

                 # make sure homeassistant runs again
-                await self.homeassistant.run()
+                await self._homeassistant.run()

-            _LOGGER.info("Partial-Restore %s done", snapshot.slug)
-            return True
-
         except (OSError, ValueError, tarfile.TarError) as err:
-            _LOGGER.info("Partial-Restore %s error -> %s", slug, err)
+            _LOGGER.info("Partial-Restore %s error: %s", snapshot.slug, err)
             return False

+        else:
+            _LOGGER.info("Partial-Restore %s done", snapshot.slug)
+            return True
+
         finally:
-            self.sheduler.suspend = False
-            self._lock.release()
+            self._scheduler.suspend = False
+            self.lock.release()
@@ -10,23 +10,24 @@ import voluptuous as vol
 from voluptuous.humanize import humanize_error

 from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
-from .util import remove_folder
+from .utils import remove_folder
 from ..const import (
     ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
-    ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_DEVICES,
-    ATTR_IMAGE, ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT)
-from ..tools import write_json_file
+    ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_IMAGE,
+    ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT,
+    ATTR_LAST_VERSION)
+from ..coresys import CoreSysAttributes
+from ..utils.json import write_json_file

 _LOGGER = logging.getLogger(__name__)


-class Snapshot(object):
+class Snapshot(CoreSysAttributes):
     """A single hassio snapshot."""

-    def __init__(self, config, loop, tar_file):
+    def __init__(self, coresys, tar_file):
         """Initialize a snapshot."""
-        self.loop = loop
-        self.config = config
+        self.coresys = coresys
         self.tar_file = tar_file
         self._data = {}
         self._tmp = None
@@ -82,14 +83,14 @@ class Snapshot(object):
         self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] = value

     @property
-    def homeassistant_devices(self):
-        """Return snapshot homeassistant devices."""
-        return self._data[ATTR_HOMEASSISTANT].get(ATTR_DEVICES)
+    def homeassistant_last_version(self):
+        """Return snapshot homeassistant last version (custom)."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_LAST_VERSION)

-    @homeassistant_devices.setter
-    def homeassistant_devices(self, value):
-        """Set snapshot homeassistant devices."""
-        self._data[ATTR_HOMEASSISTANT][ATTR_DEVICES] = value
+    @homeassistant_last_version.setter
+    def homeassistant_last_version(self, value):
+        """Set snapshot homeassistant last version (custom)."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_LAST_VERSION] = value

     @property
     def homeassistant_image(self):
@@ -166,43 +167,8 @@ class Snapshot(object):
         self._data[ATTR_DATE] = date
         self._data[ATTR_TYPE] = sys_type

-        # init other constructs
-        self._data[ATTR_HOMEASSISTANT] = {}
-        self._data[ATTR_ADDONS] = []
-        self._data[ATTR_REPOSITORIES] = []
-        self._data[ATTR_FOLDERS] = []
-
-    def snapshot_homeassistant(self, homeassistant):
-        """Read all data from homeassistant object."""
-        self.homeassistant_version = homeassistant.version
-        self.homeassistant_devices = homeassistant.devices
-        self.homeassistant_watchdog = homeassistant.watchdog
-        self.homeassistant_boot = homeassistant.boot
-
-        # custom image
-        if homeassistant.is_custom_image:
-            self.homeassistant_image = homeassistant.image
-
-        # api
-        self.homeassistant_port = homeassistant.api_port
-        self.homeassistant_ssl = homeassistant.api_ssl
-        self.homeassistant_password = homeassistant.api_password
-
-    def restore_homeassistant(self, homeassistant):
-        """Write all data to homeassistant object."""
-        homeassistant.devices = self.homeassistant_devices
-        homeassistant.watchdog = self.homeassistant_watchdog
-        homeassistant.boot = self.homeassistant_boot
-
-        # custom image
-        if self.homeassistant_image:
-            homeassistant.set_custom(
-                self.homeassistant_image, self.homeassistant_version)
-
-        # api
-        homeassistant.api_port = self.homeassistant_port
-        homeassistant.api_ssl = self.homeassistant_ssl
-        homeassistant.api_password = self.homeassistant_password
+        # Add defaults
+        self._data = SCHEMA_SNAPSHOT(self._data)

     async def load(self):
         """Read snapshot.json from tar file."""
@@ -218,24 +184,24 @@ class Snapshot(object):

         # read snapshot.json
         try:
-            raw = await self.loop.run_in_executor(None, _load_file)
+            raw = await self._loop.run_in_executor(None, _load_file)
         except (tarfile.TarError, KeyError) as err:
             _LOGGER.error(
-                "Can't read snapshot tarfile %s -> %s", self.tar_file, err)
+                "Can't read snapshot tarfile %s: %s", self.tar_file, err)
             return False

         # parse data
         try:
             raw_dict = json.loads(raw)
         except json.JSONDecodeError as err:
-            _LOGGER.error("Can't read data for %s -> %s", self.tar_file, err)
+            _LOGGER.error("Can't read data for %s: %s", self.tar_file, err)
             return False

         # validate
         try:
             self._data = SCHEMA_SNAPSHOT(raw_dict)
         except vol.Invalid as err:
-            _LOGGER.error("Can't validate data for %s -> %s", self.tar_file,
+            _LOGGER.error("Can't validate data for %s: %s", self.tar_file,
                           humanize_error(raw_dict, err))
             return False

@@ -243,7 +209,7 @@ class Snapshot(object):

     async def __aenter__(self):
         """Async context to open a snapshot."""
-        self._tmp = TemporaryDirectory(dir=str(self.config.path_tmp))
+        self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))

         # create a snapshot
         if not self.tar_file.is_file():
@@ -255,7 +221,7 @@ class Snapshot(object):
             with tarfile.open(self.tar_file, "r:") as tar:
                 tar.extractall(path=self._tmp.name)

-        await self.loop.run_in_executor(None, _extract_snapshot)
+        await self._loop.run_in_executor(None, _extract_snapshot)

     async def __aexit__(self, exception_type, exception_value, traceback):
         """Async context to close a snapshot."""
@@ -268,7 +234,7 @@ class Snapshot(object):
         try:
             self._data = SCHEMA_SNAPSHOT(self._data)
         except vol.Invalid as err:
-            _LOGGER.error("Invalid data for %s -> %s", self.tar_file,
+            _LOGGER.error("Invalid data for %s: %s", self.tar_file,
                           humanize_error(self._data, err))
             raise ValueError("Invalid config") from None
@@ -278,11 +244,12 @@ class Snapshot(object):
             with tarfile.open(self.tar_file, "w:") as tar:
                 tar.add(self._tmp.name, arcname=".")

-        if write_json_file(Path(self._tmp.name, "snapshot.json"), self._data):
-            await self.loop.run_in_executor(None, _create_snapshot)
-        else:
-            _LOGGER.error("Can't write snapshot.json")
+        try:
+            write_json_file(Path(self._tmp.name, "snapshot.json"), self._data)
+            await self._loop.run_in_executor(None, _create_snapshot)
+        except (OSError, json.JSONDecodeError) as err:
+            _LOGGER.error("Can't write snapshot: %s", err)
+        finally:
             self._tmp.cleanup()

     async def import_addon(self, addon):
@@ -314,40 +281,40 @@ class Snapshot(object):

     async def store_folders(self, folder_list=None):
         """Backup hassio data into snapshot."""
-        folder_list = folder_list or ALL_FOLDERS
+        folder_list = set(folder_list or ALL_FOLDERS)

         def _folder_save(name):
             """Intenal function to snapshot a folder."""
             slug_name = name.replace("/", "_")
             snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
-            origin_dir = Path(self.config.path_hassio, name)
+            origin_dir = Path(self._config.path_hassio, name)

             try:
                 _LOGGER.info("Snapshot folder %s", name)
                 with tarfile.open(snapshot_tar, "w:gz",
                                   compresslevel=1) as tar_file:
                     tar_file.add(origin_dir, arcname=".")
-                _LOGGER.info("Snapshot folder %s done", name)

+                _LOGGER.info("Snapshot folder %s done", name)
                 self._data[ATTR_FOLDERS].append(name)
-            except tarfile.TarError as err:
-                _LOGGER.warning("Can't snapshot folder %s -> %s", name, err)
+            except (tarfile.TarError, OSError) as err:
+                _LOGGER.warning("Can't snapshot folder %s: %s", name, err)

         # run tasks
-        tasks = [self.loop.run_in_executor(None, _folder_save, folder)
+        tasks = [self._loop.run_in_executor(None, _folder_save, folder)
                  for folder in folder_list]
         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks, loop=self._loop)

     async def restore_folders(self, folder_list=None):
         """Backup hassio data into snapshot."""
-        folder_list = folder_list or ALL_FOLDERS
+        folder_list = set(folder_list or self.folders)

         def _folder_restore(name):
             """Intenal function to restore a folder."""
             slug_name = name.replace("/", "_")
             snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
-            origin_dir = Path(self.config.path_hassio, name)
+            origin_dir = Path(self._config.path_hassio, name)

             # clean old stuff
             if origin_dir.is_dir():

@@ -358,11 +325,56 @@ class Snapshot(object):
                 with tarfile.open(snapshot_tar, "r:gz") as tar_file:
                     tar_file.extractall(path=origin_dir)
                 _LOGGER.info("Restore folder %s done", name)
-            except tarfile.TarError as err:
-                _LOGGER.warning("Can't restore folder %s -> %s", name, err)
+            except (tarfile.TarError, OSError) as err:
+                _LOGGER.warning("Can't restore folder %s: %s", name, err)

         # run tasks
-        tasks = [self.loop.run_in_executor(None, _folder_restore, folder)
+        tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
                  for folder in folder_list]
         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks, loop=self._loop)
+    def store_homeassistant(self):
+        """Read all data from homeassistant object."""
+        self.homeassistant_version = self._homeassistant.version
+        self.homeassistant_watchdog = self._homeassistant.watchdog
+        self.homeassistant_boot = self._homeassistant.boot
+
+        # custom image
+        if self._homeassistant.is_custom_image:
+            self.homeassistant_image = self._homeassistant.image
+            self.homeassistant_last_version = self._homeassistant.last_version
+
+        # api
+        self.homeassistant_port = self._homeassistant.api_port
+        self.homeassistant_ssl = self._homeassistant.api_ssl
+        self.homeassistant_password = self._homeassistant.api_password
+
+    def restore_homeassistant(self):
+        """Write all data to homeassistant object."""
+        self._homeassistant.watchdog = self.homeassistant_watchdog
+        self._homeassistant.boot = self.homeassistant_boot
+
+        # custom image
+        if self.homeassistant_image:
+            self._homeassistant.image = self.homeassistant_image
+            self._homeassistant.last_version = self.homeassistant_last_version
+
+        # api
+        self._homeassistant.api_port = self.homeassistant_port
+        self._homeassistant.api_ssl = self.homeassistant_ssl
+        self._homeassistant.api_password = self.homeassistant_password
+
+        # save
+        self._homeassistant.save_data()
+
+    def store_repositories(self):
+        """Store repository list into snapshot."""
+        self.repositories = self._config.addons_repositories
+
+    def restore_repositories(self):
+        """Restore repositories from snapshot.
+
+        Return a coroutine.
+        """
+        return self._addons.load_repositories(self.repositories)
@@ -4,35 +4,47 @@ import voluptuous as vol

 from ..const import (
     ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
-    ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_DEVICES,
-    ATTR_IMAGE, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT,
+    ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_IMAGE,
+    ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT,
+    ATTR_LAST_VERSION,
     FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
     SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
-from ..validate import HASS_DEVICES, NETWORK_PORT
+from ..validate import NETWORK_PORT, REPOSITORIES, DOCKER_IMAGE

 ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]


+def unique_addons(addons_list):
+    """Validate that a add-on is unique."""
+    single = set([addon[ATTR_SLUG] for addon in addons_list])
+
+    if len(single) != len(addons_list):
+        raise vol.Invalid("Invalid addon list on snapshot!")
+    return addons_list
+
+
 # pylint: disable=no-value-for-parameter
 SCHEMA_SNAPSHOT = vol.Schema({
     vol.Required(ATTR_SLUG): vol.Coerce(str),
     vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
     vol.Required(ATTR_NAME): vol.Coerce(str),
     vol.Required(ATTR_DATE): vol.Coerce(str),
-    vol.Required(ATTR_HOMEASSISTANT): vol.Schema({
+    vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema({
         vol.Required(ATTR_VERSION): vol.Coerce(str),
-        vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
-        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
+        vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
+        vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
         vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
         vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
         vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
         vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
         vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
-    }),
-    vol.Optional(ATTR_FOLDERS, default=[]): [vol.In(ALL_FOLDERS)],
-    vol.Optional(ATTR_ADDONS, default=[]): [vol.Schema({
+    }, extra=vol.REMOVE_EXTRA),
+    vol.Optional(ATTR_FOLDERS, default=list):
+        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
+    vol.Optional(ATTR_ADDONS, default=list): vol.All([vol.Schema({
         vol.Required(ATTR_SLUG): vol.Coerce(str),
         vol.Required(ATTR_NAME): vol.Coerce(str),
         vol.Required(ATTR_VERSION): vol.Coerce(str),
-    })],
-    vol.Optional(ATTR_REPOSITORIES, default=[]): [vol.Url()],
+    }, extra=vol.REMOVE_EXTRA)], unique_addons),
+    vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
 }, extra=vol.ALLOW_EXTRA)
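A standalone sketch of the `unique_addons` post-validator introduced above; runnable with only `voluptuous` installed. The plain `"slug"` key and the `ADDONS` name are illustrative stand-ins for the real `ATTR_SLUG` constant and schema wiring:

```python
import voluptuous as vol


def unique_addons(addons_list):
    """Reject an add-on list that contains the same slug twice."""
    single = {addon["slug"] for addon in addons_list}
    if len(single) != len(addons_list):
        raise vol.Invalid("Invalid addon list on snapshot!")
    return addons_list


# vol.All validates each list item first, then runs the list-level check.
ADDONS = vol.All(
    [vol.Schema({vol.Required("slug"): str}, extra=vol.REMOVE_EXTRA)],
    unique_addons)

ADDONS([{"slug": "mqtt"}, {"slug": "ssh"}])       # passes
try:
    ADDONS([{"slug": "mqtt"}, {"slug": "mqtt"}])  # duplicate slug
except vol.Invalid as err:
    print("rejected:", err)
```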
hassio/supervisor.py (new file, 82 lines)
@@ -0,0 +1,82 @@
+"""HomeAssistant control object."""
+import logging
+
+from .coresys import CoreSysAttributes
+from .docker.supervisor import DockerSupervisor
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class Supervisor(CoreSysAttributes):
+    """Hass core object for handle it."""
+
+    def __init__(self, coresys):
+        """Initialize hass object."""
+        self.coresys = coresys
+        self.instance = DockerSupervisor(coresys)
+
+    async def load(self):
+        """Prepare HomeAssistant object."""
+        if not await self.instance.attach():
+            _LOGGER.fatal("Can't setup supervisor docker container!")
+        await self.instance.cleanup()
+
+    @property
+    def need_update(self):
+        """Return True if a update is available."""
+        return self.version != self.last_version
+
+    @property
+    def version(self):
+        """Return version of running homeassistant."""
+        return self.instance.version
+
+    @property
+    def last_version(self):
+        """Return last available version of homeassistant."""
+        return self._updater.version_hassio
+
+    @property
+    def image(self):
+        """Return image name of hass containter."""
+        return self.instance.image
+
+    @property
+    def arch(self):
+        """Return arch of hass.io containter."""
+        return self.instance.arch
+
+    async def update(self, version=None):
+        """Update HomeAssistant version."""
+        version = version or self.last_version
+
+        if version == self._supervisor.version:
+            _LOGGER.warning("Version %s is already installed", version)
+            return
+
+        _LOGGER.info("Update supervisor to version %s", version)
+        if await self.instance.install(version):
+            self._loop.call_later(1, self._loop.stop)
+            return True
+
+        _LOGGER.error("Update of hass.io fails!")
+        return False
+
+    @property
+    def in_progress(self):
+        """Return True if a task is in progress."""
+        return self.instance.in_progress
+
+    def logs(self):
+        """Get Supervisor docker logs.
+
+        Return a coroutine.
+        """
+        return self.instance.logs()
+
+    def stats(self):
+        """Return stats of Supervisor.
+
+        Return a coroutine.
+        """
+        return self.instance.stats()
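The underscore attributes used above (`self._updater`, `self._loop`, `self._supervisor`) are not defined in this file; they come from the `CoreSysAttributes` mixin. A hedged, minimal sketch of that pattern follows; the `CoreSys` container and its member names here are illustrative, not the actual hassio implementation:

```python
class CoreSys:
    """Illustrative central container for objects shared across the core."""

    def __init__(self, loop, updater):
        self.loop = loop          # asyncio event loop
        self.updater = updater    # version information source


class CoreSysAttributes:
    """Mixin: expose coresys members as underscore-prefixed properties."""

    coresys = None

    @property
    def _loop(self):
        return self.coresys.loop

    @property
    def _updater(self):
        return self.coresys.updater
```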
hassio/tasks.py (135 lines changed)
@@ -1,29 +1,61 @@
"""Multible tasks."""
|
"""Multible tasks."""
|
||||||
import asyncio
|
import asyncio
|
||||||
from datetime import datetime
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
from .coresys import CoreSysAttributes
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def api_sessions_cleanup(config):
|
class Tasks(CoreSysAttributes):
|
||||||
"""Create scheduler task for cleanup api sessions."""
|
"""Handle Tasks inside HassIO."""
|
||||||
async def _api_sessions_cleanup():
|
|
||||||
"""Cleanup old api sessions."""
|
|
||||||
now = datetime.now()
|
|
||||||
for session, until_valid in config.security_sessions.items():
|
|
||||||
if now >= until_valid:
|
|
||||||
config.drop_security_session(session)
|
|
||||||
|
|
||||||
return _api_sessions_cleanup
|
RUN_UPDATE_SUPERVISOR = 29100
|
||||||
|
RUN_UPDATE_ADDONS = 57600
|
||||||
|
|
||||||
|
RUN_RELOAD_ADDONS = 21600
|
||||||
|
RUN_RELOAD_SNAPSHOTS = 72000
|
||||||
|
RUN_RELOAD_HOST_CONTROL = 72000
|
||||||
|
RUN_RELOAD_UPDATER = 21600
|
||||||
|
|
||||||
def addons_update(loop, addons):
|
RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
|
||||||
"""Create scheduler task for auto update addons."""
|
RUN_WATCHDOG_HOMEASSISTANT_API = 300
|
||||||
async def _addons_update():
|
|
||||||
|
def __init__(self, coresys):
|
||||||
|
"""Initialize Tasks."""
|
||||||
|
self.coresys = coresys
|
||||||
|
self.jobs = set()
|
||||||
|
self._data = {}
|
||||||
|
|
||||||
|
async def load(self):
|
||||||
|
"""Add Tasks to scheduler."""
|
||||||
|
self.jobs.add(self._scheduler.register_task(
|
||||||
|
self._update_addons, self.RUN_UPDATE_ADDONS))
|
||||||
|
self.jobs.add(self._scheduler.register_task(
|
||||||
|
self._update_supervisor, self.RUN_UPDATE_SUPERVISOR))
|
||||||
|
|
||||||
|
self.jobs.add(self._scheduler.register_task(
|
||||||
|
self._addons.reload, self.RUN_RELOAD_ADDONS))
|
||||||
|
self.jobs.add(self._scheduler.register_task(
|
||||||
|
self._updater.reload, self.RUN_RELOAD_UPDATER))
|
||||||
|
self.jobs.add(self._scheduler.register_task(
|
||||||
|
self._snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
|
||||||
|
self.jobs.add(self._scheduler.register_task(
|
||||||
|
self._host_control.load, self.RUN_RELOAD_HOST_CONTROL))
|
||||||
|
|
||||||
|
self.jobs.add(self._scheduler.register_task(
|
||||||
|
self._watchdog_homeassistant_docker,
|
||||||
|
self.RUN_WATCHDOG_HOMEASSISTANT_DOCKER))
|
||||||
|
self.jobs.add(self._scheduler.register_task(
|
||||||
|
self._watchdog_homeassistant_api,
|
||||||
|
self.RUN_WATCHDOG_HOMEASSISTANT_API))
|
||||||
|
|
||||||
|
_LOGGER.info("All core tasks are scheduled")
|
||||||
|
|
||||||
|
async def _update_addons(self):
|
||||||
"""Check if a update is available of a addon and update it."""
|
"""Check if a update is available of a addon and update it."""
|
||||||
tasks = []
|
tasks = []
|
||||||
for addon in addons.list_addons:
|
for addon in self._addons.list_addons:
|
||||||
if not addon.is_installed or not addon.auto_update:
|
if not addon.is_installed or not addon.auto_update:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -38,78 +70,61 @@ def addons_update(loop, addons):

         if tasks:
             _LOGGER.info("Addon auto update process %d tasks", len(tasks))
-            await asyncio.wait(tasks, loop=loop)
-
-    return _addons_update
+            await asyncio.wait(tasks, loop=self._loop)

-
-def hassio_update(supervisor, updater):
-    """Create scheduler task for update of supervisor hassio."""
-    async def _hassio_update():
+    async def _update_supervisor(self):
         """Check and run update of supervisor hassio."""
-        await updater.fetch_data()
-        if updater.version_hassio == supervisor.version:
+        if not self._supervisor.need_update:
             return

         # don't perform a update on beta/dev channel
-        if updater.beta_channel:
-            _LOGGER.warning("Ignore Hass.IO update on beta upstream!")
+        if self._updater.beta_channel:
+            _LOGGER.warning("Ignore Hass.io update on beta upstream!")
             return

-        _LOGGER.info("Found new HassIO version %s.", updater.version_hassio)
-        await supervisor.update(updater.version_hassio)
-
-    return _hassio_update
-
-
-def homeassistant_watchdog_docker(loop, homeassistant):
-    """Create scheduler task for montoring running state of docker."""
-    async def _homeassistant_watchdog_docker():
+        _LOGGER.info("Found new Hass.io version")
+        await self._supervisor.update()
+
+    async def _watchdog_homeassistant_docker(self):
         """Check running state of docker and start if they is close."""
         # if Home-Assistant is active
-        if not await homeassistant.is_initialize() or \
-                not homeassistant.watchdog:
+        if not await self._homeassistant.is_initialize() or \
+                not self._homeassistant.watchdog:
             return

         # if Home-Assistant is running
-        if homeassistant.in_progress or await homeassistant.is_running():
+        if self._homeassistant.in_progress or \
+                await self._homeassistant.is_running():
             return

-        loop.create_task(homeassistant.run())
-        _LOGGER.error("Watchdog found a problem with Home-Assistant docker!")
-
-    return _homeassistant_watchdog_docker
-
-
-def homeassistant_watchdog_api(loop, homeassistant):
-    """Create scheduler task for montoring running state of API.
+        _LOGGER.warning("Watchdog found a problem with Home-Assistant docker!")
+        await self._homeassistant.run()
+
+    async def _watchdog_homeassistant_api(self):
+        """Create scheduler task for montoring running state of API.

-    Try 2 times to call API before we restart Home-Assistant. Maybe we had a
-    delay in our system.
-    """
-    retry_scan = 0
-
-    async def _homeassistant_watchdog_api():
-        """Check running state of API and start if they is close."""
-        nonlocal retry_scan
-
-        # if Home-Assistant is active
-        if not await homeassistant.is_initialize() or \
-                not homeassistant.watchdog:
+        Try 2 times to call API before we restart Home-Assistant. Maybe we had
+        a delay in our system.
+        """
+        retry_scan = self._data.get('HASS_WATCHDOG_API', 0)
+
+        # If Home-Assistant is active
+        if not await self._homeassistant.is_initialize() or \
+                not self._homeassistant.watchdog:
             return

-        # if Home-Assistant API is up
-        if homeassistant.in_progress or await homeassistant.check_api_state():
+        # If Home-Assistant API is up
+        if self._homeassistant.in_progress or \
+                await self._homeassistant.check_api_state():
             return

+        # Look like we run into a problem
         retry_scan += 1

-        # Retry active
         if retry_scan == 1:
+            self._data['HASS_WATCHDOG_API'] = retry_scan
             _LOGGER.warning("Watchdog miss API response from Home-Assistant")
             return

-        loop.create_task(homeassistant.restart())
         _LOGGER.error("Watchdog found a problem with Home-Assistant API!")
-        retry_scan = 0
-
-    return _homeassistant_watchdog_api
+        await self._homeassistant.restart()
+        self._data['HASS_WATCHDOG_API'] = 0
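The rewritten API watchdog keeps its retry counter in `self._data` instead of a `nonlocal`, so one missed response only logs a warning and the restart happens on the second consecutive miss. A standalone sketch of that two-strike pattern; `check_api()` and `restart()` are hypothetical placeholders for the real health check and restart calls:

```python
import asyncio


class Watchdog:
    """Two-strike watchdog sketch (placeholder check/restart methods)."""

    def __init__(self):
        self._data = {}

    async def check_api(self):
        return False  # hypothetical: pretend the API never answers

    async def restart(self):
        print("restarting")  # hypothetical restart call

    async def watchdog_api(self):
        retry_scan = self._data.get('HASS_WATCHDOG_API', 0)
        if await self.check_api():
            return                                # healthy, nothing to do

        retry_scan += 1
        if retry_scan == 1:                       # first miss: record and wait
            self._data['HASS_WATCHDOG_API'] = retry_scan
            return

        await self.restart()                      # second miss: restart
        self._data['HASS_WATCHDOG_API'] = 0       # counter cleared afterwards


loop = asyncio.get_event_loop()
wd = Watchdog()
loop.run_until_complete(wd.watchdog_api())  # first call: counter goes to 1
loop.run_until_complete(wd.watchdog_api())  # second call: restart fires
```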
hassio/tools.py (deleted, 167 lines)
@@ -1,167 +0,0 @@
"""Tools file for HassIO."""
|
|
||||||
import asyncio
|
|
||||||
from contextlib import suppress
|
|
||||||
from datetime import datetime, timedelta, timezone
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
import async_timeout
|
|
||||||
import pytz
|
|
||||||
import voluptuous as vol
|
|
||||||
from voluptuous.humanize import humanize_error
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
FREEGEOIP_URL = "https://freegeoip.io/json/"
|
|
||||||
|
|
||||||
RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
|
|
||||||
|
|
||||||
# Copyright (c) Django Software Foundation and individual contributors.
|
|
||||||
# All rights reserved.
|
|
||||||
# https://github.com/django/django/blob/master/LICENSE
|
|
||||||
DATETIME_RE = re.compile(
|
|
||||||
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
|
|
||||||
r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
|
|
||||||
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
|
|
||||||
r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def write_json_file(jsonfile, data):
|
|
||||||
"""Write a json file."""
|
|
||||||
try:
|
|
||||||
json_str = json.dumps(data, indent=2)
|
|
||||||
with jsonfile.open('w') as conf_file:
|
|
||||||
conf_file.write(json_str)
|
|
||||||
except (OSError, json.JSONDecodeError):
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def read_json_file(jsonfile):
|
|
||||||
"""Read a json file and return a dict."""
|
|
||||||
with jsonfile.open('r') as cfile:
|
|
||||||
return json.loads(cfile.read())
|
|
||||||
|
|
||||||
|
|
||||||
async def fetch_timezone(websession):
|
|
||||||
"""Read timezone from freegeoip."""
|
|
||||||
data = {}
|
|
||||||
with suppress(aiohttp.ClientError, asyncio.TimeoutError,
|
|
||||||
json.JSONDecodeError, KeyError):
|
|
||||||
with async_timeout.timeout(10, loop=websession.loop):
|
|
||||||
async with websession.get(FREEGEOIP_URL) as request:
|
|
||||||
data = await request.json()
|
|
||||||
|
|
||||||
return data.get('time_zone', 'UTC')
|
|
||||||
|
|
||||||
|
|
||||||
def convert_to_ascii(raw):
|
|
||||||
"""Convert binary to ascii and remove colors."""
|
|
||||||
return RE_STRING.sub("", raw.decode())
|
|
||||||
|
|
||||||
|
|
||||||
# Copyright (c) Django Software Foundation and individual contributors.
|
|
||||||
# All rights reserved.
|
|
||||||
# https://github.com/django/django/blob/master/LICENSE
|
|
||||||
def parse_datetime(dt_str):
|
|
||||||
"""Parse a string and return a datetime.datetime.
|
|
||||||
|
|
||||||
This function supports time zone offsets. When the input contains one,
|
|
||||||
the output uses a timezone with a fixed offset from UTC.
|
|
||||||
Raises ValueError if the input is well formatted but not a valid datetime.
|
|
||||||
Returns None if the input isn't well formatted.
|
|
||||||
"""
|
|
||||||
match = DATETIME_RE.match(dt_str)
|
|
||||||
if not match:
|
|
||||||
return None
|
|
||||||
kws = match.groupdict() # type: Dict[str, Any]
|
|
||||||
if kws['microsecond']:
|
|
||||||
kws['microsecond'] = kws['microsecond'].ljust(6, '0')
|
|
||||||
tzinfo_str = kws.pop('tzinfo')
|
|
||||||
|
|
||||||
tzinfo = None # type: Optional[dt.tzinfo]
|
|
||||||
if tzinfo_str == 'Z':
|
|
||||||
tzinfo = pytz.utc
|
|
||||||
elif tzinfo_str is not None:
|
|
||||||
offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
|
|
||||||
offset_hours = int(tzinfo_str[1:3])
|
|
||||||
offset = timedelta(hours=offset_hours, minutes=offset_mins)
|
|
||||||
if tzinfo_str[0] == '-':
|
|
||||||
offset = -offset
|
|
||||||
tzinfo = timezone(offset)
|
|
||||||
else:
|
|
||||||
tzinfo = None
|
|
||||||
kws = {k: int(v) for k, v in kws.items() if v is not None}
|
|
||||||
kws['tzinfo'] = tzinfo
|
|
||||||
return datetime(**kws)
|
|
||||||
|
|
||||||
|
|
||||||
class JsonConfig(object):
|
|
||||||
"""Hass core object for handle it."""
|
|
||||||
|
|
||||||
def __init__(self, json_file, schema):
|
|
||||||
"""Initialize hass object."""
|
|
||||||
self._file = json_file
|
|
||||||
self._schema = schema
|
|
||||||
self._data = {}
|
|
||||||
|
|
||||||
# init or load data
|
|
||||||
if self._file.is_file():
|
|
||||||
try:
|
|
||||||
self._data = read_json_file(self._file)
|
|
||||||
except (OSError, json.JSONDecodeError):
|
|
||||||
_LOGGER.warning("Can't read %s", self._file)
|
|
||||||
self._data = {}
|
|
||||||
|
|
||||||
# validate
|
|
||||||
try:
|
|
||||||
self._data = self._schema(self._data)
|
|
||||||
except vol.Invalid as ex:
|
|
||||||
_LOGGER.error("Can't parse %s -> %s",
|
|
||||||
self._file, humanize_error(self._data, ex))
|
|
||||||
# reset data to default
|
|
||||||
self._data = self._schema({})
|
|
||||||
|
|
||||||
def save(self):
|
|
||||||
"""Store data to config file."""
|
|
||||||
# validate
|
|
||||||
try:
|
|
||||||
self._data = self._schema(self._data)
|
|
||||||
except vol.Invalid as ex:
|
|
||||||
_LOGGER.error("Can't parse data -> %s",
|
|
||||||
humanize_error(self._data, ex))
|
|
||||||
return False
|
|
||||||
|
|
||||||
# write
|
|
||||||
if not write_json_file(self._file, self._data):
|
|
||||||
_LOGGER.error("Can't store config in %s", self._file)
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
class AsyncThrottle(object):
|
|
||||||
"""
|
|
||||||
Decorator that prevents a function from being called more than once every
|
|
||||||
time period.
|
|
||||||
"""
|
|
||||||
def __init__(self, delta):
|
|
||||||
"""Initialize async throttle."""
|
|
||||||
self.throttle_period = delta
|
|
||||||
self.time_of_last_call = datetime.min
|
|
||||||
|
|
||||||
def __call__(self, method):
|
|
||||||
"""Throttle function"""
|
|
||||||
async def wrapper(*args, **kwargs):
|
|
||||||
"""Throttle function wrapper"""
|
|
||||||
now = datetime.now()
|
|
||||||
time_since_last_call = now - self.time_of_last_call
|
|
||||||
|
|
||||||
if time_since_last_call > self.throttle_period:
|
|
||||||
self.time_of_last_call = now
|
|
||||||
return await method(*args, **kwargs)
|
|
||||||
|
|
||||||
return wrapper
|
|
@@ -10,21 +10,28 @@ import async_timeout
 from .const import (
     URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
     ATTR_BETA_CHANNEL)
-from .tools import AsyncThrottle, JsonConfig
+from .coresys import CoreSysAttributes
+from .utils import AsyncThrottle
+from .utils.json import JsonConfig
 from .validate import SCHEMA_UPDATER_CONFIG

 _LOGGER = logging.getLogger(__name__)


-class Updater(JsonConfig):
+class Updater(JsonConfig, CoreSysAttributes):
     """Fetch last versions from version.json."""

-    def __init__(self, config, loop, websession):
+    def __init__(self, coresys):
         """Initialize updater."""
         super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
-        self.config = config
-        self.loop = loop
-        self.websession = websession
+        self.coresys = coresys
+
+    def load(self):
+        """Update internal data.
+
+        Return a coroutine.
+        """
+        return self.reload()

     @property
     def version_homeassistant(self):

@@ -52,10 +59,9 @@ class Updater(JsonConfig):
     def beta_channel(self, value):
         """Set beta upstream mode."""
         self._data[ATTR_BETA_CHANNEL] = bool(value)
-        self.save()

     @AsyncThrottle(timedelta(seconds=60))
-    async def fetch_data(self):
+    async def reload(self):
         """Fetch current versions from github.

         Is a coroutine.

@@ -63,16 +69,16 @@ class Updater(JsonConfig):
         url = URL_HASSIO_VERSION.format(self.upstream)
         try:
             _LOGGER.info("Fetch update data from %s", url)
-            with async_timeout.timeout(10, loop=self.loop):
-                async with self.websession.get(url) as request:
+            with async_timeout.timeout(10, loop=self._loop):
+                async with self._websession.get(url) as request:
                     data = await request.json(content_type=None)

         except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
-            _LOGGER.warning("Can't fetch versions from %s -> %s", url, err)
+            _LOGGER.warning("Can't fetch versions from %s: %s", url, err)
             return

         except json.JSONDecodeError as err:
-            _LOGGER.warning("Can't parse versions from %s -> %s", url, err)
+            _LOGGER.warning("Can't parse versions from %s: %s", url, err)
             return

         # data valid?

@@ -83,4 +89,4 @@ class Updater(JsonConfig):
         # update versions
         self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
         self._data[ATTR_HASSIO] = data.get('hassio')
-        self.save()
+        self.save_data()
hassio/utils/__init__.py (new file, 34 lines)
@@ -0,0 +1,34 @@
+"""Tools file for HassIO."""
+from datetime import datetime
+import re
+
+RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
+
+
+def convert_to_ascii(raw):
+    """Convert binary to ascii and remove colors."""
+    return RE_STRING.sub("", raw.decode())
+
+
+class AsyncThrottle(object):
+    """
+    Decorator that prevents a function from being called more than once every
+    time period.
+    """
+    def __init__(self, delta):
+        """Initialize async throttle."""
+        self.throttle_period = delta
+        self.time_of_last_call = datetime.min
+
+    def __call__(self, method):
+        """Throttle function"""
+        async def wrapper(*args, **kwargs):
+            """Throttle function wrapper"""
+            now = datetime.now()
+            time_since_last_call = now - self.time_of_last_call
+
+            if time_since_last_call > self.throttle_period:
+                self.time_of_last_call = now
+                return await method(*args, **kwargs)
+
+        return wrapper
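A usage sketch for `AsyncThrottle`, mirroring how `Updater.reload` is decorated in the updater diff above; the `fetch()` coroutine is illustrative. A call inside the throttle window returns `None` without running the wrapped coroutine:

```python
import asyncio
from datetime import timedelta

# Assumes the module above is importable at this path.
from hassio.utils import AsyncThrottle


@AsyncThrottle(timedelta(seconds=60))
async def fetch():
    """Illustrative coroutine; at most one real call per minute."""
    return "fetched"


async def main():
    print(await fetch())   # "fetched"
    print(await fetch())   # None: suppressed by the 60s throttle window


asyncio.get_event_loop().run_until_complete(main())
```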
hassio/utils/dt.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+"""Tools file for HassIO."""
+import asyncio
+from datetime import datetime, timedelta, timezone
+import logging
+import re
+
+import aiohttp
+import async_timeout
+import pytz
+
+_LOGGER = logging.getLogger(__name__)
+
+FREEGEOIP_URL = "https://freegeoip.io/json/"
+
+# Copyright (c) Django Software Foundation and individual contributors.
+# All rights reserved.
+# https://github.com/django/django/blob/master/LICENSE
+DATETIME_RE = re.compile(
+    r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
+    r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
+    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
+    r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
+)
+
+
+async def fetch_timezone(websession):
+    """Read timezone from freegeoip."""
+    data = {}
+    try:
+        with async_timeout.timeout(10, loop=websession.loop):
+            async with websession.get(FREEGEOIP_URL) as request:
+                data = await request.json()
+
+    except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
+        _LOGGER.warning("Can't fetch freegeoip data: %s", err)
+
+    except ValueError as err:
+        _LOGGER.warning("Error on parse freegeoip data: %s", err)
+
+    return data.get('time_zone', 'UTC')
+
+
+# Copyright (c) Django Software Foundation and individual contributors.
+# All rights reserved.
+# https://github.com/django/django/blob/master/LICENSE
+def parse_datetime(dt_str):
+    """Parse a string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+    Raises ValueError if the input is well formatted but not a valid datetime.
+    Returns None if the input isn't well formatted.
+    """
+    match = DATETIME_RE.match(dt_str)
+    if not match:
+        return None
+    kws = match.groupdict()  # type: Dict[str, Any]
+    if kws['microsecond']:
+        kws['microsecond'] = kws['microsecond'].ljust(6, '0')
+    tzinfo_str = kws.pop('tzinfo')
+
+    tzinfo = None  # type: Optional[dt.tzinfo]
+    if tzinfo_str == 'Z':
+        tzinfo = pytz.utc
+    elif tzinfo_str is not None:
+        offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
+        offset_hours = int(tzinfo_str[1:3])
+        offset = timedelta(hours=offset_hours, minutes=offset_mins)
+        if tzinfo_str[0] == '-':
+            offset = -offset
+        tzinfo = timezone(offset)
+    else:
+        tzinfo = None
+    kws = {k: int(v) for k, v in kws.items() if v is not None}
+    kws['tzinfo'] = tzinfo
+    return datetime(**kws)
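A few spot checks of `parse_datetime` from the new module above, assuming it is importable as `hassio.utils.dt`:

```python
from hassio.utils.dt import parse_datetime

print(parse_datetime("2018-01-25T16:03:00Z"))       # 2018-01-25 16:03:00+00:00 (pytz.utc)
print(parse_datetime("2018-01-25 16:03:00+01:00"))  # fixed +01:00 offset via timezone()
print(parse_datetime("not a datetime"))             # None: the regex does not match
```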
hassio/utils/json.py (new file, 73 lines)
@@ -0,0 +1,73 @@
+"""Tools file for HassIO."""
+import json
+import logging
+
+import voluptuous as vol
+from voluptuous.humanize import humanize_error
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def write_json_file(jsonfile, data):
+    """Write a json file."""
+    json_str = json.dumps(data, indent=2)
+    with jsonfile.open('w') as conf_file:
+        conf_file.write(json_str)
+
+
+def read_json_file(jsonfile):
+    """Read a json file and return a dict."""
+    with jsonfile.open('r') as cfile:
+        return json.loads(cfile.read())
+
+
+class JsonConfig(object):
+    """Hass core object for handle it."""
+
+    def __init__(self, json_file, schema):
+        """Initialize hass object."""
+        self._file = json_file
+        self._schema = schema
+        self._data = {}
+
+        self.read_data()
+
+    def read_data(self):
+        """Read json file & validate."""
+        if self._file.is_file():
+            try:
+                self._data = read_json_file(self._file)
+            except (OSError, json.JSONDecodeError):
+                _LOGGER.warning("Can't read %s", self._file)
+                self._data = {}
+
+        # Validate
+        try:
+            self._data = self._schema(self._data)
+        except vol.Invalid as ex:
+            _LOGGER.error("Can't parse %s: %s",
+                          self._file, humanize_error(self._data, ex))
+
+            # Reset data to default
+            _LOGGER.warning("Reset %s to default", self._file)
+            self._data = self._schema({})
+
+    def save_data(self):
+        """Store data to config file."""
+        # Validate
+        try:
+            self._data = self._schema(self._data)
+        except vol.Invalid as ex:
+            _LOGGER.error("Can't parse data: %s",
+                          humanize_error(self._data, ex))
+
+            # Load last valid data
+            _LOGGER.warning("Reset %s to last version", self._file)
+            self.save_data()
+            return
+
+        # write
+        try:
+            write_json_file(self._file, self._data)
+        except (OSError, json.JSONDecodeError) as err:
+            _LOGGER.error("Can't store config in %s: %s", self._file, err)
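A minimal usage sketch for the new `JsonConfig` base class, mirroring how `Updater` subclasses it; the file path, schema, and `MyConfig` name are illustrative:

```python
from pathlib import Path

import voluptuous as vol

from hassio.utils.json import JsonConfig

SCHEMA = vol.Schema({
    vol.Optional("beta_channel", default=False): vol.Boolean(),
})


class MyConfig(JsonConfig):
    """Config object backed by an illustrative JSON file."""

    def __init__(self):
        super().__init__(Path("/tmp/demo_config.json"), SCHEMA)


cfg = MyConfig()                  # read_data(): load the file or fall back to defaults
cfg._data["beta_channel"] = True  # real subclasses wrap this in properties
cfg.save_data()                   # validate, then write the JSON to disk
```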
@@ -1,19 +1,23 @@
 """Validate functions."""
-import voluptuous as vol
+import uuid
+
+import voluptuous as vol
 import pytz

 from .const import (
-    ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_SESSIONS, ATTR_PASSWORD,
-    ATTR_TOTP, ATTR_SECURITY, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
+    ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
     ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
-    ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT, ATTR_SSL,
-    ATTR_PORT, ATTR_WATCHDOG)
+    ATTR_PASSWORD, ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT,
+    ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG, ATTR_WAIT_BOOT, ATTR_UUID)


 NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
-HASS_DEVICES = [vol.Match(r"^[^/]*$")]
 ALSA_CHANNEL = vol.Match(r"\d+,\d+")
+WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
+DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
+
+# pylint: disable=no-value-for-parameter
+REPOSITORIES = vol.All([vol.Url()], vol.Unique())


 def validate_timezone(timezone):

@@ -29,11 +33,12 @@ def validate_timezone(timezone):
     return timezone


+# pylint: disable=inconsistent-return-statements
 def convert_to_docker_ports(data):
     """Convert data into docker port list."""
     # dynamic ports
     if data is None:
-        return
+        return None

     # single port
     if isinstance(data, int):

@@ -58,9 +63,10 @@ DOCKER_PORTS = vol.Schema({

 # pylint: disable=no-value-for-parameter
 SCHEMA_HASS_CONFIG = vol.Schema({
-    vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
+    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
+        vol.Match(r"^[0-9a-f]{32}$"),
     vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
-    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
+    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
     vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
     vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
     vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),

@@ -81,12 +87,10 @@ SCHEMA_UPDATER_CONFIG = vol.Schema({
 SCHEMA_HASSIO_CONFIG = vol.Schema({
     vol.Optional(ATTR_TIMEZONE, default='UTC'): validate_timezone,
     vol.Optional(ATTR_LAST_BOOT): vol.Coerce(str),
-    vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
-    vol.Optional(ATTR_SECURITY, default=False): vol.Boolean(),
-    vol.Optional(ATTR_TOTP): vol.Coerce(str),
-    vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
-    vol.Optional(ATTR_SESSIONS, default={}):
-        vol.Schema({vol.Coerce(str): vol.Coerce(str)}),
+    vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[
+        "https://github.com/hassio-addons/repository",
+    ]): REPOSITORIES,
     vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
     vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
+    vol.Optional(ATTR_WAIT_BOOT, default=5): WAIT_BOOT,
 }, extra=vol.REMOVE_EXTRA)
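The new `ATTR_UUID` entry gives every installation a persistent random id: voluptuous accepts a callable default, so a fresh 32-character hex value is generated the first time the config validates and is then persisted via `save_data()`. A standalone sketch with a plain `"uuid"` key standing in for the constant:

```python
import uuid

import voluptuous as vol

SCHEMA = vol.Schema({
    vol.Optional("uuid", default=lambda: uuid.uuid4().hex):
        vol.Match(r"^[0-9a-f]{32}$"),
})

print(SCHEMA({}))        # {'uuid': '<random 32-char hex>'}
try:
    SCHEMA({"uuid": "not-a-uuid"})
except vol.Invalid as err:
    print("rejected:", err)  # pattern ^[0-9a-f]{32}$ not matched
```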
Submodule home-assistant-polymer updated: 9b9cba86c2...c3e35a27ba

misc/hassio.png: binary file changed (42 KiB -> 37 KiB; not shown)
misc architecture diagram (draw.io mxfile source): updated; compressed XML blob omitted
second diagram asset: deleted (36 KiB binary and its draw.io mxfile source removed)
setup.py (12 lines changed)
@@ -12,7 +12,7 @@ setup(
     url='https://home-assistant.io/',
     description=('Open-source private cloud os for Home-Assistant'
                  ' based on ResinOS'),
-    long_description=('A maintenainless private cloud operator system that'
+    long_description=('A maintainless private cloud operator system that'
                       'setup a Home-Assistant instance. Based on ResinOS'),
     classifiers=[
         'Intended Audience :: End Users/Desktop',

@@ -24,16 +24,18 @@ setup(
         'Topic :: Scientific/Engineering :: Atmospheric Science',
         'Development Status :: 5 - Production/Stable',
         'Intended Audience :: Developers',
-        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
     ],
     keywords=['docker', 'home-assistant', 'api'],
     zip_safe=False,
     platforms='any',
     packages=[
         'hassio',
-        'hassio.dock',
-        'hassio.api',
+        'hassio.docker',
         'hassio.addons',
+        'hassio.api',
+        'hassio.misc',
+        'hassio.utils',
         'hassio.snapshots'
     ],
     include_package_data=True,

@@ -44,8 +46,6 @@ setup(
         'colorlog',
         'voluptuous',
         'gitpython',
-        'pyotp',
-        'pyqrcode',
         'pytz',
         'pyudev'
     ]
tox.ini (4 lines changed)
@@ -2,8 +2,6 @@
 envlist = lint

 [testenv]
-setenv =
-    PYTHONPATH = {toxinidir}:{toxinidir}/hassio
 deps =
     flake8
     pylint

@@ -13,4 +11,4 @@ basepython = python3
 ignore_errors = True
 commands =
     flake8 hassio
-    pylint hassio
+    pylint --rcfile pylintrc hassio
@@ -1,6 +1,6 @@
 {
-    "hassio": "0.73",
-    "homeassistant": "0.56.2",
+    "hassio": "0.85",
+    "homeassistant": "0.62.0",
     "resinos": "1.1",
     "resinhup": "0.3",
     "generic": "0.3",