mirror of
https://github.com/home-assistant/supervisor.git
synced 2025-08-29 10:59:21 +00:00
Compare commits
89 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
463c97f9e7 | ||
![]() |
3983928c6c | ||
![]() |
15e626027f | ||
![]() |
d46810752e | ||
![]() |
3d10b502a0 | ||
![]() |
433c5cef3b | ||
![]() |
697caf553a | ||
![]() |
1e11359c71 | ||
![]() |
5285431825 | ||
![]() |
7743a572a9 | ||
![]() |
3b974920d3 | ||
![]() |
6bc9792248 | ||
![]() |
da55f6fb10 | ||
![]() |
ffa90a3407 | ||
![]() |
0a13ea3743 | ||
![]() |
0e2e588145 | ||
![]() |
b8c50fee36 | ||
![]() |
8cb0b7c498 | ||
![]() |
699fcdafba | ||
![]() |
b4d5aeb5d0 | ||
![]() |
d067dd643e | ||
![]() |
65a2bf2d18 | ||
![]() |
e826e8184f | ||
![]() |
dacbde7d77 | ||
![]() |
5b0587b672 | ||
![]() |
f0320c0f6d | ||
![]() |
e05c32df25 | ||
![]() |
9c40c32e95 | ||
![]() |
ac60de0360 | ||
![]() |
587047f9d6 | ||
![]() |
e815223047 | ||
![]() |
b6fb5ab950 | ||
![]() |
a0906937c4 | ||
![]() |
07c47df369 | ||
![]() |
85e9a949cc | ||
![]() |
3933fb0664 | ||
![]() |
a885fbdb41 | ||
![]() |
210793eb34 | ||
![]() |
0235c7bce0 | ||
![]() |
4419c0fc6c | ||
![]() |
2f3701693d | ||
![]() |
3bf446cbdb | ||
![]() |
0c67cc13a1 | ||
![]() |
0b80d7b6f4 | ||
![]() |
23c35d4c80 | ||
![]() |
e939c29efa | ||
![]() |
ea0655b4e5 | ||
![]() |
4117ce2e86 | ||
![]() |
dec04386bf | ||
![]() |
b50756785e | ||
![]() |
b9538bdc67 | ||
![]() |
a928281bbe | ||
![]() |
4533d17e27 | ||
![]() |
546df6d001 | ||
![]() |
f14eef62ae | ||
![]() |
ee86770570 | ||
![]() |
385a4e9f6f | ||
![]() |
142cdcffca | ||
![]() |
eb6c753514 | ||
![]() |
c3b62c80fb | ||
![]() |
f77e176a6e | ||
![]() |
3f99dec858 | ||
![]() |
81b0cf55b0 | ||
![]() |
1d5d2dc731 | ||
![]() |
04f5ee0a80 | ||
![]() |
7a02777cfb | ||
![]() |
7257c44d27 | ||
![]() |
cb15602814 | ||
![]() |
0f2c333484 | ||
![]() |
6f2cf2ef85 | ||
![]() |
70a721a47d | ||
![]() |
b32947af98 | ||
![]() |
94b44ec7fe | ||
![]() |
5c8aa71c31 | ||
![]() |
a6c424b7c8 | ||
![]() |
38e40c342d | ||
![]() |
26d390b66e | ||
![]() |
baddafa552 | ||
![]() |
f443d3052b | ||
![]() |
8fc27ff28e | ||
![]() |
3784d759f5 | ||
![]() |
61037f3852 | ||
![]() |
db8aaecdbe | ||
![]() |
15a4541595 | ||
![]() |
50ae8e2335 | ||
![]() |
279df17ba4 | ||
![]() |
f8e6362283 | ||
![]() |
0c44064926 | ||
![]() |
73c437574c |
13
.github/move.yml
vendored
Normal file
13
.github/move.yml
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Configuration for move-issues - https://github.com/dessant/move-issues
|
||||||
|
|
||||||
|
# Delete the command comment. Ignored when the comment also contains other content
|
||||||
|
deleteCommand: true
|
||||||
|
# Close the source issue after moving
|
||||||
|
closeSourceIssue: true
|
||||||
|
# Lock the source issue after moving
|
||||||
|
lockSourceIssue: false
|
||||||
|
# Set custom aliases for targets
|
||||||
|
# aliases:
|
||||||
|
# r: repo
|
||||||
|
# or: owner/repo
|
||||||
|
|
210
API.md
210
API.md
@@ -4,7 +4,7 @@
|
|||||||
|
|
||||||
Interface for Home Assistant to control things from supervisor.
|
Interface for Home Assistant to control things from supervisor.
|
||||||
|
|
||||||
On error:
|
On error / Code 400:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
@@ -13,7 +13,7 @@ On error:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
On success:
|
On success / Code 200:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
@@ -22,6 +22,8 @@ On success:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
For access to API you need set the `X-HASSIO-KEY` they will be available for Add-ons/HomeAssistant with envoriment `HASSIO_TOKEN`.
|
||||||
|
|
||||||
### Hass.io
|
### Hass.io
|
||||||
|
|
||||||
- GET `/supervisor/ping`
|
- GET `/supervisor/ping`
|
||||||
@@ -45,6 +47,7 @@ The addons from `addons` are only installed one.
|
|||||||
"repository": "12345678|null",
|
"repository": "12345678|null",
|
||||||
"version": "LAST_VERSION",
|
"version": "LAST_VERSION",
|
||||||
"installed": "INSTALL_VERSION",
|
"installed": "INSTALL_VERSION",
|
||||||
|
"icon": "bool",
|
||||||
"logo": "bool",
|
"logo": "bool",
|
||||||
"state": "started|stopped",
|
"state": "started|stopped",
|
||||||
}
|
}
|
||||||
@@ -99,44 +102,7 @@ Output is the raw docker log.
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Security
|
### Snapshot
|
||||||
|
|
||||||
- GET `/security/info`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"initialize": "bool",
|
|
||||||
"totp": "bool"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/security/options`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"password": "xy"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/security/totp`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"password": "xy"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Return QR-Code
|
|
||||||
|
|
||||||
- POST `/security/session`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"password": "xy",
|
|
||||||
"totp": "null|123456"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Backup/Snapshot
|
|
||||||
|
|
||||||
- GET `/snapshots`
|
- GET `/snapshots`
|
||||||
|
|
||||||
@@ -146,7 +112,9 @@ Return QR-Code
|
|||||||
{
|
{
|
||||||
"slug": "SLUG",
|
"slug": "SLUG",
|
||||||
"date": "ISO",
|
"date": "ISO",
|
||||||
"name": "Custom name"
|
"name": "Custom name",
|
||||||
|
"type": "full|partial",
|
||||||
|
"protected": "bool"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
@@ -154,11 +122,28 @@ Return QR-Code
|
|||||||
|
|
||||||
- POST `/snapshots/reload`
|
- POST `/snapshots/reload`
|
||||||
|
|
||||||
|
- POST `/snapshots/new/upload`
|
||||||
|
|
||||||
|
return:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"slug": ""
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
- POST `/snapshots/new/full`
|
- POST `/snapshots/new/full`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"name": "Optional"
|
"name": "Optional",
|
||||||
|
"password": "Optional"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
return:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"slug": ""
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -168,7 +153,15 @@ Return QR-Code
|
|||||||
{
|
{
|
||||||
"name": "Optional",
|
"name": "Optional",
|
||||||
"addons": ["ADDON_SLUG"],
|
"addons": ["ADDON_SLUG"],
|
||||||
"folders": ["FOLDER_NAME"]
|
"folders": ["FOLDER_NAME"],
|
||||||
|
"password": "Optional"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
return:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"slug": ""
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -183,12 +176,14 @@ Return QR-Code
|
|||||||
"name": "custom snapshot name / description",
|
"name": "custom snapshot name / description",
|
||||||
"date": "ISO",
|
"date": "ISO",
|
||||||
"size": "SIZE_IN_MB",
|
"size": "SIZE_IN_MB",
|
||||||
|
"protected": "bool",
|
||||||
"homeassistant": "version",
|
"homeassistant": "version",
|
||||||
"addons": [
|
"addons": [
|
||||||
{
|
{
|
||||||
"slug": "ADDON_SLUG",
|
"slug": "ADDON_SLUG",
|
||||||
"name": "NAME",
|
"name": "NAME",
|
||||||
"version": "INSTALLED_VERSION"
|
"version": "INSTALLED_VERSION",
|
||||||
|
"size": "SIZE_IN_MB"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"repositories": ["URL"],
|
"repositories": ["URL"],
|
||||||
@@ -197,14 +192,25 @@ Return QR-Code
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/remove`
|
- POST `/snapshots/{slug}/remove`
|
||||||
|
|
||||||
|
- GET `/snapshots/{slug}/download`
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/restore/full`
|
- POST `/snapshots/{slug}/restore/full`
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"password": "Optional"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/restore/partial`
|
- POST `/snapshots/{slug}/restore/partial`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"homeassistant": "bool",
|
"homeassistant": "bool",
|
||||||
"addons": ["ADDON_SLUG"],
|
"addons": ["ADDON_SLUG"],
|
||||||
"folders": ["FOLDER_NAME"]
|
"folders": ["FOLDER_NAME"],
|
||||||
|
"password": "Optional"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -301,7 +307,8 @@ Optional:
|
|||||||
"boot": "bool",
|
"boot": "bool",
|
||||||
"port": 8123,
|
"port": 8123,
|
||||||
"ssl": "bool",
|
"ssl": "bool",
|
||||||
"watchdog": "bool"
|
"watchdog": "bool",
|
||||||
|
"startup_time": 600
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -333,7 +340,8 @@ Output is the raw Docker log.
|
|||||||
"port": "port for access hass",
|
"port": "port for access hass",
|
||||||
"ssl": "bool",
|
"ssl": "bool",
|
||||||
"password": "",
|
"password": "",
|
||||||
"watchdog": "bool"
|
"watchdog": "bool",
|
||||||
|
"startup_time": 600
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -380,6 +388,7 @@ Get all available addons.
|
|||||||
"detached": "bool",
|
"detached": "bool",
|
||||||
"build": "bool",
|
"build": "bool",
|
||||||
"url": "null|url",
|
"url": "null|url",
|
||||||
|
"icon": "bool",
|
||||||
"logo": "bool"
|
"logo": "bool"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
@@ -420,6 +429,7 @@ Get all available addons.
|
|||||||
"privileged": ["NET_ADMIN", "SYS_ADMIN"],
|
"privileged": ["NET_ADMIN", "SYS_ADMIN"],
|
||||||
"devices": ["/dev/xy"],
|
"devices": ["/dev/xy"],
|
||||||
"auto_uart": "bool",
|
"auto_uart": "bool",
|
||||||
|
"icon": "bool",
|
||||||
"logo": "bool",
|
"logo": "bool",
|
||||||
"changelog": "bool",
|
"changelog": "bool",
|
||||||
"hassio_api": "bool",
|
"hassio_api": "bool",
|
||||||
@@ -429,10 +439,14 @@ Get all available addons.
|
|||||||
"gpio": "bool",
|
"gpio": "bool",
|
||||||
"audio": "bool",
|
"audio": "bool",
|
||||||
"audio_input": "null|0,0",
|
"audio_input": "null|0,0",
|
||||||
"audio_output": "null|0,0"
|
"audio_output": "null|0,0",
|
||||||
|
"services": "null|['mqtt']",
|
||||||
|
"discovery": "null|['component/platform']"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
- GET `/addons/{addon}/icon`
|
||||||
|
|
||||||
- GET `/addons/{addon}/logo`
|
- GET `/addons/{addon}/logo`
|
||||||
|
|
||||||
- GET `/addons/{addon}/changelog`
|
- GET `/addons/{addon}/changelog`
|
||||||
@@ -452,7 +466,7 @@ Get all available addons.
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
For reset custom network/audio settings, set it `null`.
|
Reset custom network/audio/options, set it `null`.
|
||||||
|
|
||||||
- POST `/addons/{addon}/start`
|
- POST `/addons/{addon}/start`
|
||||||
|
|
||||||
@@ -491,6 +505,104 @@ Write data to add-on stdin
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Service discovery
|
||||||
|
|
||||||
|
- GET `/services/discovery`
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"discovery": [
|
||||||
|
{
|
||||||
|
"provider": "name",
|
||||||
|
"uuid": "uuid",
|
||||||
|
"component": "component",
|
||||||
|
"platform": "null|platform",
|
||||||
|
"config": {}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- GET `/services/discovery/{UUID}`
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"provider": "name",
|
||||||
|
"uuid": "uuid",
|
||||||
|
"component": "component",
|
||||||
|
"platform": "null|platform",
|
||||||
|
"config": {}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- POST `/services/discovery`
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"component": "component",
|
||||||
|
"platform": "null|platform",
|
||||||
|
"config": {}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
return:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"uuid": "uuid"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- DEL `/services/discovery/{UUID}`
|
||||||
|
|
||||||
|
- GET `/services`
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"services": [
|
||||||
|
{
|
||||||
|
"slug": "name",
|
||||||
|
"available": "bool",
|
||||||
|
"provider": "null|name|list"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- GET `/services/xy`
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"available": "bool",
|
||||||
|
"xy": {}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### MQTT
|
||||||
|
|
||||||
|
This service perform a auto discovery to Home-Assistant.
|
||||||
|
|
||||||
|
- GET `/services/mqtt`
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"provider": "name",
|
||||||
|
"host": "xy",
|
||||||
|
"port": "8883",
|
||||||
|
"ssl": "bool",
|
||||||
|
"username": "optional",
|
||||||
|
"password": "optional",
|
||||||
|
"protocol": "3.1.1"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- POST `/services/mqtt`
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"host": "xy",
|
||||||
|
"port": "8883",
|
||||||
|
"ssl": "bool|optional",
|
||||||
|
"username": "optional",
|
||||||
|
"password": "optional",
|
||||||
|
"protocol": "3.1.1"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- DEL `/services/mqtt`
|
||||||
|
|
||||||
## Host Control
|
## Host Control
|
||||||
|
|
||||||
Communicate over UNIX socket with a host daemon.
|
Communicate over UNIX socket with a host daemon.
|
||||||
|
@@ -15,8 +15,9 @@ RUN apk add --no-cache \
|
|||||||
python3-dev \
|
python3-dev \
|
||||||
g++ \
|
g++ \
|
||||||
&& pip3 install --no-cache-dir \
|
&& pip3 install --no-cache-dir \
|
||||||
uvloop \
|
uvloop==0.9.1 \
|
||||||
cchardet \
|
cchardet==2.1.1 \
|
||||||
|
pycryptodome==3.4.11 \
|
||||||
&& apk del .build-dependencies
|
&& apk del .build-dependencies
|
||||||
|
|
||||||
# Install HassIO
|
# Install HassIO
|
||||||
|
10
README.md
10
README.md
@@ -1,8 +1,12 @@
|
|||||||
# Hass.io
|
# Hass.io
|
||||||
|
|
||||||
### First private cloud solution for home automation.
|
## First private cloud solution for home automation
|
||||||
|
|
||||||
Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.
|
Hass.io is a Docker-based system for managing your Home Assistant installation
|
||||||
|
and related applications. The system is controlled via Home Assistant which
|
||||||
|
communicates with the Supervisor. The Supervisor provides an API to manage the
|
||||||
|
installation. This includes changing network settings or installing
|
||||||
|
and updating software.
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
@@ -11,4 +15,4 @@ Hass.io is a Docker based system for managing your Home Assistant installation a
|
|||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
Installation instructions can be found at [https://home-assistant.io/hassio](https://home-assistant.io/hassio).
|
Installation instructions can be found at <https://home-assistant.io/hassio>.
|
||||||
|
@@ -4,7 +4,7 @@ import logging
|
|||||||
|
|
||||||
from .addon import Addon
|
from .addon import Addon
|
||||||
from .repository import Repository
|
from .repository import Repository
|
||||||
from .data import Data
|
from .data import AddonsData
|
||||||
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
|
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
|
|
||||||
@@ -19,7 +19,7 @@ class AddonManager(CoreSysAttributes):
|
|||||||
def __init__(self, coresys):
|
def __init__(self, coresys):
|
||||||
"""Initialize docker base wrapper."""
|
"""Initialize docker base wrapper."""
|
||||||
self.coresys = coresys
|
self.coresys = coresys
|
||||||
self.data = Data(coresys)
|
self.data = AddonsData(coresys)
|
||||||
self.addons_obj = {}
|
self.addons_obj = {}
|
||||||
self.repositories_obj = {}
|
self.repositories_obj = {}
|
||||||
|
|
||||||
@@ -28,15 +28,28 @@ class AddonManager(CoreSysAttributes):
|
|||||||
"""Return a list of all addons."""
|
"""Return a list of all addons."""
|
||||||
return list(self.addons_obj.values())
|
return list(self.addons_obj.values())
|
||||||
|
|
||||||
|
@property
|
||||||
|
def list_installed(self):
|
||||||
|
"""Return a list of installed addons."""
|
||||||
|
return [addon for addon in self.addons_obj.values()
|
||||||
|
if addon.is_installed]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def list_repositories(self):
|
def list_repositories(self):
|
||||||
"""Return list of addon repositories."""
|
"""Return list of addon repositories."""
|
||||||
return list(self.repositories_obj.values())
|
return list(self.repositories_obj.values())
|
||||||
|
|
||||||
def get(self, addon_slug):
|
def get(self, addon_slug):
|
||||||
"""Return a adddon from slug."""
|
"""Return a add-on from slug."""
|
||||||
return self.addons_obj.get(addon_slug)
|
return self.addons_obj.get(addon_slug)
|
||||||
|
|
||||||
|
def from_uuid(self, uuid):
|
||||||
|
"""Return a add-on from uuid."""
|
||||||
|
for addon in self.list_addons:
|
||||||
|
if addon.is_installed and uuid == addon.uuid:
|
||||||
|
return addon
|
||||||
|
return None
|
||||||
|
|
||||||
async def load(self):
|
async def load(self):
|
||||||
"""Startup addon management."""
|
"""Startup addon management."""
|
||||||
self.data.reload()
|
self.data.reload()
|
||||||
|
@@ -12,7 +12,7 @@ import voluptuous as vol
|
|||||||
from voluptuous.humanize import humanize_error
|
from voluptuous.humanize import humanize_error
|
||||||
|
|
||||||
from .validate import (
|
from .validate import (
|
||||||
validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME)
|
validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME, RE_SERVICE)
|
||||||
from .utils import check_installed
|
from .utils import check_installed
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
|
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
|
||||||
@@ -23,7 +23,7 @@ from ..const import (
|
|||||||
ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
|
ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
|
||||||
ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
|
ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
|
||||||
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
|
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
|
||||||
ATTR_HOST_DBUS, ATTR_AUTO_UART)
|
ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES)
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
from ..docker.addon import DockerAddon
|
from ..docker.addon import DockerAddon
|
||||||
from ..utils.json import write_json_file, read_json_file
|
from ..utils.json import write_json_file, read_json_file
|
||||||
@@ -87,25 +87,25 @@ class Addon(CoreSysAttributes):
|
|||||||
ATTR_OPTIONS: {},
|
ATTR_OPTIONS: {},
|
||||||
ATTR_VERSION: version,
|
ATTR_VERSION: version,
|
||||||
}
|
}
|
||||||
self._data.save()
|
self._data.save_data()
|
||||||
|
|
||||||
def _set_uninstall(self):
|
def _set_uninstall(self):
|
||||||
"""Set addon as uninstalled."""
|
"""Set addon as uninstalled."""
|
||||||
self._data.system.pop(self._id, None)
|
self._data.system.pop(self._id, None)
|
||||||
self._data.user.pop(self._id, None)
|
self._data.user.pop(self._id, None)
|
||||||
self._data.save()
|
self._data.save_data()
|
||||||
|
|
||||||
def _set_update(self, version):
|
def _set_update(self, version):
|
||||||
"""Update version of addon."""
|
"""Update version of addon."""
|
||||||
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
|
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
|
||||||
self._data.user[self._id][ATTR_VERSION] = version
|
self._data.user[self._id][ATTR_VERSION] = version
|
||||||
self._data.save()
|
self._data.save_data()
|
||||||
|
|
||||||
def _restore_data(self, user, system):
|
def _restore_data(self, user, system):
|
||||||
"""Restore data to addon."""
|
"""Restore data to addon."""
|
||||||
self._data.user[self._id] = deepcopy(user)
|
self._data.user[self._id] = deepcopy(user)
|
||||||
self._data.system[self._id] = deepcopy(system)
|
self._data.system[self._id] = deepcopy(system)
|
||||||
self._data.save()
|
self._data.save_data()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def options(self):
|
def options(self):
|
||||||
@@ -120,8 +120,10 @@ class Addon(CoreSysAttributes):
|
|||||||
@options.setter
|
@options.setter
|
||||||
def options(self, value):
|
def options(self, value):
|
||||||
"""Store user addon options."""
|
"""Store user addon options."""
|
||||||
|
if value is None:
|
||||||
|
self._data.user[self._id][ATTR_OPTIONS] = {}
|
||||||
|
else:
|
||||||
self._data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
|
self._data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
|
||||||
self._data.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def boot(self):
|
def boot(self):
|
||||||
@@ -134,7 +136,6 @@ class Addon(CoreSysAttributes):
|
|||||||
def boot(self, value):
|
def boot(self, value):
|
||||||
"""Store user boot options."""
|
"""Store user boot options."""
|
||||||
self._data.user[self._id][ATTR_BOOT] = value
|
self._data.user[self._id][ATTR_BOOT] = value
|
||||||
self._data.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def auto_update(self):
|
def auto_update(self):
|
||||||
@@ -147,7 +148,6 @@ class Addon(CoreSysAttributes):
|
|||||||
def auto_update(self, value):
|
def auto_update(self, value):
|
||||||
"""Set auto update."""
|
"""Set auto update."""
|
||||||
self._data.user[self._id][ATTR_AUTO_UPDATE] = value
|
self._data.user[self._id][ATTR_AUTO_UPDATE] = value
|
||||||
self._data.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self):
|
def name(self):
|
||||||
@@ -160,7 +160,7 @@ class Addon(CoreSysAttributes):
|
|||||||
return self._mesh[ATTR_TIMEOUT]
|
return self._mesh[ATTR_TIMEOUT]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def api_token(self):
|
def uuid(self):
|
||||||
"""Return a API token for this add-on."""
|
"""Return a API token for this add-on."""
|
||||||
if self.is_installed:
|
if self.is_installed:
|
||||||
return self._data.user[self._id][ATTR_UUID]
|
return self._data.user[self._id][ATTR_UUID]
|
||||||
@@ -201,6 +201,26 @@ class Addon(CoreSysAttributes):
|
|||||||
"""Return startup type of addon."""
|
"""Return startup type of addon."""
|
||||||
return self._mesh.get(ATTR_STARTUP)
|
return self._mesh.get(ATTR_STARTUP)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def services(self):
|
||||||
|
"""Return dict of services with rights."""
|
||||||
|
raw_services = self._mesh.get(ATTR_SERVICES)
|
||||||
|
if not raw_services:
|
||||||
|
return None
|
||||||
|
|
||||||
|
formated_services = {}
|
||||||
|
for data in raw_services:
|
||||||
|
service = RE_SERVICE.match(data)
|
||||||
|
formated_services[service.group('service')] = \
|
||||||
|
service.group('rights') or 'ro'
|
||||||
|
|
||||||
|
return formated_services
|
||||||
|
|
||||||
|
@property
|
||||||
|
def discovery(self):
|
||||||
|
"""Return list of discoverable components/platforms."""
|
||||||
|
return self._mesh.get(ATTR_DISCOVERY)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def ports(self):
|
def ports(self):
|
||||||
"""Return ports of addon."""
|
"""Return ports of addon."""
|
||||||
@@ -225,8 +245,6 @@ class Addon(CoreSysAttributes):
|
|||||||
|
|
||||||
self._data.user[self._id][ATTR_NETWORK] = new_ports
|
self._data.user[self._id][ATTR_NETWORK] = new_ports
|
||||||
|
|
||||||
self._data.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def webui(self):
|
def webui(self):
|
||||||
"""Return URL to webui or None."""
|
"""Return URL to webui or None."""
|
||||||
@@ -347,7 +365,6 @@ class Addon(CoreSysAttributes):
|
|||||||
self._data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
|
self._data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
|
||||||
else:
|
else:
|
||||||
self._data.user[self._id][ATTR_AUDIO_OUTPUT] = value
|
self._data.user[self._id][ATTR_AUDIO_OUTPUT] = value
|
||||||
self._data.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def audio_input(self):
|
def audio_input(self):
|
||||||
@@ -367,13 +384,17 @@ class Addon(CoreSysAttributes):
|
|||||||
self._data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
|
self._data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
|
||||||
else:
|
else:
|
||||||
self._data.user[self._id][ATTR_AUDIO_INPUT] = value
|
self._data.user[self._id][ATTR_AUDIO_INPUT] = value
|
||||||
self._data.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def url(self):
|
def url(self):
|
||||||
"""Return url of addon."""
|
"""Return url of addon."""
|
||||||
return self._mesh.get(ATTR_URL)
|
return self._mesh.get(ATTR_URL)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def with_icon(self):
|
||||||
|
"""Return True if a icon exists."""
|
||||||
|
return self.path_icon.exists()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def with_logo(self):
|
def with_logo(self):
|
||||||
"""Return True if a logo exists."""
|
"""Return True if a logo exists."""
|
||||||
@@ -438,6 +459,11 @@ class Addon(CoreSysAttributes):
|
|||||||
"""Return path to this addon."""
|
"""Return path to this addon."""
|
||||||
return Path(self._mesh[ATTR_LOCATON])
|
return Path(self._mesh[ATTR_LOCATON])
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_icon(self):
|
||||||
|
"""Return path to addon icon."""
|
||||||
|
return Path(self.path_location, 'icon.png')
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path_logo(self):
|
def path_logo(self):
|
||||||
"""Return path to addon logo."""
|
"""Return path to addon logo."""
|
||||||
@@ -448,6 +474,10 @@ class Addon(CoreSysAttributes):
|
|||||||
"""Return path to addon changelog."""
|
"""Return path to addon changelog."""
|
||||||
return Path(self.path_location, 'CHANGELOG.md')
|
return Path(self.path_location, 'CHANGELOG.md')
|
||||||
|
|
||||||
|
def save_data(self):
|
||||||
|
"""Save data of addon."""
|
||||||
|
self._addons.data.save_data()
|
||||||
|
|
||||||
def write_options(self):
|
def write_options(self):
|
||||||
"""Return True if addon options is written to data."""
|
"""Return True if addon options is written to data."""
|
||||||
schema = self.schema
|
schema = self.schema
|
||||||
@@ -455,10 +485,14 @@ class Addon(CoreSysAttributes):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
schema(options)
|
schema(options)
|
||||||
return write_json_file(self.path_options, options)
|
write_json_file(self.path_options, options)
|
||||||
except vol.Invalid as ex:
|
except vol.Invalid as ex:
|
||||||
_LOGGER.error("Addon %s have wrong options: %s", self._id,
|
_LOGGER.error("Addon %s have wrong options: %s", self._id,
|
||||||
humanize_error(options, ex))
|
humanize_error(options, ex))
|
||||||
|
except (OSError, json.JSONDecodeError) as err:
|
||||||
|
_LOGGER.error("Addon %s can't write options: %s", self._id, err)
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -547,12 +581,12 @@ class Addon(CoreSysAttributes):
|
|||||||
return STATE_STOPPED
|
return STATE_STOPPED
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
def start(self):
|
async def start(self):
|
||||||
"""Set options and start addon.
|
"""Set options and start addon."""
|
||||||
|
if not self.write_options():
|
||||||
|
return False
|
||||||
|
|
||||||
Return a coroutine.
|
return await self.instance.run()
|
||||||
"""
|
|
||||||
return self.instance.run()
|
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
def stop(self):
|
def stop(self):
|
||||||
@@ -577,16 +611,14 @@ class Addon(CoreSysAttributes):
|
|||||||
|
|
||||||
# restore state
|
# restore state
|
||||||
if last_state == STATE_STARTED:
|
if last_state == STATE_STARTED:
|
||||||
await self.instance.run()
|
await self.start()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
def restart(self):
|
async def restart(self):
|
||||||
"""Restart addon.
|
"""Restart addon."""
|
||||||
|
await self.stop()
|
||||||
Return a coroutine.
|
return await self.start()
|
||||||
"""
|
|
||||||
return self.instance.restart()
|
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
def logs(self):
|
def logs(self):
|
||||||
@@ -622,7 +654,7 @@ class Addon(CoreSysAttributes):
|
|||||||
|
|
||||||
# restore state
|
# restore state
|
||||||
if last_state == STATE_STARTED:
|
if last_state == STATE_STARTED:
|
||||||
await self.instance.run()
|
await self.start()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
@@ -654,21 +686,22 @@ class Addon(CoreSysAttributes):
|
|||||||
}
|
}
|
||||||
|
|
||||||
# store local configs/state
|
# store local configs/state
|
||||||
if not write_json_file(Path(temp, "addon.json"), data):
|
try:
|
||||||
_LOGGER.error("Can't write addon.json for %s", self._id)
|
write_json_file(Path(temp, "addon.json"), data)
|
||||||
|
except (OSError, json.JSONDecodeError) as err:
|
||||||
|
_LOGGER.error("Can't save meta for %s: %s", self._id, err)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# write into tarfile
|
# write into tarfile
|
||||||
def _create_tar():
|
def _write_tarfile():
|
||||||
"""Write tar inside loop."""
|
"""Write tar inside loop."""
|
||||||
with tarfile.open(tar_file, "w:gz",
|
with tar_file as snapshot:
|
||||||
compresslevel=1) as snapshot:
|
|
||||||
snapshot.add(temp, arcname=".")
|
snapshot.add(temp, arcname=".")
|
||||||
snapshot.add(self.path_data, arcname="data")
|
snapshot.add(self.path_data, arcname="data")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
_LOGGER.info("Build snapshot for addon %s", self._id)
|
_LOGGER.info("Build snapshot for addon %s", self._id)
|
||||||
await self._loop.run_in_executor(None, _create_tar)
|
await self._loop.run_in_executor(None, _write_tarfile)
|
||||||
except (tarfile.TarError, OSError) as err:
|
except (tarfile.TarError, OSError) as err:
|
||||||
_LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
|
_LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
|
||||||
return False
|
return False
|
||||||
@@ -680,13 +713,13 @@ class Addon(CoreSysAttributes):
|
|||||||
"""Restore a state of a addon."""
|
"""Restore a state of a addon."""
|
||||||
with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
|
with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
|
||||||
# extract snapshot
|
# extract snapshot
|
||||||
def _extract_tar():
|
def _extract_tarfile():
|
||||||
"""Extract tar snapshot."""
|
"""Extract tar snapshot."""
|
||||||
with tarfile.open(tar_file, "r:gz") as snapshot:
|
with tar_file as snapshot:
|
||||||
snapshot.extractall(path=Path(temp))
|
snapshot.extractall(path=Path(temp))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await self._loop.run_in_executor(None, _extract_tar)
|
await self._loop.run_in_executor(None, _extract_tarfile)
|
||||||
except tarfile.TarError as err:
|
except tarfile.TarError as err:
|
||||||
_LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
|
_LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
|
||||||
return False
|
return False
|
||||||
|
@@ -10,18 +10,23 @@ from ..utils.json import JsonConfig
|
|||||||
class AddonBuild(JsonConfig, CoreSysAttributes):
|
class AddonBuild(JsonConfig, CoreSysAttributes):
|
||||||
"""Handle build options for addons."""
|
"""Handle build options for addons."""
|
||||||
|
|
||||||
def __init__(self, coresys, addon):
|
def __init__(self, coresys, slug):
|
||||||
"""Initialize addon builder."""
|
"""Initialize addon builder."""
|
||||||
self.coresys = coresys
|
self.coresys = coresys
|
||||||
self.addon = addon
|
self._id = slug
|
||||||
|
|
||||||
super().__init__(
|
super().__init__(
|
||||||
Path(addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
|
Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
|
||||||
|
|
||||||
def save(self):
|
def save_data(self):
|
||||||
"""Ignore save function."""
|
"""Ignore save function."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@property
|
||||||
|
def addon(self):
|
||||||
|
"""Return addon of build data."""
|
||||||
|
return self._addons.get(self._id)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def base_image(self):
|
def base_image(self):
|
||||||
"""Base images for this addon."""
|
"""Base images for this addon."""
|
||||||
|
@@ -9,7 +9,7 @@ from voluptuous.humanize import humanize_error
|
|||||||
|
|
||||||
from .utils import extract_hash_from_path
|
from .utils import extract_hash_from_path
|
||||||
from .validate import (
|
from .validate import (
|
||||||
SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG)
|
SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG)
|
||||||
from ..const import (
|
from ..const import (
|
||||||
FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
|
FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
|
||||||
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
|
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
|
||||||
@@ -19,12 +19,12 @@ from ..utils.json import JsonConfig, read_json_file
|
|||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Data(JsonConfig, CoreSysAttributes):
|
class AddonsData(JsonConfig, CoreSysAttributes):
|
||||||
"""Hold data for addons inside HassIO."""
|
"""Hold data for addons inside HassIO."""
|
||||||
|
|
||||||
def __init__(self, coresys):
|
def __init__(self, coresys):
|
||||||
"""Initialize data holder."""
|
"""Initialize data holder."""
|
||||||
super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDON_FILE)
|
super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
|
||||||
self.coresys = coresys
|
self.coresys = coresys
|
||||||
self._repositories = {}
|
self._repositories = {}
|
||||||
self._cache = {}
|
self._cache = {}
|
||||||
@@ -159,4 +159,4 @@ class Data(JsonConfig, CoreSysAttributes):
|
|||||||
have_change = True
|
have_change = True
|
||||||
|
|
||||||
if have_change:
|
if have_change:
|
||||||
self.save()
|
self.save_data()
|
||||||
|
@@ -17,13 +17,15 @@ from ..const import (
|
|||||||
ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
|
ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
|
||||||
ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
|
ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
|
||||||
ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
|
ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
|
||||||
ATTR_HOST_DBUS, ATTR_AUTO_UART)
|
ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY)
|
||||||
from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL
|
from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$")
|
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$")
|
||||||
|
RE_SERVICE = re.compile(r"^(?P<service>mqtt)(?::(?P<rights>rw|:ro))?$")
|
||||||
|
RE_DISCOVERY = re.compile(r"^(?P<component>\w*)(?:/(?P<platform>\w*>))?$")
|
||||||
|
|
||||||
V_STR = 'str'
|
V_STR = 'str'
|
||||||
V_INT = 'int'
|
V_INT = 'int'
|
||||||
@@ -101,7 +103,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
|
|||||||
vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
|
vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_TMPFS):
|
vol.Optional(ATTR_TMPFS):
|
||||||
vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
|
vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
|
||||||
vol.Optional(ATTR_MAP, default=[]): [vol.Match(RE_VOLUME)],
|
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
|
||||||
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
||||||
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
|
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
|
||||||
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
||||||
@@ -110,6 +112,8 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
|
|||||||
vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
|
vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
|
vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
|
vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
|
||||||
|
vol.Optional(ATTR_DISCOVERY): [vol.Match(RE_DISCOVERY)],
|
||||||
vol.Required(ATTR_OPTIONS): dict,
|
vol.Required(ATTR_OPTIONS): dict,
|
||||||
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
|
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
|
||||||
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
|
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
|
||||||
@@ -141,7 +145,7 @@ SCHEMA_BUILD_CONFIG = vol.Schema({
|
|||||||
vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
|
vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
|
||||||
}),
|
}),
|
||||||
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
|
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_ARGS, default={}): vol.Schema({
|
vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
|
||||||
vol.Coerce(str): vol.Coerce(str)
|
vol.Coerce(str): vol.Coerce(str)
|
||||||
}),
|
}),
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
}, extra=vol.REMOVE_EXTRA)
|
||||||
@@ -152,7 +156,7 @@ SCHEMA_ADDON_USER = vol.Schema({
|
|||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||||
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
|
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
|
||||||
vol.Match(r"^[0-9a-f]{32}$"),
|
vol.Match(r"^[0-9a-f]{32}$"),
|
||||||
vol.Optional(ATTR_OPTIONS, default={}): dict,
|
vol.Optional(ATTR_OPTIONS, default=dict): dict,
|
||||||
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_BOOT):
|
vol.Optional(ATTR_BOOT):
|
||||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||||
@@ -168,11 +172,11 @@ SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
|
|||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_ADDON_FILE = vol.Schema({
|
SCHEMA_ADDONS_FILE = vol.Schema({
|
||||||
vol.Optional(ATTR_USER, default={}): {
|
vol.Optional(ATTR_USER, default=dict): {
|
||||||
vol.Coerce(str): SCHEMA_ADDON_USER,
|
vol.Coerce(str): SCHEMA_ADDON_USER,
|
||||||
},
|
},
|
||||||
vol.Optional(ATTR_SYSTEM, default={}): {
|
vol.Optional(ATTR_SYSTEM, default=dict): {
|
||||||
vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
|
vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
@@ -5,13 +5,15 @@ from pathlib import Path
|
|||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
|
|
||||||
from .addons import APIAddons
|
from .addons import APIAddons
|
||||||
|
from .discovery import APIDiscovery
|
||||||
from .homeassistant import APIHomeAssistant
|
from .homeassistant import APIHomeAssistant
|
||||||
from .host import APIHost
|
from .host import APIHost
|
||||||
from .network import APINetwork
|
from .network import APINetwork
|
||||||
from .proxy import APIProxy
|
from .proxy import APIProxy
|
||||||
from .supervisor import APISupervisor
|
from .supervisor import APISupervisor
|
||||||
from .security import APISecurity
|
|
||||||
from .snapshots import APISnapshots
|
from .snapshots import APISnapshots
|
||||||
|
from .services import APIServices
|
||||||
|
from .security import security_layer
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
@@ -23,12 +25,16 @@ class RestAPI(CoreSysAttributes):
|
|||||||
def __init__(self, coresys):
|
def __init__(self, coresys):
|
||||||
"""Initialize docker base wrapper."""
|
"""Initialize docker base wrapper."""
|
||||||
self.coresys = coresys
|
self.coresys = coresys
|
||||||
self.webapp = web.Application(loop=self._loop)
|
self.webapp = web.Application(
|
||||||
|
middlewares=[security_layer], loop=self._loop)
|
||||||
|
|
||||||
# service stuff
|
# service stuff
|
||||||
self._handler = None
|
self._handler = None
|
||||||
self.server = None
|
self.server = None
|
||||||
|
|
||||||
|
# middleware
|
||||||
|
self.webapp['coresys'] = coresys
|
||||||
|
|
||||||
async def load(self):
|
async def load(self):
|
||||||
"""Register REST API Calls."""
|
"""Register REST API Calls."""
|
||||||
self._register_supervisor()
|
self._register_supervisor()
|
||||||
@@ -38,8 +44,9 @@ class RestAPI(CoreSysAttributes):
|
|||||||
self._register_panel()
|
self._register_panel()
|
||||||
self._register_addons()
|
self._register_addons()
|
||||||
self._register_snapshots()
|
self._register_snapshots()
|
||||||
self._register_security()
|
|
||||||
self._register_network()
|
self._register_network()
|
||||||
|
self._register_discovery()
|
||||||
|
self._register_services()
|
||||||
|
|
||||||
def _register_host(self):
|
def _register_host(self):
|
||||||
"""Register hostcontrol function."""
|
"""Register hostcontrol function."""
|
||||||
@@ -102,12 +109,14 @@ class RestAPI(CoreSysAttributes):
|
|||||||
'/homeassistant/api/websocket', api_proxy.websocket)
|
'/homeassistant/api/websocket', api_proxy.websocket)
|
||||||
self.webapp.router.add_get(
|
self.webapp.router.add_get(
|
||||||
'/homeassistant/websocket', api_proxy.websocket)
|
'/homeassistant/websocket', api_proxy.websocket)
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/homeassistant/api/stream', api_proxy.stream)
|
||||||
self.webapp.router.add_post(
|
self.webapp.router.add_post(
|
||||||
'/homeassistant/api/{path:.+}', api_proxy.api)
|
'/homeassistant/api/{path:.+}', api_proxy.api)
|
||||||
self.webapp.router.add_get(
|
self.webapp.router.add_get(
|
||||||
'/homeassistant/api/{path:.+}', api_proxy.api)
|
'/homeassistant/api/{path:.+}', api_proxy.api)
|
||||||
self.webapp.router.add_get(
|
self.webapp.router.add_get(
|
||||||
'/homeassistant/api', api_proxy.api)
|
'/homeassistant/api/', api_proxy.api)
|
||||||
|
|
||||||
def _register_addons(self):
|
def _register_addons(self):
|
||||||
"""Register homeassistant function."""
|
"""Register homeassistant function."""
|
||||||
@@ -132,22 +141,13 @@ class RestAPI(CoreSysAttributes):
|
|||||||
self.webapp.router.add_post(
|
self.webapp.router.add_post(
|
||||||
'/addons/{addon}/rebuild', api_addons.rebuild)
|
'/addons/{addon}/rebuild', api_addons.rebuild)
|
||||||
self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
|
self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
|
||||||
|
self.webapp.router.add_get('/addons/{addon}/icon', api_addons.icon)
|
||||||
self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
|
self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
|
||||||
self.webapp.router.add_get(
|
self.webapp.router.add_get(
|
||||||
'/addons/{addon}/changelog', api_addons.changelog)
|
'/addons/{addon}/changelog', api_addons.changelog)
|
||||||
self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)
|
self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)
|
||||||
self.webapp.router.add_get('/addons/{addon}/stats', api_addons.stats)
|
self.webapp.router.add_get('/addons/{addon}/stats', api_addons.stats)
|
||||||
|
|
||||||
def _register_security(self):
|
|
||||||
"""Register security function."""
|
|
||||||
api_security = APISecurity()
|
|
||||||
api_security.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.router.add_get('/security/info', api_security.info)
|
|
||||||
self.webapp.router.add_post('/security/options', api_security.options)
|
|
||||||
self.webapp.router.add_post('/security/totp', api_security.totp)
|
|
||||||
self.webapp.router.add_post('/security/session', api_security.session)
|
|
||||||
|
|
||||||
def _register_snapshots(self):
|
def _register_snapshots(self):
|
||||||
"""Register snapshots function."""
|
"""Register snapshots function."""
|
||||||
api_snapshots = APISnapshots()
|
api_snapshots = APISnapshots()
|
||||||
@@ -160,6 +160,8 @@ class RestAPI(CoreSysAttributes):
|
|||||||
'/snapshots/new/full', api_snapshots.snapshot_full)
|
'/snapshots/new/full', api_snapshots.snapshot_full)
|
||||||
self.webapp.router.add_post(
|
self.webapp.router.add_post(
|
||||||
'/snapshots/new/partial', api_snapshots.snapshot_partial)
|
'/snapshots/new/partial', api_snapshots.snapshot_partial)
|
||||||
|
self.webapp.router.add_post(
|
||||||
|
'/snapshots/new/upload', api_snapshots.upload)
|
||||||
|
|
||||||
self.webapp.router.add_get(
|
self.webapp.router.add_get(
|
||||||
'/snapshots/{snapshot}/info', api_snapshots.info)
|
'/snapshots/{snapshot}/info', api_snapshots.info)
|
||||||
@@ -170,21 +172,59 @@ class RestAPI(CoreSysAttributes):
|
|||||||
self.webapp.router.add_post(
|
self.webapp.router.add_post(
|
||||||
'/snapshots/{snapshot}/restore/partial',
|
'/snapshots/{snapshot}/restore/partial',
|
||||||
api_snapshots.restore_partial)
|
api_snapshots.restore_partial)
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/snapshots/{snapshot}/download',
|
||||||
|
api_snapshots.download)
|
||||||
|
|
||||||
|
def _register_services(self):
|
||||||
|
api_services = APIServices()
|
||||||
|
api_services.coresys = self.coresys
|
||||||
|
|
||||||
|
self.webapp.router.add_get('/services', api_services.list)
|
||||||
|
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/services/{service}', api_services.get_service)
|
||||||
|
self.webapp.router.add_post(
|
||||||
|
'/services/{service}', api_services.set_service)
|
||||||
|
self.webapp.router.add_delete(
|
||||||
|
'/services/{service}', api_services.del_service)
|
||||||
|
|
||||||
|
def _register_discovery(self):
|
||||||
|
api_discovery = APIDiscovery()
|
||||||
|
api_discovery.coresys = self.coresys
|
||||||
|
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/services/discovery', api_discovery.list)
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/services/discovery/{uuid}', api_discovery.get_discovery)
|
||||||
|
self.webapp.router.add_delete(
|
||||||
|
'/services/discovery/{uuid}', api_discovery.del_discovery)
|
||||||
|
self.webapp.router.add_post(
|
||||||
|
'/services/discovery', api_discovery.set_discovery)
|
||||||
|
|
||||||
def _register_panel(self):
|
def _register_panel(self):
|
||||||
"""Register panel for homeassistant."""
|
"""Register panel for homeassistant."""
|
||||||
def create_panel_response(build_type):
|
def create_panel_response(build_type):
|
||||||
"""Create a function to generate a response."""
|
"""Create a function to generate a response."""
|
||||||
path = Path(__file__).parent.joinpath(
|
path = Path(__file__).parent.joinpath(
|
||||||
'panel/hassio-main-{}.html'.format(build_type))
|
f"panel/{build_type}.html")
|
||||||
|
|
||||||
return lambda request: web.FileResponse(path)
|
return lambda request: web.FileResponse(path)
|
||||||
|
|
||||||
# This route is for backwards compatibility with HA < 0.58
|
# This route is for backwards compatibility with HA < 0.58
|
||||||
self.webapp.router.add_get('/panel', create_panel_response('es5'))
|
|
||||||
self.webapp.router.add_get('/panel_es5', create_panel_response('es5'))
|
|
||||||
self.webapp.router.add_get(
|
self.webapp.router.add_get(
|
||||||
'/panel_latest', create_panel_response('latest'))
|
'/panel', create_panel_response('hassio-main-es5'))
|
||||||
|
|
||||||
|
# This route is for backwards compatibility with HA 0.58 - 0.61
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/panel_es5', create_panel_response('hassio-main-es5'))
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/panel_latest', create_panel_response('hassio-main-latest'))
|
||||||
|
|
||||||
|
# This route is for HA > 0.61
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/app-es5/index.html', create_panel_response('index'))
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/app-es5/hassio-app.html', create_panel_response('hassio-app'))
|
||||||
|
|
||||||
async def start(self):
|
async def start(self):
|
||||||
"""Run rest api webserver."""
|
"""Run rest api webserver."""
|
||||||
|
@@ -16,7 +16,8 @@ from ..const import (
|
|||||||
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
|
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
|
||||||
ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
|
ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
|
||||||
ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
|
ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
|
||||||
ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE,
|
ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
|
||||||
|
ATTR_DISCOVERY,
|
||||||
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT)
|
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT)
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
from ..validate import DOCKER_PORTS
|
from ..validate import DOCKER_PORTS
|
||||||
@@ -73,6 +74,7 @@ class APIAddons(CoreSysAttributes):
|
|||||||
ATTR_REPOSITORY: addon.repository,
|
ATTR_REPOSITORY: addon.repository,
|
||||||
ATTR_BUILD: addon.need_build,
|
ATTR_BUILD: addon.need_build,
|
||||||
ATTR_URL: addon.url,
|
ATTR_URL: addon.url,
|
||||||
|
ATTR_ICON: addon.with_icon,
|
||||||
ATTR_LOGO: addon.with_logo,
|
ATTR_LOGO: addon.with_logo,
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -122,6 +124,7 @@ class APIAddons(CoreSysAttributes):
|
|||||||
ATTR_HOST_DBUS: addon.host_dbus,
|
ATTR_HOST_DBUS: addon.host_dbus,
|
||||||
ATTR_PRIVILEGED: addon.privileged,
|
ATTR_PRIVILEGED: addon.privileged,
|
||||||
ATTR_DEVICES: self._pretty_devices(addon),
|
ATTR_DEVICES: self._pretty_devices(addon),
|
||||||
|
ATTR_ICON: addon.with_icon,
|
||||||
ATTR_LOGO: addon.with_logo,
|
ATTR_LOGO: addon.with_logo,
|
||||||
ATTR_CHANGELOG: addon.with_changelog,
|
ATTR_CHANGELOG: addon.with_changelog,
|
||||||
ATTR_WEBUI: addon.webui,
|
ATTR_WEBUI: addon.webui,
|
||||||
@@ -132,6 +135,8 @@ class APIAddons(CoreSysAttributes):
|
|||||||
ATTR_AUDIO: addon.with_audio,
|
ATTR_AUDIO: addon.with_audio,
|
||||||
ATTR_AUDIO_INPUT: addon.audio_input,
|
ATTR_AUDIO_INPUT: addon.audio_input,
|
||||||
ATTR_AUDIO_OUTPUT: addon.audio_output,
|
ATTR_AUDIO_OUTPUT: addon.audio_output,
|
||||||
|
ATTR_SERVICES: addon.services,
|
||||||
|
ATTR_DISCOVERY: addon.discovery,
|
||||||
}
|
}
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
@@ -140,7 +145,7 @@ class APIAddons(CoreSysAttributes):
|
|||||||
addon = self._extract_addon(request)
|
addon = self._extract_addon(request)
|
||||||
|
|
||||||
addon_schema = SCHEMA_OPTIONS.extend({
|
addon_schema = SCHEMA_OPTIONS.extend({
|
||||||
vol.Optional(ATTR_OPTIONS): addon.schema,
|
vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
|
||||||
})
|
})
|
||||||
|
|
||||||
body = await api_validate(addon_schema, request)
|
body = await api_validate(addon_schema, request)
|
||||||
@@ -158,6 +163,7 @@ class APIAddons(CoreSysAttributes):
|
|||||||
if ATTR_AUDIO_OUTPUT in body:
|
if ATTR_AUDIO_OUTPUT in body:
|
||||||
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
|
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
|
||||||
|
|
||||||
|
addon.save_data()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
@@ -242,12 +248,22 @@ class APIAddons(CoreSysAttributes):
|
|||||||
addon = self._extract_addon(request)
|
addon = self._extract_addon(request)
|
||||||
return addon.logs()
|
return addon.logs()
|
||||||
|
|
||||||
|
@api_process_raw(CONTENT_TYPE_PNG)
|
||||||
|
async def icon(self, request):
|
||||||
|
"""Return icon from addon."""
|
||||||
|
addon = self._extract_addon(request, check_installed=False)
|
||||||
|
if not addon.with_icon:
|
||||||
|
raise RuntimeError("No icon found!")
|
||||||
|
|
||||||
|
with addon.path_icon.open('rb') as png:
|
||||||
|
return png.read()
|
||||||
|
|
||||||
@api_process_raw(CONTENT_TYPE_PNG)
|
@api_process_raw(CONTENT_TYPE_PNG)
|
||||||
async def logo(self, request):
|
async def logo(self, request):
|
||||||
"""Return logo from addon."""
|
"""Return logo from addon."""
|
||||||
addon = self._extract_addon(request, check_installed=False)
|
addon = self._extract_addon(request, check_installed=False)
|
||||||
if not addon.with_logo:
|
if not addon.with_logo:
|
||||||
raise RuntimeError("No image found!")
|
raise RuntimeError("No logo found!")
|
||||||
|
|
||||||
with addon.path_logo.open('rb') as png:
|
with addon.path_logo.open('rb') as png:
|
||||||
return png.read()
|
return png.read()
|
||||||
@@ -267,7 +283,7 @@ class APIAddons(CoreSysAttributes):
|
|||||||
"""Write to stdin of addon."""
|
"""Write to stdin of addon."""
|
||||||
addon = self._extract_addon(request)
|
addon = self._extract_addon(request)
|
||||||
if not addon.with_stdin:
|
if not addon.with_stdin:
|
||||||
raise RuntimeError("STDIN not supported by addons")
|
raise RuntimeError("STDIN not supported by addon")
|
||||||
|
|
||||||
data = await request.read()
|
data = await request.read()
|
||||||
return await asyncio.shield(addon.write_stdin(data), loop=self._loop)
|
return await asyncio.shield(addon.write_stdin(data), loop=self._loop)
|
||||||
|
72
hassio/api/discovery.py
Normal file
72
hassio/api/discovery.py
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
"""Init file for HassIO network rest api."""
|
||||||
|
|
||||||
|
import voluptuous as vol
|
||||||
|
|
||||||
|
from .utils import api_process, api_validate
|
||||||
|
from ..const import (
|
||||||
|
ATTR_PROVIDER, ATTR_UUID, ATTR_COMPONENT, ATTR_PLATFORM, ATTR_CONFIG,
|
||||||
|
ATTR_DISCOVERY, REQUEST_FROM)
|
||||||
|
from ..coresys import CoreSysAttributes
|
||||||
|
|
||||||
|
|
||||||
|
SCHEMA_DISCOVERY = vol.Schema({
|
||||||
|
vol.Required(ATTR_COMPONENT): vol.Coerce(str),
|
||||||
|
vol.Optional(ATTR_PLATFORM): vol.Any(None, vol.Coerce(str)),
|
||||||
|
vol.Optional(ATTR_CONFIG): vol.Any(None, dict),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
class APIDiscovery(CoreSysAttributes):
|
||||||
|
"""Handle rest api for discovery functions."""
|
||||||
|
|
||||||
|
def _extract_message(self, request):
|
||||||
|
"""Extract discovery message from URL."""
|
||||||
|
message = self._services.discovery.get(request.match_info.get('uuid'))
|
||||||
|
if not message:
|
||||||
|
raise RuntimeError("Discovery message not found")
|
||||||
|
return message
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def list(self, request):
|
||||||
|
"""Show register services."""
|
||||||
|
discovery = []
|
||||||
|
for message in self._services.discovery.list_messages:
|
||||||
|
discovery.append({
|
||||||
|
ATTR_PROVIDER: message.provider,
|
||||||
|
ATTR_UUID: message.uuid,
|
||||||
|
ATTR_COMPONENT: message.component,
|
||||||
|
ATTR_PLATFORM: message.platform,
|
||||||
|
ATTR_CONFIG: message.config,
|
||||||
|
})
|
||||||
|
|
||||||
|
return {ATTR_DISCOVERY: discovery}
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def set_discovery(self, request):
|
||||||
|
"""Write data into a discovery pipeline."""
|
||||||
|
body = await api_validate(SCHEMA_DISCOVERY, request)
|
||||||
|
message = self._services.discovery.send(
|
||||||
|
provider=request[REQUEST_FROM], **body)
|
||||||
|
|
||||||
|
return {ATTR_UUID: message.uuid}
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def get_discovery(self, request):
|
||||||
|
"""Read data into a discovery message."""
|
||||||
|
message = self._extract_message(request)
|
||||||
|
|
||||||
|
return {
|
||||||
|
ATTR_PROVIDER: message.provider,
|
||||||
|
ATTR_UUID: message.uuid,
|
||||||
|
ATTR_COMPONENT: message.component,
|
||||||
|
ATTR_PLATFORM: message.platform,
|
||||||
|
ATTR_CONFIG: message.config,
|
||||||
|
}
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def del_discovery(self, request):
|
||||||
|
"""Delete data into a discovery message."""
|
||||||
|
message = self._extract_message(request)
|
||||||
|
|
||||||
|
self._services.discovery.remove(message)
|
||||||
|
return True
|
@@ -9,9 +9,9 @@ from ..const import (
|
|||||||
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
|
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
|
||||||
ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
|
ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
|
||||||
ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
|
ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
|
||||||
ATTR_BLK_READ, ATTR_BLK_WRITE, CONTENT_TYPE_BINARY)
|
ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, CONTENT_TYPE_BINARY)
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
from ..validate import NETWORK_PORT
|
from ..validate import NETWORK_PORT, DOCKER_IMAGE
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -22,11 +22,13 @@ SCHEMA_OPTIONS = vol.Schema({
|
|||||||
vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
|
vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
|
||||||
vol.Any(None, vol.Coerce(str)),
|
vol.Any(None, vol.Coerce(str)),
|
||||||
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
|
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
|
||||||
vol.Any(None, vol.Coerce(str)),
|
vol.Any(None, DOCKER_IMAGE),
|
||||||
vol.Optional(ATTR_PORT): NETWORK_PORT,
|
vol.Optional(ATTR_PORT): NETWORK_PORT,
|
||||||
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
||||||
vol.Optional(ATTR_SSL): vol.Boolean(),
|
vol.Optional(ATTR_SSL): vol.Boolean(),
|
||||||
vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
|
vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_WAIT_BOOT):
|
||||||
|
vol.All(vol.Coerce(int), vol.Range(min=60)),
|
||||||
})
|
})
|
||||||
|
|
||||||
SCHEMA_VERSION = vol.Schema({
|
SCHEMA_VERSION = vol.Schema({
|
||||||
@@ -49,6 +51,7 @@ class APIHomeAssistant(CoreSysAttributes):
|
|||||||
ATTR_PORT: self._homeassistant.api_port,
|
ATTR_PORT: self._homeassistant.api_port,
|
||||||
ATTR_SSL: self._homeassistant.api_ssl,
|
ATTR_SSL: self._homeassistant.api_ssl,
|
||||||
ATTR_WATCHDOG: self._homeassistant.watchdog,
|
ATTR_WATCHDOG: self._homeassistant.watchdog,
|
||||||
|
ATTR_WAIT_BOOT: self._homeassistant.wait_boot,
|
||||||
}
|
}
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
@@ -75,7 +78,10 @@ class APIHomeAssistant(CoreSysAttributes):
|
|||||||
if ATTR_WATCHDOG in body:
|
if ATTR_WATCHDOG in body:
|
||||||
self._homeassistant.watchdog = body[ATTR_WATCHDOG]
|
self._homeassistant.watchdog = body[ATTR_WATCHDOG]
|
||||||
|
|
||||||
self._homeassistant.save()
|
if ATTR_WAIT_BOOT in body:
|
||||||
|
self._homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
|
||||||
|
|
||||||
|
self._homeassistant.save_data()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
@@ -115,7 +121,7 @@ class APIHomeAssistant(CoreSysAttributes):
|
|||||||
@api_process
|
@api_process
|
||||||
def start(self, request):
|
def start(self, request):
|
||||||
"""Start homeassistant."""
|
"""Start homeassistant."""
|
||||||
return asyncio.shield(self._homeassistant.run(), loop=self._loop)
|
return asyncio.shield(self._homeassistant.start(), loop=self._loop)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
def restart(self, request):
|
def restart(self, request):
|
||||||
@@ -130,8 +136,8 @@ class APIHomeAssistant(CoreSysAttributes):
|
|||||||
@api_process
|
@api_process
|
||||||
async def check(self, request):
|
async def check(self, request):
|
||||||
"""Check config of homeassistant."""
|
"""Check config of homeassistant."""
|
||||||
code, message = await self._homeassistant.check_config()
|
result = await self._homeassistant.check_config()
|
||||||
if not code:
|
if not result.valid:
|
||||||
raise RuntimeError(message)
|
raise RuntimeError(result.log)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
@@ -49,6 +49,7 @@ class APIHost(CoreSysAttributes):
|
|||||||
if ATTR_AUDIO_INPUT in body:
|
if ATTR_AUDIO_INPUT in body:
|
||||||
self._config.audio_input = body[ATTR_AUDIO_INPUT]
|
self._config.audio_input = body[ATTR_AUDIO_INPUT]
|
||||||
|
|
||||||
|
self._config.save_data()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@api_process_hostcontrol
|
@api_process_hostcontrol
|
||||||
|
82
hassio/api/panel/hassio-app.html
Normal file
82
hassio/api/panel/hassio-app.html
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/hassio-app.html.gz
Normal file
BIN
hassio/api/panel/hassio-app.html.gz
Normal file
Binary file not shown.
37
hassio/api/panel/index.html
Normal file
37
hassio/api/panel/index.html
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<title>Hass.io</title>
|
||||||
|
<meta name='viewport' content='width=device-width, user-scalable=no'>
|
||||||
|
<style>
|
||||||
|
body {
|
||||||
|
height: 100vh;
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<hassio-app></hassio-app>
|
||||||
|
<script>
|
||||||
|
function addScript(src) {
|
||||||
|
var e = document.createElement('script');
|
||||||
|
e.src = src;
|
||||||
|
document.head.appendChild(e);
|
||||||
|
}
|
||||||
|
if (!window.parent.HASS_DEV) {
|
||||||
|
addScript('/frontend_es5/custom-elements-es5-adapter.js');
|
||||||
|
}
|
||||||
|
var webComponentsSupported = (
|
||||||
|
'customElements' in window &&
|
||||||
|
'import' in document.createElement('link') &&
|
||||||
|
'content' in document.createElement('template'));
|
||||||
|
if (!webComponentsSupported) {
|
||||||
|
addScript('/static/webcomponents-lite.js');
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
<link rel='import' href='./hassio-app.html'>
|
||||||
|
<link rel='import' href='/static/mdi.html' async>
|
||||||
|
</body>
|
||||||
|
</html>
|
BIN
hassio/api/panel/index.html.gz
Normal file
BIN
hassio/api/panel/index.html.gz
Normal file
Binary file not shown.
@@ -4,7 +4,7 @@ import logging
|
|||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
from aiohttp.web_exceptions import HTTPBadGateway
|
from aiohttp.web_exceptions import HTTPBadGateway, HTTPInternalServerError
|
||||||
from aiohttp.hdrs import CONTENT_TYPE
|
from aiohttp.hdrs import CONTENT_TYPE
|
||||||
import async_timeout
|
import async_timeout
|
||||||
|
|
||||||
@@ -17,6 +17,16 @@ _LOGGER = logging.getLogger(__name__)
|
|||||||
class APIProxy(CoreSysAttributes):
|
class APIProxy(CoreSysAttributes):
|
||||||
"""API Proxy for Home-Assistant."""
|
"""API Proxy for Home-Assistant."""
|
||||||
|
|
||||||
|
def _check_access(self, request):
|
||||||
|
"""Check the Hass.io token."""
|
||||||
|
hassio_token = request.headers.get(HEADER_HA_ACCESS)
|
||||||
|
addon = self._addons.from_uuid(hassio_token)
|
||||||
|
|
||||||
|
if not addon:
|
||||||
|
_LOGGER.warning("Unknown Home-Assistant API access!")
|
||||||
|
else:
|
||||||
|
_LOGGER.info("%s access from %s", request.path, addon.slug)
|
||||||
|
|
||||||
async def _api_client(self, request, path, timeout=300):
|
async def _api_client(self, request, path, timeout=300):
|
||||||
"""Return a client request with proxy origin for Home-Assistant."""
|
"""Return a client request with proxy origin for Home-Assistant."""
|
||||||
url = f"{self._homeassistant.api_url}/api/{path}"
|
url = f"{self._homeassistant.api_url}/api/{path}"
|
||||||
@@ -25,6 +35,7 @@ class APIProxy(CoreSysAttributes):
|
|||||||
data = None
|
data = None
|
||||||
headers = {}
|
headers = {}
|
||||||
method = getattr(self._websession_ssl, request.method.lower())
|
method = getattr(self._websession_ssl, request.method.lower())
|
||||||
|
params = request.query or None
|
||||||
|
|
||||||
# read data
|
# read data
|
||||||
with async_timeout.timeout(30, loop=self._loop):
|
with async_timeout.timeout(30, loop=self._loop):
|
||||||
@@ -42,7 +53,8 @@ class APIProxy(CoreSysAttributes):
|
|||||||
headers = None
|
headers = None
|
||||||
|
|
||||||
client = await method(
|
client = await method(
|
||||||
url, data=data, headers=headers, timeout=timeout
|
url, data=data, headers=headers, timeout=timeout,
|
||||||
|
params=params
|
||||||
)
|
)
|
||||||
|
|
||||||
return client
|
return client
|
||||||
@@ -55,14 +67,12 @@ class APIProxy(CoreSysAttributes):
|
|||||||
|
|
||||||
raise HTTPBadGateway()
|
raise HTTPBadGateway()
|
||||||
|
|
||||||
async def api(self, request):
|
async def stream(self, request):
|
||||||
"""Proxy HomeAssistant API Requests."""
|
"""Proxy HomeAssistant EventStream Requests."""
|
||||||
path = request.match_info.get('path', '')
|
self._check_access(request)
|
||||||
|
|
||||||
# API stream
|
_LOGGER.info("Home-Assistant EventStream start")
|
||||||
if path.startswith("stream"):
|
client = await self._api_client(request, 'stream', timeout=None)
|
||||||
_LOGGER.info("Home-Assistant Event-Stream start")
|
|
||||||
client = await self._api_client(request, path, timeout=None)
|
|
||||||
|
|
||||||
response = web.StreamResponse()
|
response = web.StreamResponse()
|
||||||
response.content_type = request.headers.get(CONTENT_TYPE)
|
response.content_type = request.headers.get(CONTENT_TYPE)
|
||||||
@@ -83,12 +93,16 @@ class APIProxy(CoreSysAttributes):
|
|||||||
|
|
||||||
finally:
|
finally:
|
||||||
client.close()
|
client.close()
|
||||||
|
_LOGGER.info("Home-Assistant EventStream close")
|
||||||
|
|
||||||
_LOGGER.info("Home-Assistant Event-Stream close")
|
return response
|
||||||
|
|
||||||
|
async def api(self, request):
|
||||||
|
"""Proxy HomeAssistant API Requests."""
|
||||||
|
self._check_access(request)
|
||||||
|
|
||||||
# Normal request
|
# Normal request
|
||||||
else:
|
path = request.match_info.get('path', '')
|
||||||
_LOGGER.info("Home-Assistant '/api/%s' request", path)
|
|
||||||
client = await self._api_client(request, path)
|
client = await self._api_client(request, path)
|
||||||
|
|
||||||
data = await client.read()
|
data = await client.read()
|
||||||
@@ -100,7 +114,7 @@ class APIProxy(CoreSysAttributes):
|
|||||||
|
|
||||||
async def _websocket_client(self):
|
async def _websocket_client(self):
|
||||||
"""Initialize a websocket api connection."""
|
"""Initialize a websocket api connection."""
|
||||||
url = f"{self.homeassistant.api_url}/api/websocket"
|
url = f"{self._homeassistant.api_url}/api/websocket"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
client = await self._websession_ssl.ws_connect(
|
client = await self._websession_ssl.ws_connect(
|
||||||
@@ -133,9 +147,29 @@ class APIProxy(CoreSysAttributes):
|
|||||||
await server.prepare(request)
|
await server.prepare(request)
|
||||||
|
|
||||||
# handle authentication
|
# handle authentication
|
||||||
await server.send_json({'type': 'auth_required'})
|
try:
|
||||||
await server.receive_json() # get internal token
|
await server.send_json({
|
||||||
await server.send_json({'type': 'auth_ok'})
|
'type': 'auth_required',
|
||||||
|
'ha_version': self._homeassistant.version,
|
||||||
|
})
|
||||||
|
|
||||||
|
# Check API access
|
||||||
|
response = await server.receive_json()
|
||||||
|
hassio_token = response.get('api_password')
|
||||||
|
addon = self._addons.from_uuid(hassio_token)
|
||||||
|
|
||||||
|
if not addon:
|
||||||
|
_LOGGER.warning("Unauthorized websocket access!")
|
||||||
|
else:
|
||||||
|
_LOGGER.info("Websocket access from %s", addon.slug)
|
||||||
|
|
||||||
|
await server.send_json({
|
||||||
|
'type': 'auth_ok',
|
||||||
|
'ha_version': self._homeassistant.version,
|
||||||
|
})
|
||||||
|
except (RuntimeError, ValueError) as err:
|
||||||
|
_LOGGER.error("Can't initialize handshake: %s", err)
|
||||||
|
raise HTTPInternalServerError() from None
|
||||||
|
|
||||||
# init connection to hass
|
# init connection to hass
|
||||||
client = await self._websocket_client()
|
client = await self._websocket_client()
|
||||||
|
@@ -1,98 +1,50 @@
|
|||||||
"""Init file for HassIO security rest api."""
|
"""Handle security part of this API."""
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import io
|
|
||||||
import logging
|
import logging
|
||||||
import hashlib
|
import re
|
||||||
import os
|
|
||||||
|
|
||||||
from aiohttp import web
|
from aiohttp.web import middleware
|
||||||
import voluptuous as vol
|
from aiohttp.web_exceptions import HTTPUnauthorized
|
||||||
import pyotp
|
|
||||||
import pyqrcode
|
|
||||||
|
|
||||||
from .utils import api_process, api_validate, hash_password
|
from ..const import HEADER_TOKEN, REQUEST_FROM
|
||||||
from ..const import ATTR_INITIALIZE, ATTR_PASSWORD, ATTR_TOTP, ATTR_SESSION
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
SCHEMA_PASSWORD = vol.Schema({
|
NO_SECURITY_CHECK = set((
|
||||||
vol.Required(ATTR_PASSWORD): vol.Coerce(str),
|
re.compile(r"^/homeassistant/api/.*$"),
|
||||||
})
|
re.compile(r"^/homeassistant/websocket$"),
|
||||||
|
re.compile(r"^/supervisor/ping$"),
|
||||||
SCHEMA_SESSION = SCHEMA_PASSWORD.extend({
|
))
|
||||||
vol.Optional(ATTR_TOTP, default=None): vol.Coerce(str),
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
class APISecurity(CoreSysAttributes):
|
@middleware
|
||||||
"""Handle rest api for security functions."""
|
async def security_layer(request, handler):
|
||||||
|
"""Check security access of this layer."""
|
||||||
|
coresys = request.app['coresys']
|
||||||
|
hassio_token = request.headers.get(HEADER_TOKEN)
|
||||||
|
|
||||||
def _check_password(self, body):
|
# Ignore security check
|
||||||
"""Check if password is valid and security is initialize."""
|
for rule in NO_SECURITY_CHECK:
|
||||||
if not self._config.security_initialize:
|
if rule.match(request.path):
|
||||||
raise RuntimeError("First set a password")
|
_LOGGER.debug("Passthrough %s", request.path)
|
||||||
|
return await handler(request)
|
||||||
|
|
||||||
password = hash_password(body[ATTR_PASSWORD])
|
# Need to be removed later
|
||||||
if password != self._config.security_password:
|
if not hassio_token:
|
||||||
raise RuntimeError("Wrong password")
|
_LOGGER.warning("Invalid token for access %s", request.path)
|
||||||
|
request[REQUEST_FROM] = 'UNKNOWN'
|
||||||
|
return await handler(request)
|
||||||
|
|
||||||
@api_process
|
# Home-Assistant
|
||||||
async def info(self, request):
|
if hassio_token == coresys.homeassistant.uuid:
|
||||||
"""Return host information."""
|
_LOGGER.debug("%s access from Home-Assistant", request.path)
|
||||||
return {
|
request[REQUEST_FROM] = 'homeassistant'
|
||||||
ATTR_INITIALIZE: self._config.security_initialize,
|
return await handler(request)
|
||||||
ATTR_TOTP: self._config.security_totp is not None,
|
|
||||||
}
|
|
||||||
|
|
||||||
@api_process
|
# Add-on
|
||||||
async def options(self, request):
|
addon = coresys.addons.from_uuid(hassio_token)
|
||||||
"""Set options / password."""
|
if addon:
|
||||||
body = await api_validate(SCHEMA_PASSWORD, request)
|
_LOGGER.info("%s access from %s", request.path, addon.slug)
|
||||||
|
request[REQUEST_FROM] = addon.slug
|
||||||
|
return await handler(request)
|
||||||
|
|
||||||
if self._config.security_initialize:
|
raise HTTPUnauthorized()
|
||||||
raise RuntimeError("Password is already set!")
|
|
||||||
|
|
||||||
self._config.security_password = hash_password(body[ATTR_PASSWORD])
|
|
||||||
self._config.security_initialize = True
|
|
||||||
return True
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def totp(self, request):
|
|
||||||
"""Set and initialze TOTP."""
|
|
||||||
body = await api_validate(SCHEMA_PASSWORD, request)
|
|
||||||
self._check_password(body)
|
|
||||||
|
|
||||||
# generate TOTP
|
|
||||||
totp_init_key = pyotp.random_base32()
|
|
||||||
totp = pyotp.TOTP(totp_init_key)
|
|
||||||
|
|
||||||
# init qrcode
|
|
||||||
buff = io.BytesIO()
|
|
||||||
|
|
||||||
qrcode = pyqrcode.create(totp.provisioning_uri("Hass.IO"))
|
|
||||||
qrcode.svg(buff)
|
|
||||||
|
|
||||||
# finish
|
|
||||||
self._config.security_totp = totp_init_key
|
|
||||||
return web.Response(body=buff.getvalue(), content_type='image/svg+xml')
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def session(self, request):
|
|
||||||
"""Set and initialze session."""
|
|
||||||
body = await api_validate(SCHEMA_SESSION, request)
|
|
||||||
self._check_password(body)
|
|
||||||
|
|
||||||
# check TOTP
|
|
||||||
if self._config.security_totp:
|
|
||||||
totp = pyotp.TOTP(self._config.security_totp)
|
|
||||||
if body[ATTR_TOTP] != totp.now():
|
|
||||||
raise RuntimeError("Invalid TOTP token!")
|
|
||||||
|
|
||||||
# create session
|
|
||||||
valid_until = datetime.now() + timedelta(days=1)
|
|
||||||
session = hashlib.sha256(os.urandom(54)).hexdigest()
|
|
||||||
|
|
||||||
# store session
|
|
||||||
self._config.add_security_session(session, valid_until)
|
|
||||||
return {ATTR_SESSION: session}
|
|
||||||
|
55
hassio/api/services.py
Normal file
55
hassio/api/services.py
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
"""Init file for HassIO network rest api."""
|
||||||
|
|
||||||
|
from .utils import api_process, api_validate
|
||||||
|
from ..const import (
|
||||||
|
ATTR_AVAILABLE, ATTR_PROVIDER, ATTR_SLUG, ATTR_SERVICES, REQUEST_FROM)
|
||||||
|
from ..coresys import CoreSysAttributes
|
||||||
|
|
||||||
|
|
||||||
|
class APIServices(CoreSysAttributes):
|
||||||
|
"""Handle rest api for services functions."""
|
||||||
|
|
||||||
|
def _extract_service(self, request):
|
||||||
|
"""Return service and if not exists trow a exception."""
|
||||||
|
service = self._services.get(request.match_info.get('service'))
|
||||||
|
if not service:
|
||||||
|
raise RuntimeError("Service not exists")
|
||||||
|
|
||||||
|
return service
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def list(self, request):
|
||||||
|
"""Show register services."""
|
||||||
|
services = []
|
||||||
|
for service in self._services.list_services:
|
||||||
|
services.append({
|
||||||
|
ATTR_SLUG: service.slug,
|
||||||
|
ATTR_AVAILABLE: service.enabled,
|
||||||
|
ATTR_PROVIDER: service.provider,
|
||||||
|
})
|
||||||
|
|
||||||
|
return {ATTR_SERVICES: services}
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def set_service(self, request):
|
||||||
|
"""Write data into a service."""
|
||||||
|
service = self._extract_service(request)
|
||||||
|
body = await api_validate(service.schema, request)
|
||||||
|
|
||||||
|
return service.set_service_data(request[REQUEST_FROM], body)
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def get_service(self, request):
|
||||||
|
"""Read data into a service."""
|
||||||
|
service = self._extract_service(request)
|
||||||
|
|
||||||
|
return {
|
||||||
|
ATTR_AVAILABLE: service.enabled,
|
||||||
|
service.slug: service.get_service_data(),
|
||||||
|
}
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def del_service(self, request):
|
||||||
|
"""Delete data into a service."""
|
||||||
|
service = self._extract_service(request)
|
||||||
|
return service.del_service_data(request[REQUEST_FROM])
|
@@ -1,7 +1,10 @@
|
|||||||
"""Init file for HassIO snapshot rest api."""
|
"""Init file for HassIO snapshot rest api."""
|
||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
from tempfile import TemporaryDirectory
|
||||||
|
|
||||||
|
from aiohttp import web
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from .utils import api_process, api_validate
|
from .utils import api_process, api_validate
|
||||||
@@ -9,7 +12,7 @@ from ..snapshots.validate import ALL_FOLDERS
|
|||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
|
ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
|
||||||
ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
|
ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
|
||||||
ATTR_SNAPSHOTS)
|
ATTR_SNAPSHOTS, ATTR_PASSWORD, ATTR_PROTECTED, CONTENT_TYPE_TAR)
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
@@ -17,18 +20,28 @@ _LOGGER = logging.getLogger(__name__)
|
|||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
# pylint: disable=no-value-for-parameter
|
||||||
SCHEMA_RESTORE_PARTIAL = vol.Schema({
|
SCHEMA_RESTORE_PARTIAL = vol.Schema({
|
||||||
|
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
||||||
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
|
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
|
||||||
vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
|
vol.Optional(ATTR_ADDONS):
|
||||||
vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
|
vol.All([vol.Coerce(str)], vol.Unique()),
|
||||||
|
vol.Optional(ATTR_FOLDERS):
|
||||||
|
vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
|
||||||
|
})
|
||||||
|
|
||||||
|
SCHEMA_RESTORE_FULL = vol.Schema({
|
||||||
|
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
||||||
})
|
})
|
||||||
|
|
||||||
SCHEMA_SNAPSHOT_FULL = vol.Schema({
|
SCHEMA_SNAPSHOT_FULL = vol.Schema({
|
||||||
vol.Optional(ATTR_NAME): vol.Coerce(str),
|
vol.Optional(ATTR_NAME): vol.Coerce(str),
|
||||||
|
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
||||||
})
|
})
|
||||||
|
|
||||||
SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
|
SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
|
||||||
vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
|
vol.Optional(ATTR_ADDONS):
|
||||||
vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
|
vol.All([vol.Coerce(str)], vol.Unique()),
|
||||||
|
vol.Optional(ATTR_FOLDERS):
|
||||||
|
vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
@@ -51,6 +64,8 @@ class APISnapshots(CoreSysAttributes):
|
|||||||
ATTR_SLUG: snapshot.slug,
|
ATTR_SLUG: snapshot.slug,
|
||||||
ATTR_NAME: snapshot.name,
|
ATTR_NAME: snapshot.name,
|
||||||
ATTR_DATE: snapshot.date,
|
ATTR_DATE: snapshot.date,
|
||||||
|
ATTR_TYPE: snapshot.sys_type,
|
||||||
|
ATTR_PROTECTED: snapshot.protected,
|
||||||
})
|
})
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -74,6 +89,7 @@ class APISnapshots(CoreSysAttributes):
|
|||||||
ATTR_SLUG: addon_data[ATTR_SLUG],
|
ATTR_SLUG: addon_data[ATTR_SLUG],
|
||||||
ATTR_NAME: addon_data[ATTR_NAME],
|
ATTR_NAME: addon_data[ATTR_NAME],
|
||||||
ATTR_VERSION: addon_data[ATTR_VERSION],
|
ATTR_VERSION: addon_data[ATTR_VERSION],
|
||||||
|
ATTR_SIZE: addon_data[ATTR_SIZE],
|
||||||
})
|
})
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -82,6 +98,7 @@ class APISnapshots(CoreSysAttributes):
|
|||||||
ATTR_NAME: snapshot.name,
|
ATTR_NAME: snapshot.name,
|
||||||
ATTR_DATE: snapshot.date,
|
ATTR_DATE: snapshot.date,
|
||||||
ATTR_SIZE: snapshot.size,
|
ATTR_SIZE: snapshot.size,
|
||||||
|
ATTR_PROTECTED: snapshot.protected,
|
||||||
ATTR_HOMEASSISTANT: snapshot.homeassistant_version,
|
ATTR_HOMEASSISTANT: snapshot.homeassistant_version,
|
||||||
ATTR_ADDONS: data_addons,
|
ATTR_ADDONS: data_addons,
|
||||||
ATTR_REPOSITORIES: snapshot.repositories,
|
ATTR_REPOSITORIES: snapshot.repositories,
|
||||||
@@ -92,28 +109,40 @@ class APISnapshots(CoreSysAttributes):
|
|||||||
async def snapshot_full(self, request):
|
async def snapshot_full(self, request):
|
||||||
"""Full-Snapshot a snapshot."""
|
"""Full-Snapshot a snapshot."""
|
||||||
body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
|
body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
|
||||||
return await asyncio.shield(
|
snapshot = await asyncio.shield(
|
||||||
self._snapshots.do_snapshot_full(**body), loop=self._loop)
|
self._snapshots.do_snapshot_full(**body), loop=self._loop)
|
||||||
|
|
||||||
|
if snapshot:
|
||||||
|
return {ATTR_SLUG: snapshot.slug}
|
||||||
|
return False
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def snapshot_partial(self, request):
|
async def snapshot_partial(self, request):
|
||||||
"""Partial-Snapshot a snapshot."""
|
"""Partial-Snapshot a snapshot."""
|
||||||
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
|
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
|
||||||
return await asyncio.shield(
|
snapshot = await asyncio.shield(
|
||||||
self._snapshots.do_snapshot_partial(**body), loop=self._loop)
|
self._snapshots.do_snapshot_partial(**body), loop=self._loop)
|
||||||
|
|
||||||
|
if snapshot:
|
||||||
|
return {ATTR_SLUG: snapshot.slug}
|
||||||
|
return False
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
def restore_full(self, request):
|
async def restore_full(self, request):
|
||||||
"""Full-Restore a snapshot."""
|
"""Full-Restore a snapshot."""
|
||||||
snapshot = self._extract_snapshot(request)
|
snapshot = self._extract_snapshot(request)
|
||||||
return asyncio.shield(
|
body = await api_validate(SCHEMA_RESTORE_FULL, request)
|
||||||
self._snapshots.do_restore_full(snapshot), loop=self._loop)
|
|
||||||
|
return await asyncio.shield(
|
||||||
|
self._snapshots.do_restore_full(snapshot, **body),
|
||||||
|
loop=self._loop
|
||||||
|
)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def restore_partial(self, request):
|
async def restore_partial(self, request):
|
||||||
"""Partial-Restore a snapshot."""
|
"""Partial-Restore a snapshot."""
|
||||||
snapshot = self._extract_snapshot(request)
|
snapshot = self._extract_snapshot(request)
|
||||||
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
|
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
|
||||||
|
|
||||||
return await asyncio.shield(
|
return await asyncio.shield(
|
||||||
self._snapshots.do_restore_partial(snapshot, **body),
|
self._snapshots.do_restore_partial(snapshot, **body),
|
||||||
@@ -125,3 +154,37 @@ class APISnapshots(CoreSysAttributes):
|
|||||||
"""Remove a snapshot."""
|
"""Remove a snapshot."""
|
||||||
snapshot = self._extract_snapshot(request)
|
snapshot = self._extract_snapshot(request)
|
||||||
return self._snapshots.remove(snapshot)
|
return self._snapshots.remove(snapshot)
|
||||||
|
|
||||||
|
async def download(self, request):
|
||||||
|
"""Download a snapshot file."""
|
||||||
|
snapshot = self._extract_snapshot(request)
|
||||||
|
|
||||||
|
_LOGGER.info("Download snapshot %s", snapshot.slug)
|
||||||
|
response = web.FileResponse(snapshot.tarfile)
|
||||||
|
response.content_type = CONTENT_TYPE_TAR
|
||||||
|
return response
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def upload(self, request):
|
||||||
|
"""Upload a snapshot file."""
|
||||||
|
with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp_dir:
|
||||||
|
tar_file = Path(temp_dir, f"snapshot.tar")
|
||||||
|
|
||||||
|
try:
|
||||||
|
with tar_file.open('wb') as snapshot:
|
||||||
|
async for data in request.content.iter_any():
|
||||||
|
snapshot.write(data)
|
||||||
|
|
||||||
|
except OSError as err:
|
||||||
|
_LOGGER.error("Can't write new snapshot file: %s", err)
|
||||||
|
return False
|
||||||
|
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
snapshot = await asyncio.shield(
|
||||||
|
self._snapshots.import_snapshot(tar_file), loop=self._loop)
|
||||||
|
|
||||||
|
if snapshot:
|
||||||
|
return {ATTR_SLUG: snapshot.slug}
|
||||||
|
return False
|
||||||
|
@@ -11,16 +11,16 @@ from ..const import (
|
|||||||
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
|
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
|
||||||
ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
|
ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
|
||||||
ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
|
ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
|
||||||
ATTR_BLK_WRITE, CONTENT_TYPE_BINARY)
|
ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
from ..validate import validate_timezone, WAIT_BOOT
|
from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
SCHEMA_OPTIONS = vol.Schema({
|
SCHEMA_OPTIONS = vol.Schema({
|
||||||
# pylint: disable=no-value-for-parameter
|
# pylint: disable=no-value-for-parameter
|
||||||
vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
|
vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
|
||||||
vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
|
vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
|
||||||
vol.Optional(ATTR_TIMEZONE): validate_timezone,
|
vol.Optional(ATTR_TIMEZONE): validate_timezone,
|
||||||
vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
|
vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
|
||||||
})
|
})
|
||||||
@@ -52,6 +52,7 @@ class APISupervisor(CoreSysAttributes):
|
|||||||
ATTR_VERSION: addon.last_version,
|
ATTR_VERSION: addon.last_version,
|
||||||
ATTR_INSTALLED: addon.version_installed,
|
ATTR_INSTALLED: addon.version_installed,
|
||||||
ATTR_REPOSITORY: addon.repository,
|
ATTR_REPOSITORY: addon.repository,
|
||||||
|
ATTR_ICON: addon.with_icon,
|
||||||
ATTR_LOGO: addon.with_logo,
|
ATTR_LOGO: addon.with_logo,
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -84,8 +85,8 @@ class APISupervisor(CoreSysAttributes):
|
|||||||
new = set(body[ATTR_ADDONS_REPOSITORIES])
|
new = set(body[ATTR_ADDONS_REPOSITORIES])
|
||||||
await asyncio.shield(self._addons.load_repositories(new))
|
await asyncio.shield(self._addons.load_repositories(new))
|
||||||
|
|
||||||
self._updater.save()
|
self._updater.save_data()
|
||||||
self._config.save()
|
self._config.save_data()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
|
@@ -13,9 +13,10 @@ from .const import SOCKET_DOCKER
|
|||||||
from .coresys import CoreSys
|
from .coresys import CoreSys
|
||||||
from .supervisor import Supervisor
|
from .supervisor import Supervisor
|
||||||
from .homeassistant import HomeAssistant
|
from .homeassistant import HomeAssistant
|
||||||
from .snapshots import SnapshotsManager
|
from .snapshots import SnapshotManager
|
||||||
from .tasks import Tasks
|
from .tasks import Tasks
|
||||||
from .updater import Updater
|
from .updater import Updater
|
||||||
|
from .services import ServiceManager
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -30,8 +31,9 @@ def initialize_coresys(loop):
|
|||||||
coresys.supervisor = Supervisor(coresys)
|
coresys.supervisor = Supervisor(coresys)
|
||||||
coresys.homeassistant = HomeAssistant(coresys)
|
coresys.homeassistant = HomeAssistant(coresys)
|
||||||
coresys.addons = AddonManager(coresys)
|
coresys.addons = AddonManager(coresys)
|
||||||
coresys.snapshots = SnapshotsManager(coresys)
|
coresys.snapshots = SnapshotManager(coresys)
|
||||||
coresys.tasks = Tasks(coresys)
|
coresys.tasks = Tasks(coresys)
|
||||||
|
coresys.services = ServiceManager(coresys)
|
||||||
|
|
||||||
# bootstrap config
|
# bootstrap config
|
||||||
initialize_system_data(coresys)
|
initialize_system_data(coresys)
|
||||||
|
@@ -5,8 +5,7 @@ import os
|
|||||||
from pathlib import Path, PurePath
|
from pathlib import Path, PurePath
|
||||||
|
|
||||||
from .const import (
|
from .const import (
|
||||||
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
|
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
|
||||||
ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
|
|
||||||
ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
|
ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
|
||||||
from .utils.dt import parse_datetime
|
from .utils.dt import parse_datetime
|
||||||
from .utils.json import JsonConfig
|
from .utils.json import JsonConfig
|
||||||
@@ -46,7 +45,6 @@ class CoreConfig(JsonConfig):
|
|||||||
def timezone(self, value):
|
def timezone(self, value):
|
||||||
"""Set system timezone."""
|
"""Set system timezone."""
|
||||||
self._data[ATTR_TIMEZONE] = value
|
self._data[ATTR_TIMEZONE] = value
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def wait_boot(self):
|
def wait_boot(self):
|
||||||
@@ -57,7 +55,6 @@ class CoreConfig(JsonConfig):
|
|||||||
def wait_boot(self, value):
|
def wait_boot(self, value):
|
||||||
"""Set wait boot time."""
|
"""Set wait boot time."""
|
||||||
self._data[ATTR_WAIT_BOOT] = value
|
self._data[ATTR_WAIT_BOOT] = value
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def last_boot(self):
|
def last_boot(self):
|
||||||
@@ -73,7 +70,6 @@ class CoreConfig(JsonConfig):
|
|||||||
def last_boot(self, value):
|
def last_boot(self, value):
|
||||||
"""Set last boot datetime."""
|
"""Set last boot datetime."""
|
||||||
self._data[ATTR_LAST_BOOT] = value.isoformat()
|
self._data[ATTR_LAST_BOOT] = value.isoformat()
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path_hassio(self):
|
def path_hassio(self):
|
||||||
@@ -171,7 +167,6 @@ class CoreConfig(JsonConfig):
|
|||||||
return
|
return
|
||||||
|
|
||||||
self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
|
self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
|
||||||
self.save()
|
|
||||||
|
|
||||||
def drop_addon_repository(self, repo):
|
def drop_addon_repository(self, repo):
|
||||||
"""Remove a custom repository from list."""
|
"""Remove a custom repository from list."""
|
||||||
@@ -179,60 +174,6 @@ class CoreConfig(JsonConfig):
|
|||||||
return
|
return
|
||||||
|
|
||||||
self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
|
self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def security_initialize(self):
|
|
||||||
"""Return is security was initialize."""
|
|
||||||
return self._data[ATTR_SECURITY]
|
|
||||||
|
|
||||||
@security_initialize.setter
|
|
||||||
def security_initialize(self, value):
|
|
||||||
"""Set is security initialize."""
|
|
||||||
self._data[ATTR_SECURITY] = value
|
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def security_totp(self):
|
|
||||||
"""Return the TOTP key."""
|
|
||||||
return self._data.get(ATTR_TOTP)
|
|
||||||
|
|
||||||
@security_totp.setter
|
|
||||||
def security_totp(self, value):
|
|
||||||
"""Set the TOTP key."""
|
|
||||||
self._data[ATTR_TOTP] = value
|
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def security_password(self):
|
|
||||||
"""Return the password key."""
|
|
||||||
return self._data.get(ATTR_PASSWORD)
|
|
||||||
|
|
||||||
@security_password.setter
|
|
||||||
def security_password(self, value):
|
|
||||||
"""Set the password key."""
|
|
||||||
self._data[ATTR_PASSWORD] = value
|
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def security_sessions(self):
|
|
||||||
"""Return api sessions."""
|
|
||||||
return {
|
|
||||||
session: parse_datetime(until) for
|
|
||||||
session, until in self._data[ATTR_SESSIONS].items()
|
|
||||||
}
|
|
||||||
|
|
||||||
def add_security_session(self, session, valid):
|
|
||||||
"""Set the a new session."""
|
|
||||||
self._data[ATTR_SESSIONS].update(
|
|
||||||
{session: valid.isoformat()}
|
|
||||||
)
|
|
||||||
self.save()
|
|
||||||
|
|
||||||
def drop_security_session(self, session):
|
|
||||||
"""Delete the a session."""
|
|
||||||
self._data[ATTR_SESSIONS].pop(session, None)
|
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def audio_output(self):
|
def audio_output(self):
|
||||||
@@ -243,7 +184,6 @@ class CoreConfig(JsonConfig):
|
|||||||
def audio_output(self, value):
|
def audio_output(self, value):
|
||||||
"""Set ALSA audio output card,dev."""
|
"""Set ALSA audio output card,dev."""
|
||||||
self._data[ATTR_AUDIO_OUTPUT] = value
|
self._data[ATTR_AUDIO_OUTPUT] = value
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def audio_input(self):
|
def audio_input(self):
|
||||||
@@ -254,4 +194,3 @@ class CoreConfig(JsonConfig):
|
|||||||
def audio_input(self, value):
|
def audio_input(self, value):
|
||||||
"""Set ALSA audio input card,dev."""
|
"""Set ALSA audio input card,dev."""
|
||||||
self._data[ATTR_AUDIO_INPUT] = value
|
self._data[ATTR_AUDIO_INPUT] = value
|
||||||
self.save()
|
|
||||||
|
@@ -2,7 +2,7 @@
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from ipaddress import ip_network
|
from ipaddress import ip_network
|
||||||
|
|
||||||
HASSIO_VERSION = '0.80'
|
HASSIO_VERSION = '0.94'
|
||||||
|
|
||||||
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
|
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
|
||||||
'hassio/{}/version.json')
|
'hassio/{}/version.json')
|
||||||
@@ -15,6 +15,7 @@ FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
|
|||||||
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
|
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
|
||||||
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
|
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
|
||||||
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
|
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
|
||||||
|
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
|
||||||
|
|
||||||
SOCKET_DOCKER = Path("/var/run/docker.sock")
|
SOCKET_DOCKER = Path("/var/run/docker.sock")
|
||||||
SOCKET_HC = Path("/var/run/hassio-hc.sock")
|
SOCKET_HC = Path("/var/run/hassio-hc.sock")
|
||||||
@@ -42,7 +43,14 @@ CONTENT_TYPE_BINARY = 'application/octet-stream'
|
|||||||
CONTENT_TYPE_PNG = 'image/png'
|
CONTENT_TYPE_PNG = 'image/png'
|
||||||
CONTENT_TYPE_JSON = 'application/json'
|
CONTENT_TYPE_JSON = 'application/json'
|
||||||
CONTENT_TYPE_TEXT = 'text/plain'
|
CONTENT_TYPE_TEXT = 'text/plain'
|
||||||
|
CONTENT_TYPE_TAR = 'application/tar'
|
||||||
HEADER_HA_ACCESS = 'x-ha-access'
|
HEADER_HA_ACCESS = 'x-ha-access'
|
||||||
|
HEADER_TOKEN = 'X-HASSIO-KEY'
|
||||||
|
|
||||||
|
ENV_TOKEN = 'HASSIO_TOKEN'
|
||||||
|
ENV_TIME = 'TZ'
|
||||||
|
|
||||||
|
REQUEST_FROM = 'HASSIO_FROM'
|
||||||
|
|
||||||
ATTR_WAIT_BOOT = 'wait_boot'
|
ATTR_WAIT_BOOT = 'wait_boot'
|
||||||
ATTR_WATCHDOG = 'watchdog'
|
ATTR_WATCHDOG = 'watchdog'
|
||||||
@@ -79,6 +87,7 @@ ATTR_DETACHED = 'detached'
|
|||||||
ATTR_STATE = 'state'
|
ATTR_STATE = 'state'
|
||||||
ATTR_SCHEMA = 'schema'
|
ATTR_SCHEMA = 'schema'
|
||||||
ATTR_IMAGE = 'image'
|
ATTR_IMAGE = 'image'
|
||||||
|
ATTR_ICON = 'icon'
|
||||||
ATTR_LOGO = 'logo'
|
ATTR_LOGO = 'logo'
|
||||||
ATTR_STDIN = 'stdin'
|
ATTR_STDIN = 'stdin'
|
||||||
ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
|
ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
|
||||||
@@ -135,6 +144,22 @@ ATTR_MEMORY_LIMIT = 'memory_limit'
|
|||||||
ATTR_MEMORY_USAGE = 'memory_usage'
|
ATTR_MEMORY_USAGE = 'memory_usage'
|
||||||
ATTR_BLK_READ = 'blk_read'
|
ATTR_BLK_READ = 'blk_read'
|
||||||
ATTR_BLK_WRITE = 'blk_write'
|
ATTR_BLK_WRITE = 'blk_write'
|
||||||
|
ATTR_PROVIDER = 'provider'
|
||||||
|
ATTR_AVAILABLE = 'available'
|
||||||
|
ATTR_HOST = 'host'
|
||||||
|
ATTR_USERNAME = 'username'
|
||||||
|
ATTR_PROTOCOL = 'protocol'
|
||||||
|
ATTR_DISCOVERY = 'discovery'
|
||||||
|
ATTR_PLATFORM = 'platform'
|
||||||
|
ATTR_COMPONENT = 'component'
|
||||||
|
ATTR_CONFIG = 'config'
|
||||||
|
ATTR_DISCOVERY_ID = 'discovery_id'
|
||||||
|
ATTR_SERVICES = 'services'
|
||||||
|
ATTR_DISCOVERY = 'discovery'
|
||||||
|
ATTR_PROTECTED = 'protected'
|
||||||
|
ATTR_CRYPTO = 'crypto'
|
||||||
|
|
||||||
|
SERVICE_MQTT = 'mqtt'
|
||||||
|
|
||||||
STARTUP_INITIALIZE = 'initialize'
|
STARTUP_INITIALIZE = 'initialize'
|
||||||
STARTUP_SYSTEM = 'system'
|
STARTUP_SYSTEM = 'system'
|
||||||
@@ -170,3 +195,5 @@ FOLDER_SSL = 'ssl'
|
|||||||
|
|
||||||
SNAPSHOT_FULL = 'full'
|
SNAPSHOT_FULL = 'full'
|
||||||
SNAPSHOT_PARTIAL = 'partial'
|
SNAPSHOT_PARTIAL = 'partial'
|
||||||
|
|
||||||
|
CRYPTO_AES128 = 'aes128'
|
||||||
|
@@ -44,6 +44,9 @@ class HassIO(CoreSysAttributes):
|
|||||||
# load last available data
|
# load last available data
|
||||||
await self._snapshots.load()
|
await self._snapshots.load()
|
||||||
|
|
||||||
|
# load services
|
||||||
|
await self._services.load()
|
||||||
|
|
||||||
# start dns forwarding
|
# start dns forwarding
|
||||||
self._loop.create_task(self._dns.start())
|
self._loop.create_task(self._dns.start())
|
||||||
|
|
||||||
@@ -54,8 +57,9 @@ class HassIO(CoreSysAttributes):
|
|||||||
"""Start HassIO orchestration."""
|
"""Start HassIO orchestration."""
|
||||||
# on release channel, try update itself
|
# on release channel, try update itself
|
||||||
# on beta channel, only read new versions
|
# on beta channel, only read new versions
|
||||||
if not self._updater.beta_channel:
|
if not self._updater.beta_channel and self._supervisor.need_update:
|
||||||
await self._supervisor.update()
|
if await self._supervisor.update():
|
||||||
|
return
|
||||||
else:
|
else:
|
||||||
_LOGGER.info("Ignore Hass.io auto updates on beta mode")
|
_LOGGER.info("Ignore Hass.io auto updates on beta mode")
|
||||||
|
|
||||||
@@ -69,6 +73,9 @@ class HassIO(CoreSysAttributes):
|
|||||||
_LOGGER.info("Hass.io reboot detected")
|
_LOGGER.info("Hass.io reboot detected")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# reset register services / discovery
|
||||||
|
self._services.reset()
|
||||||
|
|
||||||
# start addon mark as system
|
# start addon mark as system
|
||||||
await self._addons.auto_boot(STARTUP_SYSTEM)
|
await self._addons.auto_boot(STARTUP_SYSTEM)
|
||||||
|
|
||||||
@@ -77,13 +84,14 @@ class HassIO(CoreSysAttributes):
|
|||||||
|
|
||||||
# run HomeAssistant
|
# run HomeAssistant
|
||||||
if self._homeassistant.boot:
|
if self._homeassistant.boot:
|
||||||
await self._homeassistant.run()
|
await self._homeassistant.start()
|
||||||
|
|
||||||
# start addon mark as application
|
# start addon mark as application
|
||||||
await self._addons.auto_boot(STARTUP_APPLICATION)
|
await self._addons.auto_boot(STARTUP_APPLICATION)
|
||||||
|
|
||||||
# store new last boot
|
# store new last boot
|
||||||
self._config.last_boot = self._hardware.last_boot
|
self._config.last_boot = self._hardware.last_boot
|
||||||
|
self._config.save_data()
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
# Add core tasks into scheduler
|
# Add core tasks into scheduler
|
||||||
|
@@ -40,6 +40,7 @@ class CoreSys(object):
|
|||||||
self._updater = None
|
self._updater = None
|
||||||
self._snapshots = None
|
self._snapshots = None
|
||||||
self._tasks = None
|
self._tasks = None
|
||||||
|
self._services = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def arch(self):
|
def arch(self):
|
||||||
@@ -155,19 +156,19 @@ class CoreSys(object):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def snapshots(self):
|
def snapshots(self):
|
||||||
"""Return SnapshotsManager object."""
|
"""Return SnapshotManager object."""
|
||||||
return self._snapshots
|
return self._snapshots
|
||||||
|
|
||||||
@snapshots.setter
|
@snapshots.setter
|
||||||
def snapshots(self, value):
|
def snapshots(self, value):
|
||||||
"""Set a SnapshotsManager object."""
|
"""Set a SnapshotManager object."""
|
||||||
if self._snapshots:
|
if self._snapshots:
|
||||||
raise RuntimeError("SnapshotsManager already set!")
|
raise RuntimeError("SnapshotsManager already set!")
|
||||||
self._snapshots = value
|
self._snapshots = value
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def tasks(self):
|
def tasks(self):
|
||||||
"""Return SnapshotsManager object."""
|
"""Return Tasks object."""
|
||||||
return self._tasks
|
return self._tasks
|
||||||
|
|
||||||
@tasks.setter
|
@tasks.setter
|
||||||
@@ -177,6 +178,18 @@ class CoreSys(object):
|
|||||||
raise RuntimeError("Tasks already set!")
|
raise RuntimeError("Tasks already set!")
|
||||||
self._tasks = value
|
self._tasks = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def services(self):
|
||||||
|
"""Return ServiceManager object."""
|
||||||
|
return self._services
|
||||||
|
|
||||||
|
@services.setter
|
||||||
|
def services(self, value):
|
||||||
|
"""Set a ServiceManager object."""
|
||||||
|
if self._services:
|
||||||
|
raise RuntimeError("Services already set!")
|
||||||
|
self._services = value
|
||||||
|
|
||||||
|
|
||||||
class CoreSysAttributes(object):
|
class CoreSysAttributes(object):
|
||||||
"""Inheret basic CoreSysAttributes."""
|
"""Inheret basic CoreSysAttributes."""
|
||||||
|
@@ -1,5 +1,6 @@
|
|||||||
"""Init file for HassIO docker object."""
|
"""Init file for HassIO docker object."""
|
||||||
from contextlib import suppress
|
from contextlib import suppress
|
||||||
|
from collections import namedtuple
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
@@ -9,6 +10,8 @@ from ..const import SOCKET_DOCKER
|
|||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
CommandReturn = namedtuple('CommandReturn', ['exit_code', 'output'])
|
||||||
|
|
||||||
|
|
||||||
class DockerAPI(object):
|
class DockerAPI(object):
|
||||||
"""Docker hassio wrapper.
|
"""Docker hassio wrapper.
|
||||||
@@ -19,7 +22,8 @@ class DockerAPI(object):
|
|||||||
def __init__(self):
|
def __init__(self):
|
||||||
"""Initialize docker base wrapper."""
|
"""Initialize docker base wrapper."""
|
||||||
self.docker = docker.DockerClient(
|
self.docker = docker.DockerClient(
|
||||||
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
|
base_url="unix:/{}".format(str(SOCKET_DOCKER)),
|
||||||
|
version='auto', timeout=300)
|
||||||
self.network = DockerNetwork(self.docker)
|
self.network = DockerNetwork(self.docker)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -47,8 +51,10 @@ class DockerAPI(object):
|
|||||||
hostname = kwargs.get('hostname')
|
hostname = kwargs.get('hostname')
|
||||||
|
|
||||||
# setup network
|
# setup network
|
||||||
|
kwargs['dns_search'] = ["."]
|
||||||
if network_mode:
|
if network_mode:
|
||||||
kwargs['dns'] = [str(self.network.supervisor)]
|
kwargs['dns'] = [str(self.network.supervisor)]
|
||||||
|
kwargs['dns_opt'] = ["ndots:0"]
|
||||||
else:
|
else:
|
||||||
kwargs['network'] = None
|
kwargs['network'] = None
|
||||||
|
|
||||||
@@ -94,15 +100,15 @@ class DockerAPI(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# wait until command is done
|
# wait until command is done
|
||||||
exit_code = container.wait()
|
result = container.wait()
|
||||||
output = container.logs(stdout=stdout, stderr=stderr)
|
output = container.logs(stdout=stdout, stderr=stderr)
|
||||||
|
|
||||||
except docker.errors.DockerException as err:
|
except docker.errors.DockerException as err:
|
||||||
_LOGGER.error("Can't execute command: %s", err)
|
_LOGGER.error("Can't execute command: %s", err)
|
||||||
return (None, b"")
|
return CommandReturn(None, b"")
|
||||||
|
|
||||||
# cleanup container
|
# cleanup container
|
||||||
with suppress(docker.errors.DockerException):
|
with suppress(docker.errors.DockerException):
|
||||||
container.remove(force=True)
|
container.remove(force=True)
|
||||||
|
|
||||||
return (exit_code, output)
|
return CommandReturn(result.get('StatusCode'), output)
|
||||||
|
@@ -9,7 +9,8 @@ from .interface import DockerInterface
|
|||||||
from .utils import docker_process
|
from .utils import docker_process
|
||||||
from ..addons.build import AddonBuild
|
from ..addons.build import AddonBuild
|
||||||
from ..const import (
|
from ..const import (
|
||||||
MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
|
MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE, ENV_TOKEN,
|
||||||
|
ENV_TIME)
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -26,7 +27,7 @@ class DockerAddon(DockerInterface):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def addon(self):
|
def addon(self):
|
||||||
"""Return name of docker image."""
|
"""Return addon of docker image."""
|
||||||
return self._addons.get(self._id)
|
return self._addons.get(self._id)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -74,19 +75,18 @@ class DockerAddon(DockerInterface):
|
|||||||
def environment(self):
|
def environment(self):
|
||||||
"""Return environment for docker add-on."""
|
"""Return environment for docker add-on."""
|
||||||
addon_env = self.addon.environment or {}
|
addon_env = self.addon.environment or {}
|
||||||
|
|
||||||
|
# Need audio settings
|
||||||
if self.addon.with_audio:
|
if self.addon.with_audio:
|
||||||
addon_env.update({
|
addon_env.update({
|
||||||
'ALSA_OUTPUT': self.addon.audio_output,
|
'ALSA_OUTPUT': self.addon.audio_output,
|
||||||
'ALSA_INPUT': self.addon.audio_input,
|
'ALSA_INPUT': self.addon.audio_input,
|
||||||
})
|
})
|
||||||
|
|
||||||
# Set api token if any API access is needed
|
|
||||||
if self.addon.access_hassio_api or self.addon.access_homeassistant_api:
|
|
||||||
addon_env['API_TOKEN'] = self.addon.api_token
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
**addon_env,
|
**addon_env,
|
||||||
'TZ': self._config.timezone,
|
ENV_TIME: self._config.timezone,
|
||||||
|
ENV_TOKEN: self.addon.uuid,
|
||||||
}
|
}
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -225,10 +225,6 @@ class DockerAddon(DockerInterface):
|
|||||||
# cleanup
|
# cleanup
|
||||||
self._stop()
|
self._stop()
|
||||||
|
|
||||||
# write config
|
|
||||||
if not self.addon.write_options():
|
|
||||||
return False
|
|
||||||
|
|
||||||
ret = self._docker.run(
|
ret = self._docker.run(
|
||||||
self.image,
|
self.image,
|
||||||
name=self.name,
|
name=self.name,
|
||||||
@@ -269,13 +265,17 @@ class DockerAddon(DockerInterface):
|
|||||||
|
|
||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
build_env = AddonBuild(self.coresys, self.addon)
|
build_env = AddonBuild(self.coresys, self._id)
|
||||||
|
|
||||||
_LOGGER.info("Start build %s:%s", self.image, tag)
|
_LOGGER.info("Start build %s:%s", self.image, tag)
|
||||||
try:
|
try:
|
||||||
image = self._docker.images.build(**build_env.get_docker_args(tag))
|
image, log = self._docker.images.build(
|
||||||
|
**build_env.get_docker_args(tag))
|
||||||
|
|
||||||
|
_LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
|
||||||
image.tag(self.image, tag='latest')
|
image.tag(self.image, tag='latest')
|
||||||
|
|
||||||
|
# Update meta data
|
||||||
self._meta = image.attrs
|
self._meta = image.attrs
|
||||||
|
|
||||||
except (docker.errors.DockerException) as err:
|
except (docker.errors.DockerException) as err:
|
||||||
@@ -301,15 +301,16 @@ class DockerAddon(DockerInterface):
|
|||||||
_LOGGER.error("Can't fetch image %s: %s", self.image, err)
|
_LOGGER.error("Can't fetch image %s: %s", self.image, err)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
_LOGGER.info("Export image %s to %s", self.image, tar_file)
|
||||||
try:
|
try:
|
||||||
with tar_file.open("wb") as write_tar:
|
with tar_file.open("wb") as write_tar:
|
||||||
for chunk in image.stream():
|
for chunk in image:
|
||||||
write_tar.write(chunk)
|
write_tar.write(chunk)
|
||||||
except (OSError, requests.exceptions.ReadTimeout) as err:
|
except (OSError, requests.exceptions.ReadTimeout) as err:
|
||||||
_LOGGER.error("Can't write tar file %s: %s", tar_file, err)
|
_LOGGER.error("Can't write tar file %s: %s", tar_file, err)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
_LOGGER.info("Export image %s to %s", self.image, tar_file)
|
_LOGGER.info("Export image %s done", self.image)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@docker_process
|
@docker_process
|
||||||
@@ -337,15 +338,6 @@ class DockerAddon(DockerInterface):
|
|||||||
self._cleanup()
|
self._cleanup()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _restart(self):
|
|
||||||
"""Restart docker container.
|
|
||||||
|
|
||||||
Addons prepare some thing on start and that is normaly not repeatable.
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
self._stop()
|
|
||||||
return self._run()
|
|
||||||
|
|
||||||
@docker_process
|
@docker_process
|
||||||
def write_stdin(self, data):
|
def write_stdin(self, data):
|
||||||
"""Write to add-on stdin."""
|
"""Write to add-on stdin."""
|
||||||
|
@@ -4,6 +4,7 @@ import logging
|
|||||||
import docker
|
import docker
|
||||||
|
|
||||||
from .interface import DockerInterface
|
from .interface import DockerInterface
|
||||||
|
from ..const import ENV_TOKEN, ENV_TIME
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -53,7 +54,8 @@ class DockerHomeAssistant(DockerInterface):
|
|||||||
network_mode='host',
|
network_mode='host',
|
||||||
environment={
|
environment={
|
||||||
'HASSIO': self._docker.network.supervisor,
|
'HASSIO': self._docker.network.supervisor,
|
||||||
'TZ': self._config.timezone,
|
ENV_TIME: self._config.timezone,
|
||||||
|
ENV_TOKEN: self._homeassistant.uuid,
|
||||||
},
|
},
|
||||||
volumes={
|
volumes={
|
||||||
str(self._config.path_extern_config):
|
str(self._config.path_extern_config):
|
||||||
@@ -83,7 +85,7 @@ class DockerHomeAssistant(DockerInterface):
|
|||||||
stdout=True,
|
stdout=True,
|
||||||
stderr=True,
|
stderr=True,
|
||||||
environment={
|
environment={
|
||||||
'TZ': self._config.timezone,
|
ENV_TIME: self._config.timezone,
|
||||||
},
|
},
|
||||||
volumes={
|
volumes={
|
||||||
str(self._config.path_extern_config):
|
str(self._config.path_extern_config):
|
||||||
|
@@ -9,7 +9,10 @@ _LOGGER = logging.getLogger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
class DockerNetwork(object):
|
class DockerNetwork(object):
|
||||||
"""Internal HassIO Network."""
|
"""Internal HassIO Network.
|
||||||
|
|
||||||
|
This class is not AsyncIO safe!
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(self, dock):
|
def __init__(self, dock):
|
||||||
"""Initialize internal hassio network."""
|
"""Initialize internal hassio network."""
|
||||||
@@ -52,7 +55,8 @@ class DockerNetwork(object):
|
|||||||
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
|
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
|
||||||
|
|
||||||
return self.docker.networks.create(
|
return self.docker.networks.create(
|
||||||
DOCKER_NETWORK, driver='bridge', ipam=ipam_config, options={
|
DOCKER_NETWORK, driver='bridge', ipam=ipam_config,
|
||||||
|
enable_ipv6=False, options={
|
||||||
"com.docker.network.bridge.name": DOCKER_NETWORK,
|
"com.docker.network.bridge.name": DOCKER_NETWORK,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@@ -1,16 +1,19 @@
|
|||||||
"""HomeAssistant control object."""
|
"""HomeAssistant control object."""
|
||||||
import asyncio
|
import asyncio
|
||||||
|
from collections import namedtuple
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import socket
|
||||||
|
import time
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
from aiohttp.hdrs import CONTENT_TYPE
|
from aiohttp.hdrs import CONTENT_TYPE
|
||||||
|
|
||||||
from .const import (
|
from .const import (
|
||||||
FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION,
|
FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_UUID,
|
||||||
ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
|
ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
|
||||||
HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
|
ATTR_WAIT_BOOT, HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
|
||||||
from .coresys import CoreSysAttributes
|
from .coresys import CoreSysAttributes
|
||||||
from .docker.homeassistant import DockerHomeAssistant
|
from .docker.homeassistant import DockerHomeAssistant
|
||||||
from .utils import convert_to_ascii
|
from .utils import convert_to_ascii
|
||||||
@@ -21,6 +24,8 @@ _LOGGER = logging.getLogger(__name__)
|
|||||||
|
|
||||||
RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
|
RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
|
||||||
|
|
||||||
|
ConfigResult = namedtuple('ConfigResult', ['valid', 'log'])
|
||||||
|
|
||||||
|
|
||||||
class HomeAssistant(JsonConfig, CoreSysAttributes):
|
class HomeAssistant(JsonConfig, CoreSysAttributes):
|
||||||
"""Hass core object for handle it."""
|
"""Hass core object for handle it."""
|
||||||
@@ -53,7 +58,6 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
def api_port(self, value):
|
def api_port(self, value):
|
||||||
"""Set network port for home-assistant instance."""
|
"""Set network port for home-assistant instance."""
|
||||||
self._data[ATTR_PORT] = value
|
self._data[ATTR_PORT] = value
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def api_password(self):
|
def api_password(self):
|
||||||
@@ -92,6 +96,16 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
"""Return True if the watchdog should protect Home-Assistant."""
|
"""Return True if the watchdog should protect Home-Assistant."""
|
||||||
self._data[ATTR_WATCHDOG] = value
|
self._data[ATTR_WATCHDOG] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def wait_boot(self):
|
||||||
|
"""Return time to wait for Home-Assistant startup."""
|
||||||
|
return self._data[ATTR_WAIT_BOOT]
|
||||||
|
|
||||||
|
@wait_boot.setter
|
||||||
|
def wait_boot(self, value):
|
||||||
|
"""Set time to wait for Home-Assistant startup."""
|
||||||
|
self._data[ATTR_WAIT_BOOT] = value
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def version(self):
|
def version(self):
|
||||||
"""Return version of running homeassistant."""
|
"""Return version of running homeassistant."""
|
||||||
@@ -143,6 +157,11 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
"""Set home-assistant boot options."""
|
"""Set home-assistant boot options."""
|
||||||
self._data[ATTR_BOOT] = value
|
self._data[ATTR_BOOT] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def uuid(self):
|
||||||
|
"""Return a UUID of this HomeAssistant."""
|
||||||
|
return self._data[ATTR_UUID]
|
||||||
|
|
||||||
async def install_landingpage(self):
|
async def install_landingpage(self):
|
||||||
"""Install a landingpage."""
|
"""Install a landingpage."""
|
||||||
_LOGGER.info("Setup HomeAssistant landingpage")
|
_LOGGER.info("Setup HomeAssistant landingpage")
|
||||||
@@ -152,8 +171,8 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
_LOGGER.warning("Fails install landingpage, retry after 60sec")
|
_LOGGER.warning("Fails install landingpage, retry after 60sec")
|
||||||
await asyncio.sleep(60, loop=self._loop)
|
await asyncio.sleep(60, loop=self._loop)
|
||||||
|
|
||||||
# run landingpage after installation
|
# Run landingpage after installation
|
||||||
await self.instance.run()
|
await self.start()
|
||||||
|
|
||||||
async def install(self):
|
async def install(self):
|
||||||
"""Install a landingpage."""
|
"""Install a landingpage."""
|
||||||
@@ -172,7 +191,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
# finishing
|
# finishing
|
||||||
_LOGGER.info("HomeAssistant docker now installed")
|
_LOGGER.info("HomeAssistant docker now installed")
|
||||||
if self.boot:
|
if self.boot:
|
||||||
await self.instance.run()
|
await self.start()
|
||||||
await self.instance.cleanup()
|
await self.instance.cleanup()
|
||||||
|
|
||||||
async def update(self, version=None):
|
async def update(self, version=None):
|
||||||
@@ -189,14 +208,14 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
return await self.instance.update(version)
|
return await self.instance.update(version)
|
||||||
finally:
|
finally:
|
||||||
if running:
|
if running:
|
||||||
await self.instance.run()
|
await self.start()
|
||||||
|
|
||||||
def run(self):
|
async def start(self):
|
||||||
"""Run HomeAssistant docker.
|
"""Run HomeAssistant docker."""
|
||||||
|
if not await self.instance.run():
|
||||||
|
return False
|
||||||
|
|
||||||
Return a coroutine.
|
return await self._block_till_run()
|
||||||
"""
|
|
||||||
return self.instance.run()
|
|
||||||
|
|
||||||
def stop(self):
|
def stop(self):
|
||||||
"""Stop HomeAssistant docker.
|
"""Stop HomeAssistant docker.
|
||||||
@@ -205,12 +224,12 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
"""
|
"""
|
||||||
return self.instance.stop()
|
return self.instance.stop()
|
||||||
|
|
||||||
def restart(self):
|
async def restart(self):
|
||||||
"""Restart HomeAssistant docker.
|
"""Restart HomeAssistant docker."""
|
||||||
|
if not await self.instance.restart():
|
||||||
|
return False
|
||||||
|
|
||||||
Return a coroutine.
|
return await self._block_till_run()
|
||||||
"""
|
|
||||||
return self.instance.restart()
|
|
||||||
|
|
||||||
def logs(self):
|
def logs(self):
|
||||||
"""Get HomeAssistant docker logs.
|
"""Get HomeAssistant docker logs.
|
||||||
@@ -247,19 +266,19 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
|
|
||||||
async def check_config(self):
|
async def check_config(self):
|
||||||
"""Run homeassistant config check."""
|
"""Run homeassistant config check."""
|
||||||
exit_code, log = await self.instance.execute_command(
|
result = await self.instance.execute_command(
|
||||||
"python3 -m homeassistant -c /config --script check_config"
|
"python3 -m homeassistant -c /config --script check_config"
|
||||||
)
|
)
|
||||||
|
|
||||||
# if not valid
|
# if not valid
|
||||||
if exit_code is None:
|
if result.exit_code is None:
|
||||||
return (False, "")
|
return ConfigResult(False, "")
|
||||||
|
|
||||||
# parse output
|
# parse output
|
||||||
log = convert_to_ascii(log)
|
log = convert_to_ascii(result.output)
|
||||||
if exit_code != 0 or RE_YAML_ERROR.search(log):
|
if result.exit_code != 0 or RE_YAML_ERROR.search(log):
|
||||||
return (False, log)
|
return ConfigResult(False, log)
|
||||||
return (True, log)
|
return ConfigResult(True, log)
|
||||||
|
|
||||||
async def check_api_state(self):
|
async def check_api_state(self):
|
||||||
"""Check if Home-Assistant up and running."""
|
"""Check if Home-Assistant up and running."""
|
||||||
@@ -281,3 +300,54 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
if status not in (200, 201):
|
if status not in (200, 201):
|
||||||
_LOGGER.warning("Home-Assistant API config missmatch")
|
_LOGGER.warning("Home-Assistant API config missmatch")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
async def send_event(self, event_type, event_data=None):
|
||||||
|
"""Send event to Home-Assistant."""
|
||||||
|
url = f"{self.api_url}/api/events/{event_type}"
|
||||||
|
header = {CONTENT_TYPE: CONTENT_TYPE_JSON}
|
||||||
|
|
||||||
|
if self.api_password:
|
||||||
|
header.update({HEADER_HA_ACCESS: self.api_password})
|
||||||
|
|
||||||
|
try:
|
||||||
|
# pylint: disable=bad-continuation
|
||||||
|
async with self._websession_ssl.post(
|
||||||
|
url, headers=header, timeout=30,
|
||||||
|
json=event_data) as request:
|
||||||
|
status = request.status
|
||||||
|
|
||||||
|
except (asyncio.TimeoutError, aiohttp.ClientError) as err:
|
||||||
|
_LOGGER.warning(
|
||||||
|
"Home-Assistant event %s fails: %s", event_type, err)
|
||||||
|
return False
|
||||||
|
|
||||||
|
if status not in (200, 201):
|
||||||
|
_LOGGER.warning("Home-Assistant event %s fails", event_type)
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
async def _block_till_run(self):
|
||||||
|
"""Block until Home-Assistant is booting up or startup timeout."""
|
||||||
|
start_time = time.monotonic()
|
||||||
|
|
||||||
|
def check_port():
|
||||||
|
"""Check if port is mapped."""
|
||||||
|
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
|
try:
|
||||||
|
result = sock.connect_ex((str(self.api_ip), self.api_port))
|
||||||
|
sock.close()
|
||||||
|
|
||||||
|
if result == 0:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
while time.monotonic() - start_time < self.wait_boot:
|
||||||
|
if await self._loop.run_in_executor(None, check_port):
|
||||||
|
_LOGGER.info("Detect a running Home-Assistant instance")
|
||||||
|
return True
|
||||||
|
await asyncio.sleep(10, loop=self._loop)
|
||||||
|
|
||||||
|
_LOGGER.warning("Don't wait anymore of Home-Assistant startup!")
|
||||||
|
return False
|
||||||
|
45
hassio/services/__init__.py
Normal file
45
hassio/services/__init__.py
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
"""Handle internal services discovery."""
|
||||||
|
|
||||||
|
from .mqtt import MQTTService
|
||||||
|
from .data import ServicesData
|
||||||
|
from .discovery import Discovery
|
||||||
|
from ..const import SERVICE_MQTT
|
||||||
|
from ..coresys import CoreSysAttributes
|
||||||
|
|
||||||
|
|
||||||
|
AVAILABLE_SERVICES = {
|
||||||
|
SERVICE_MQTT: MQTTService
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class ServiceManager(CoreSysAttributes):
|
||||||
|
"""Handle internal services discovery."""
|
||||||
|
|
||||||
|
def __init__(self, coresys):
|
||||||
|
"""Initialize Services handler."""
|
||||||
|
self.coresys = coresys
|
||||||
|
self.data = ServicesData()
|
||||||
|
self.discovery = Discovery(coresys)
|
||||||
|
self.services_obj = {}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def list_services(self):
|
||||||
|
"""Return a list of services."""
|
||||||
|
return list(self.services_obj.values())
|
||||||
|
|
||||||
|
def get(self, slug):
|
||||||
|
"""Return service object from slug."""
|
||||||
|
return self.services_obj.get(slug)
|
||||||
|
|
||||||
|
async def load(self):
|
||||||
|
"""Load available services."""
|
||||||
|
for slug, service in AVAILABLE_SERVICES.items():
|
||||||
|
self.services_obj[slug] = service(self.coresys)
|
||||||
|
|
||||||
|
# Read exists discovery messages
|
||||||
|
self.discovery.load()
|
||||||
|
|
||||||
|
def reset(self):
|
||||||
|
"""Reset available data."""
|
||||||
|
self.data.reset_data()
|
||||||
|
self.discovery.load()
|
23
hassio/services/data.py
Normal file
23
hassio/services/data.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
"""Handle service data for persistent supervisor reboot."""
|
||||||
|
|
||||||
|
from .validate import SCHEMA_SERVICES_FILE
|
||||||
|
from ..const import FILE_HASSIO_SERVICES, ATTR_DISCOVERY, SERVICE_MQTT
|
||||||
|
from ..utils.json import JsonConfig
|
||||||
|
|
||||||
|
|
||||||
|
class ServicesData(JsonConfig):
|
||||||
|
"""Class to handle services data."""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
"""Initialize services data."""
|
||||||
|
super().__init__(FILE_HASSIO_SERVICES, SCHEMA_SERVICES_FILE)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def discovery(self):
|
||||||
|
"""Return discovery data for home-assistant."""
|
||||||
|
return self._data[ATTR_DISCOVERY]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def mqtt(self):
|
||||||
|
"""Return settings for mqtt service."""
|
||||||
|
return self._data[SERVICE_MQTT]
|
107
hassio/services/discovery.py
Normal file
107
hassio/services/discovery.py
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
"""Handle discover message for Home-Assistant."""
|
||||||
|
import logging
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from ..const import ATTR_UUID
|
||||||
|
from ..coresys import CoreSysAttributes
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
EVENT_DISCOVERY_ADD = 'hassio_discovery_add'
|
||||||
|
EVENT_DISCOVERY_DEL = 'hassio_discovery_del'
|
||||||
|
|
||||||
|
|
||||||
|
class Discovery(CoreSysAttributes):
|
||||||
|
"""Home-Assistant Discovery handler."""
|
||||||
|
|
||||||
|
def __init__(self, coresys):
|
||||||
|
"""Initialize discovery handler."""
|
||||||
|
self.coresys = coresys
|
||||||
|
self.message_obj = {}
|
||||||
|
|
||||||
|
def load(self):
|
||||||
|
"""Load exists discovery message into storage."""
|
||||||
|
messages = {}
|
||||||
|
for message in self._data:
|
||||||
|
discovery = Message(**message)
|
||||||
|
messages[discovery.uuid] = discovery
|
||||||
|
|
||||||
|
self.message_obj = messages
|
||||||
|
|
||||||
|
def save(self):
|
||||||
|
"""Write discovery message into data file."""
|
||||||
|
messages = []
|
||||||
|
for message in self.message_obj.values():
|
||||||
|
messages.append(message.raw())
|
||||||
|
|
||||||
|
self._data.clear()
|
||||||
|
self._data.extend(messages)
|
||||||
|
self._services.data.save_data()
|
||||||
|
|
||||||
|
def get(self, uuid):
|
||||||
|
"""Return discovery message."""
|
||||||
|
return self.message_obj.get(uuid)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _data(self):
|
||||||
|
"""Return discovery data."""
|
||||||
|
return self._services.data.discovery
|
||||||
|
|
||||||
|
@property
|
||||||
|
def list_messages(self):
|
||||||
|
"""Return list of available discovery messages."""
|
||||||
|
return self.message_obj.values()
|
||||||
|
|
||||||
|
def send(self, provider, component, platform=None, config=None):
|
||||||
|
"""Send a discovery message to Home-Assistant."""
|
||||||
|
message = Message(provider, component, platform, config)
|
||||||
|
|
||||||
|
# Allready exists?
|
||||||
|
for exists_message in self.message_obj:
|
||||||
|
if exists_message == message:
|
||||||
|
_LOGGER.warning("Found douplicate discovery message from %s",
|
||||||
|
provider)
|
||||||
|
return exists_message
|
||||||
|
|
||||||
|
_LOGGER.info("Send discovery to Home-Assistant %s/%s from %s",
|
||||||
|
component, platform, provider)
|
||||||
|
self.message_obj[message.uuid] = message
|
||||||
|
self.save()
|
||||||
|
|
||||||
|
# send event to Home-Assistant
|
||||||
|
self._loop.create_task(self._homeassistant.send_event(
|
||||||
|
EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid}))
|
||||||
|
|
||||||
|
return message
|
||||||
|
|
||||||
|
def remove(self, message):
|
||||||
|
"""Remove a discovery message from Home-Assistant."""
|
||||||
|
self.message_obj.pop(message.uuid, None)
|
||||||
|
self.save()
|
||||||
|
|
||||||
|
# send event to Home-Assistant
|
||||||
|
self._loop.create_task(self._homeassistant.send_event(
|
||||||
|
EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid}))
|
||||||
|
|
||||||
|
|
||||||
|
class Message:
    """Represent a single Discovery message.

    The uuid identifies the stored message but is deliberately excluded
    from equality: two messages with the same provider/component/
    platform/config compare equal regardless of uuid.
    """

    def __init__(self, provider, component, platform, config, uuid=None):
        """Initialize discovery message."""
        self.provider = provider
        self.component = component
        self.platform = platform
        self.config = config
        # Generate a fresh 32-char hex id unless restoring a stored one.
        self.uuid = uuid or uuid4().hex

    def raw(self):
        """Return raw discovery message as a dict."""
        return self.__dict__

    def __eq__(self, other):
        """Compare with other message.

        Returns NotImplemented for non-Message operands; the original
        called getattr() unconditionally and raised AttributeError when
        compared against e.g. a plain string.
        """
        if not isinstance(other, Message):
            return NotImplemented
        return all(
            getattr(self, attribute) == getattr(other, attribute)
            for attribute in ('provider', 'component', 'platform', 'config'))
|
54
hassio/services/interface.py
Normal file
54
hassio/services/interface.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
"""Interface for single service."""
|
||||||
|
|
||||||
|
from ..coresys import CoreSysAttributes
|
||||||
|
|
||||||
|
|
||||||
|
class ServiceInterface(CoreSysAttributes):
    """Interface class for service integration.

    Concrete services override the property stubs below and the two
    NotImplementedError methods; this base only wires the coresys and
    provides the shared enabled/save/get helpers.
    """

    def __init__(self, coresys):
        """Initialize service interface with the shared coresys object."""
        self.coresys = coresys

    @property
    def slug(self):
        """Return slug of this service (stub: overridden by subclasses)."""
        return None

    @property
    def _data(self):
        """Return data of this service (stub: overridden by subclasses)."""
        return None

    @property
    def schema(self):
        """Return data schema of this service (stub for subclasses)."""
        return None

    @property
    def provider(self):
        """Return name of service provider (stub for subclasses)."""
        return None

    @property
    def enabled(self):
        """Return True if the service is in use (i.e. has stored data)."""
        return bool(self._data)

    def save(self):
        """Persist the services data file."""
        self._services.data.save_data()

    def get_service_data(self):
        """Return the requested service data, or None when not enabled."""
        return self._data if self.enabled else None

    def set_service_data(self, provider, data):
        """Write the data into service object."""
        raise NotImplementedError()

    def del_service_data(self, provider):
        """Remove the data from service object."""
        raise NotImplementedError()
|
89
hassio/services/mqtt.py
Normal file
89
hassio/services/mqtt.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
"""Provide MQTT Service."""
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from .interface import ServiceInterface
|
||||||
|
from .validate import SCHEMA_SERVICE_MQTT
|
||||||
|
from ..const import (
|
||||||
|
ATTR_PROVIDER, SERVICE_MQTT, ATTR_HOST, ATTR_PORT, ATTR_USERNAME,
|
||||||
|
ATTR_PASSWORD, ATTR_PROTOCOL, ATTR_DISCOVERY_ID)
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class MQTTService(ServiceInterface):
    """Provide mqtt services."""

    @property
    def slug(self):
        """Return slug of this service."""
        return SERVICE_MQTT

    @property
    def _data(self):
        """Return data of this service."""
        return self._services.data.mqtt

    @property
    def schema(self):
        """Return data schema of this service."""
        return SCHEMA_SERVICE_MQTT

    @property
    def provider(self):
        """Return name of service provider."""
        return self._data.get(ATTR_PROVIDER)

    @property
    def hass_config(self):
        """Return Home-Assistant mqtt config, or None when not enabled."""
        if not self.enabled:
            return None

        hass_config = {
            'host': self._data[ATTR_HOST],
            'port': self._data[ATTR_PORT],
            'protocol': self._data[ATTR_PROTOCOL]
        }
        # Bug fix: the original used annotation syntax
        # (hass_config['user']: value), which evaluates both sides but
        # never assigns, silently dropping user/password from the config.
        if ATTR_USERNAME in self._data:
            hass_config['user'] = self._data[ATTR_USERNAME]
        if ATTR_PASSWORD in self._data:
            hass_config['password'] = self._data[ATTR_PASSWORD]

        return hass_config

    def set_service_data(self, provider, data):
        """Write the data into service object.

        Returns False if another provider already registered mqtt,
        otherwise stores the data (and, for add-on providers, sends a
        discovery message to Home-Assistant) and returns True.
        """
        if self.enabled:
            _LOGGER.error("It is already a mqtt in use from %s", self.provider)
            return False

        self._data.update(data)
        self._data[ATTR_PROVIDER] = provider

        if provider == 'homeassistant':
            # Home-Assistant manages its own mqtt; no discovery needed.
            _LOGGER.info("Use mqtt settings from Home-Assistant")
            self.save()
            return True

        # discover mqtt to homeassistant
        message = self._services.discovery.send(
            provider, SERVICE_MQTT, None, self.hass_config)

        # Remember the discovery uuid so del_service_data can revoke it.
        self._data[ATTR_DISCOVERY_ID] = message.uuid
        self.save()
        return True

    def del_service_data(self, provider):
        """Remove the data from service object.

        Also revokes a pending discovery message, if one was stored.
        """
        if not self.enabled:
            _LOGGER.warning("Can't remove not exists services.")
            return False

        discovery_id = self._data.get(ATTR_DISCOVERY_ID)
        if discovery_id:
            self._services.discovery.remove(
                self._services.discovery.get(discovery_id))

        self._data.clear()
        self.save()
        return True
|
44
hassio/services/validate.py
Normal file
44
hassio/services/validate.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
"""Validate services schema."""
|
||||||
|
|
||||||
|
import voluptuous as vol
|
||||||
|
|
||||||
|
from ..const import (
|
||||||
|
SERVICE_MQTT, ATTR_HOST, ATTR_PORT, ATTR_PASSWORD, ATTR_USERNAME, ATTR_SSL,
|
||||||
|
ATTR_PROVIDER, ATTR_PROTOCOL, ATTR_DISCOVERY, ATTR_COMPONENT, ATTR_UUID,
|
||||||
|
ATTR_PLATFORM, ATTR_CONFIG, ATTR_DISCOVERY_ID)
|
||||||
|
from ..validate import NETWORK_PORT
|
||||||
|
|
||||||
|
|
||||||
|
# Schema for the persisted list of discovery messages. Each entry mirrors
# the attributes of the Message class; uuid is the 32-char lowercase hex
# string produced by uuid4().hex. Unknown keys are dropped on load.
SCHEMA_DISCOVERY = vol.Schema([
    vol.Schema({
        vol.Required(ATTR_UUID): vol.Match(r"^[0-9a-f]{32}$"),
        vol.Required(ATTR_PROVIDER): vol.Coerce(str),
        vol.Required(ATTR_COMPONENT): vol.Coerce(str),
        vol.Required(ATTR_PLATFORM): vol.Any(None, vol.Coerce(str)),
        vol.Required(ATTR_CONFIG): vol.Any(None, dict),
    }, extra=vol.REMOVE_EXTRA)
])


# pylint: disable=no-value-for-parameter
# User-supplied MQTT service options (the payload accepted from a
# provider via set_service_data).
SCHEMA_SERVICE_MQTT = vol.Schema({
    vol.Required(ATTR_HOST): vol.Coerce(str),
    vol.Required(ATTR_PORT): NETWORK_PORT,
    vol.Optional(ATTR_USERNAME): vol.Coerce(str),
    vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
    vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
    vol.Optional(ATTR_PROTOCOL, default='3.1.1'):
        vol.All(vol.Coerce(str), vol.In(['3.1', '3.1.1'])),
})


# On-disk MQTT config: the service options plus bookkeeping fields
# (which provider registered it, and the discovery message uuid).
SCHEMA_CONFIG_MQTT = SCHEMA_SERVICE_MQTT.extend({
    vol.Required(ATTR_PROVIDER): vol.Coerce(str),
    vol.Optional(ATTR_DISCOVERY_ID): vol.Match(r"^[0-9a-f]{32}$"),
})


# Root schema of the services data file; unknown top-level keys are
# removed, and both sections default to empty.
SCHEMA_SERVICES_FILE = vol.Schema({
    vol.Optional(SERVICE_MQTT, default=dict): vol.Any({}, SCHEMA_CONFIG_MQTT),
    vol.Optional(ATTR_DISCOVERY, default=list): vol.Any([], SCHEMA_DISCOVERY),
}, extra=vol.REMOVE_EXTRA)
|
@@ -1,20 +1,19 @@
|
|||||||
"""Snapshot system control."""
|
"""Snapshot system control."""
|
||||||
import asyncio
|
import asyncio
|
||||||
from datetime import datetime
|
|
||||||
import logging
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import tarfile
|
|
||||||
|
|
||||||
from .snapshot import Snapshot
|
from .snapshot import Snapshot
|
||||||
from .utils import create_slug
|
from .utils import create_slug
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_SLUG, FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
|
FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
|
from ..utils.dt import utcnow
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class SnapshotsManager(CoreSysAttributes):
|
class SnapshotManager(CoreSysAttributes):
|
||||||
"""Manage snapshots."""
|
"""Manage snapshots."""
|
||||||
|
|
||||||
def __init__(self, coresys):
|
def __init__(self, coresys):
|
||||||
@@ -32,15 +31,15 @@ class SnapshotsManager(CoreSysAttributes):
|
|||||||
"""Return snapshot object."""
|
"""Return snapshot object."""
|
||||||
return self.snapshots_obj.get(slug)
|
return self.snapshots_obj.get(slug)
|
||||||
|
|
||||||
def _create_snapshot(self, name, sys_type):
|
def _create_snapshot(self, name, sys_type, password):
|
||||||
"""Initialize a new snapshot object from name."""
|
"""Initialize a new snapshot object from name."""
|
||||||
date_str = datetime.utcnow().isoformat()
|
date_str = utcnow().isoformat()
|
||||||
slug = create_slug(name, date_str)
|
slug = create_slug(name, date_str)
|
||||||
tar_file = Path(self._config.path_backup, "{}.tar".format(slug))
|
tar_file = Path(self._config.path_backup, f"{slug}.tar")
|
||||||
|
|
||||||
# init object
|
# init object
|
||||||
snapshot = Snapshot(self.coresys, tar_file)
|
snapshot = Snapshot(self.coresys, tar_file)
|
||||||
snapshot.create(slug, name, date_str, sys_type)
|
snapshot.new(slug, name, date_str, sys_type, password)
|
||||||
|
|
||||||
# set general data
|
# set general data
|
||||||
snapshot.store_homeassistant()
|
snapshot.store_homeassistant()
|
||||||
@@ -75,64 +74,91 @@ class SnapshotsManager(CoreSysAttributes):
|
|||||||
def remove(self, snapshot):
|
def remove(self, snapshot):
|
||||||
"""Remove a snapshot."""
|
"""Remove a snapshot."""
|
||||||
try:
|
try:
|
||||||
snapshot.tar_file.unlink()
|
snapshot.tarfile.unlink()
|
||||||
self.snapshots_obj.pop(snapshot.slug, None)
|
self.snapshots_obj.pop(snapshot.slug, None)
|
||||||
|
_LOGGER.info("Removed snapshot file %s", snapshot.slug)
|
||||||
|
|
||||||
except OSError as err:
|
except OSError as err:
|
||||||
_LOGGER.error("Can't remove snapshot %s: %s", snapshot.slug, err)
|
_LOGGER.error("Can't remove snapshot %s: %s", snapshot.slug, err)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
async def do_snapshot_full(self, name=""):
|
async def import_snapshot(self, tar_file):
|
||||||
|
"""Check snapshot tarfile and import it."""
|
||||||
|
snapshot = Snapshot(self.coresys, tar_file)
|
||||||
|
|
||||||
|
# Read meta data
|
||||||
|
if not await snapshot.load():
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Allready exists?
|
||||||
|
if snapshot.slug in self.snapshots_obj:
|
||||||
|
_LOGGER.error("Snapshot %s allready exists!", snapshot.slug)
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Move snapshot to backup
|
||||||
|
tar_origin = Path(self._config.path_backup, f"{snapshot.slug}.tar")
|
||||||
|
try:
|
||||||
|
snapshot.tarfile.rename(tar_origin)
|
||||||
|
|
||||||
|
except OSError as err:
|
||||||
|
_LOGGER.error("Can't move snapshot file to storage: %s", err)
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Load new snapshot
|
||||||
|
snapshot = Snapshot(self.coresys, tar_origin)
|
||||||
|
if not await snapshot.load():
|
||||||
|
return None
|
||||||
|
_LOGGER.info("Success import %s", snapshot.slug)
|
||||||
|
|
||||||
|
self.snapshots_obj[snapshot.slug] = snapshot
|
||||||
|
return snapshot
|
||||||
|
|
||||||
|
async def do_snapshot_full(self, name="", password=None):
|
||||||
"""Create a full snapshot."""
|
"""Create a full snapshot."""
|
||||||
if self.lock.locked():
|
if self.lock.locked():
|
||||||
_LOGGER.error("It is already a snapshot/restore process running")
|
_LOGGER.error("It is already a snapshot/restore process running")
|
||||||
return False
|
return None
|
||||||
|
|
||||||
snapshot = self._create_snapshot(name, SNAPSHOT_FULL)
|
snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password)
|
||||||
_LOGGER.info("Full-Snapshot %s start", snapshot.slug)
|
_LOGGER.info("Full-Snapshot %s start", snapshot.slug)
|
||||||
try:
|
try:
|
||||||
self._scheduler.suspend = True
|
self._scheduler.suspend = True
|
||||||
await self.lock.acquire()
|
await self.lock.acquire()
|
||||||
|
|
||||||
async with snapshot:
|
async with snapshot:
|
||||||
# snapshot addons
|
# Snapshot add-ons
|
||||||
tasks = []
|
_LOGGER.info("Snapshot %s store Add-ons", snapshot.slug)
|
||||||
for addon in self._addons.list_addons:
|
await snapshot.store_addons()
|
||||||
if not addon.is_installed:
|
|
||||||
continue
|
|
||||||
tasks.append(snapshot.import_addon(addon))
|
|
||||||
|
|
||||||
if tasks:
|
# Snapshot folders
|
||||||
_LOGGER.info("Full-Snapshot %s run %d addons",
|
_LOGGER.info("Snapshot %s store folders", snapshot.slug)
|
||||||
snapshot.slug, len(tasks))
|
|
||||||
await asyncio.wait(tasks, loop=self._loop)
|
|
||||||
|
|
||||||
# snapshot folders
|
|
||||||
_LOGGER.info("Full-Snapshot %s store folders", snapshot.slug)
|
|
||||||
await snapshot.store_folders()
|
await snapshot.store_folders()
|
||||||
|
|
||||||
|
except Exception: # pylint: disable=broad-except
|
||||||
|
_LOGGER.exception("Snapshot %s error", snapshot.slug)
|
||||||
|
return None
|
||||||
|
|
||||||
|
else:
|
||||||
_LOGGER.info("Full-Snapshot %s done", snapshot.slug)
|
_LOGGER.info("Full-Snapshot %s done", snapshot.slug)
|
||||||
self.snapshots_obj[snapshot.slug] = snapshot
|
self.snapshots_obj[snapshot.slug] = snapshot
|
||||||
return True
|
return snapshot
|
||||||
|
|
||||||
except (OSError, ValueError, tarfile.TarError) as err:
|
|
||||||
_LOGGER.info("Full-Snapshot %s error: %s", snapshot.slug, err)
|
|
||||||
return False
|
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
self._scheduler.suspend = False
|
self._scheduler.suspend = False
|
||||||
self.lock.release()
|
self.lock.release()
|
||||||
|
|
||||||
async def do_snapshot_partial(self, name="", addons=None, folders=None):
|
async def do_snapshot_partial(self, name="", addons=None, folders=None,
|
||||||
|
password=None):
|
||||||
"""Create a partial snapshot."""
|
"""Create a partial snapshot."""
|
||||||
if self.lock.locked():
|
if self.lock.locked():
|
||||||
_LOGGER.error("It is already a snapshot/restore process running")
|
_LOGGER.error("It is already a snapshot/restore process running")
|
||||||
return False
|
return None
|
||||||
|
|
||||||
addons = addons or []
|
addons = addons or []
|
||||||
folders = folders or []
|
folders = folders or []
|
||||||
snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL)
|
snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL, password)
|
||||||
|
|
||||||
_LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
|
_LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
|
||||||
try:
|
try:
|
||||||
@@ -140,44 +166,48 @@ class SnapshotsManager(CoreSysAttributes):
|
|||||||
await self.lock.acquire()
|
await self.lock.acquire()
|
||||||
|
|
||||||
async with snapshot:
|
async with snapshot:
|
||||||
# snapshot addons
|
# Snapshot add-ons
|
||||||
tasks = []
|
addon_list = []
|
||||||
for slug in addons:
|
for addon_slug in addons:
|
||||||
addon = self._addons.get(slug)
|
addon = self._addons.get(addon_slug)
|
||||||
if addon.is_installed:
|
if addon and addon.is_installed:
|
||||||
tasks.append(snapshot.import_addon(addon))
|
addon_list.append(addon)
|
||||||
|
continue
|
||||||
|
_LOGGER.warning("Add-on %s not found", addon_slug)
|
||||||
|
|
||||||
if tasks:
|
_LOGGER.info("Snapshot %s store Add-ons", snapshot.slug)
|
||||||
_LOGGER.info("Partial-Snapshot %s run %d addons",
|
await snapshot.store_addons(addon_list)
|
||||||
snapshot.slug, len(tasks))
|
|
||||||
await asyncio.wait(tasks, loop=self._loop)
|
|
||||||
|
|
||||||
# snapshot folders
|
# snapshot folders
|
||||||
_LOGGER.info("Partial-Snapshot %s store folders %s",
|
_LOGGER.info("Snapshot %s store folders", snapshot.slug)
|
||||||
snapshot.slug, folders)
|
|
||||||
await snapshot.store_folders(folders)
|
await snapshot.store_folders(folders)
|
||||||
|
|
||||||
|
except Exception: # pylint: disable=broad-except
|
||||||
|
_LOGGER.exception("Snapshot %s error", snapshot.slug)
|
||||||
|
return None
|
||||||
|
|
||||||
|
else:
|
||||||
_LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
|
_LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
|
||||||
self.snapshots_obj[snapshot.slug] = snapshot
|
self.snapshots_obj[snapshot.slug] = snapshot
|
||||||
return True
|
return snapshot
|
||||||
|
|
||||||
except (OSError, ValueError, tarfile.TarError) as err:
|
|
||||||
_LOGGER.info("Partial-Snapshot %s error: %s", snapshot.slug, err)
|
|
||||||
return False
|
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
self._scheduler.suspend = False
|
self._scheduler.suspend = False
|
||||||
self.lock.release()
|
self.lock.release()
|
||||||
|
|
||||||
async def do_restore_full(self, snapshot):
|
async def do_restore_full(self, snapshot, password=None):
|
||||||
"""Restore a snapshot."""
|
"""Restore a snapshot."""
|
||||||
if self.lock.locked():
|
if self.lock.locked():
|
||||||
_LOGGER.error("It is already a snapshot/restore process running")
|
_LOGGER.error("It is already a snapshot/restore process running")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if snapshot.sys_type != SNAPSHOT_FULL:
|
if snapshot.sys_type != SNAPSHOT_FULL:
|
||||||
_LOGGER.error(
|
_LOGGER.error("Restore %s is only a partial snapshot!",
|
||||||
"Full-Restore %s is only a partial snapshot!", snapshot.slug)
|
snapshot.slug)
|
||||||
|
return False
|
||||||
|
|
||||||
|
if snapshot.protected and not snapshot.set_password(password):
|
||||||
|
_LOGGER.error("Invalid password for snapshot %s", snapshot.slug)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
_LOGGER.info("Full-Restore %s start", snapshot.slug)
|
_LOGGER.info("Full-Restore %s start", snapshot.slug)
|
||||||
@@ -186,87 +216,75 @@ class SnapshotsManager(CoreSysAttributes):
|
|||||||
await self.lock.acquire()
|
await self.lock.acquire()
|
||||||
|
|
||||||
async with snapshot:
|
async with snapshot:
|
||||||
# stop system
|
|
||||||
tasks = []
|
tasks = []
|
||||||
tasks.append(self._homeassistant.stop())
|
|
||||||
|
|
||||||
|
# Stop Home-Assistant / Add-ons
|
||||||
|
tasks.append(self._homeassistant.stop())
|
||||||
for addon in self._addons.list_addons:
|
for addon in self._addons.list_addons:
|
||||||
if addon.is_installed:
|
if addon.is_installed:
|
||||||
tasks.append(addon.stop())
|
tasks.append(addon.stop())
|
||||||
|
|
||||||
|
if tasks:
|
||||||
|
_LOGGER.info("Restore %s stop tasks", snapshot.slug)
|
||||||
await asyncio.wait(tasks, loop=self._loop)
|
await asyncio.wait(tasks, loop=self._loop)
|
||||||
|
|
||||||
# restore folders
|
# Restore folders
|
||||||
_LOGGER.info("Full-Restore %s restore folders", snapshot.slug)
|
_LOGGER.info("Restore %s run folders", snapshot.slug)
|
||||||
await snapshot.restore_folders()
|
await snapshot.restore_folders()
|
||||||
|
|
||||||
# start homeassistant restore
|
# Start homeassistant restore
|
||||||
_LOGGER.info("Full-Restore %s restore Home-Assistant",
|
_LOGGER.info("Restore %s run Home-Assistant", snapshot.slug)
|
||||||
snapshot.slug)
|
|
||||||
snapshot.restore_homeassistant()
|
snapshot.restore_homeassistant()
|
||||||
task_hass = self._loop.create_task(
|
task_hass = self._loop.create_task(
|
||||||
self._homeassistant.update(snapshot.homeassistant_version))
|
self._homeassistant.update(snapshot.homeassistant_version))
|
||||||
|
|
||||||
# restore repositories
|
# Restore repositories
|
||||||
_LOGGER.info("Full-Restore %s restore Repositories",
|
_LOGGER.info("Restore %s run Repositories", snapshot.slug)
|
||||||
snapshot.slug)
|
|
||||||
await snapshot.restore_repositories()
|
await snapshot.restore_repositories()
|
||||||
|
|
||||||
# restore addons
|
# Delete delta add-ons
|
||||||
tasks = []
|
tasks.clear()
|
||||||
actual_addons = \
|
for addon in self._addons.list_installed:
|
||||||
set(addon.slug for addon in self._addons.list_addons
|
if addon.slug not in snapshot.addon_list:
|
||||||
if addon.is_installed)
|
|
||||||
restore_addons = \
|
|
||||||
set(data[ATTR_SLUG] for data in snapshot.addons)
|
|
||||||
remove_addons = actual_addons - restore_addons
|
|
||||||
|
|
||||||
_LOGGER.info("Full-Restore %s restore addons %s, remove %s",
|
|
||||||
snapshot.slug, restore_addons, remove_addons)
|
|
||||||
|
|
||||||
for slug in remove_addons:
|
|
||||||
addon = self._addons.get(slug)
|
|
||||||
if addon:
|
|
||||||
tasks.append(addon.uninstall())
|
tasks.append(addon.uninstall())
|
||||||
else:
|
|
||||||
_LOGGER.warning("Can't remove addon %s", slug)
|
|
||||||
|
|
||||||
for slug in restore_addons:
|
|
||||||
addon = self._addons.get(slug)
|
|
||||||
if addon:
|
|
||||||
tasks.append(snapshot.export_addon(addon))
|
|
||||||
else:
|
|
||||||
_LOGGER.warning("Can't restore addon %s", slug)
|
|
||||||
|
|
||||||
if tasks:
|
if tasks:
|
||||||
_LOGGER.info("Full-Restore %s restore addons tasks %d",
|
_LOGGER.info("Restore %s remove add-ons", snapshot.slug)
|
||||||
snapshot.slug, len(tasks))
|
|
||||||
await asyncio.wait(tasks, loop=self._loop)
|
await asyncio.wait(tasks, loop=self._loop)
|
||||||
|
|
||||||
|
# Restore add-ons
|
||||||
|
_LOGGER.info("Restore %s old add-ons", snapshot.slug)
|
||||||
|
await snapshot.restore_addons()
|
||||||
|
|
||||||
# finish homeassistant task
|
# finish homeassistant task
|
||||||
_LOGGER.info("Full-Restore %s wait until homeassistant ready",
|
_LOGGER.info("Restore %s wait until homeassistant ready",
|
||||||
snapshot.slug)
|
snapshot.slug)
|
||||||
await task_hass
|
await task_hass
|
||||||
await self._homeassistant.run()
|
await self._homeassistant.start()
|
||||||
|
|
||||||
|
except Exception: # pylint: disable=broad-except
|
||||||
|
_LOGGER.exception("Restore %s error", snapshot.slug)
|
||||||
|
return False
|
||||||
|
|
||||||
|
else:
|
||||||
_LOGGER.info("Full-Restore %s done", snapshot.slug)
|
_LOGGER.info("Full-Restore %s done", snapshot.slug)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
except (OSError, ValueError, tarfile.TarError) as err:
|
|
||||||
_LOGGER.info("Full-Restore %s error: %s", slug, err)
|
|
||||||
return False
|
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
self._scheduler.suspend = False
|
self._scheduler.suspend = False
|
||||||
self.lock.release()
|
self.lock.release()
|
||||||
|
|
||||||
async def do_restore_partial(self, snapshot, homeassistant=False,
|
async def do_restore_partial(self, snapshot, homeassistant=False,
|
||||||
addons=None, folders=None):
|
addons=None, folders=None, password=None):
|
||||||
"""Restore a snapshot."""
|
"""Restore a snapshot."""
|
||||||
if self.lock.locked():
|
if self.lock.locked():
|
||||||
_LOGGER.error("It is already a snapshot/restore process running")
|
_LOGGER.error("It is already a snapshot/restore process running")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
if snapshot.protected and not snapshot.set_password(password):
|
||||||
|
_LOGGER.error("Invalid password for snapshot %s", snapshot.slug)
|
||||||
|
return False
|
||||||
|
|
||||||
addons = addons or []
|
addons = addons or []
|
||||||
folders = folders or []
|
folders = folders or []
|
||||||
|
|
||||||
@@ -276,45 +294,53 @@ class SnapshotsManager(CoreSysAttributes):
|
|||||||
await self.lock.acquire()
|
await self.lock.acquire()
|
||||||
|
|
||||||
async with snapshot:
|
async with snapshot:
|
||||||
tasks = []
|
# Stop Home-Assistant if they will be restored later
|
||||||
|
if homeassistant and FOLDER_HOMEASSISTANT in folders:
|
||||||
if FOLDER_HOMEASSISTANT in folders:
|
|
||||||
await self._homeassistant.stop()
|
await self._homeassistant.stop()
|
||||||
|
|
||||||
|
# Process folders
|
||||||
if folders:
|
if folders:
|
||||||
_LOGGER.info("Partial-Restore %s restore folders %s",
|
_LOGGER.info("Restore %s run folders", snapshot.slug)
|
||||||
snapshot.slug, folders)
|
|
||||||
await snapshot.restore_folders(folders)
|
await snapshot.restore_folders(folders)
|
||||||
|
|
||||||
|
# Process Home-Assistant
|
||||||
|
task_hass = None
|
||||||
if homeassistant:
|
if homeassistant:
|
||||||
_LOGGER.info("Partial-Restore %s restore Home-Assistant",
|
_LOGGER.info("Restore %s run Home-Assistant",
|
||||||
snapshot.slug)
|
snapshot.slug)
|
||||||
snapshot.restore_homeassistant()
|
snapshot.restore_homeassistant()
|
||||||
tasks.append(self._homeassistant.update(
|
task_hass = self._loop.create_task(
|
||||||
|
self._homeassistant.update(
|
||||||
snapshot.homeassistant_version))
|
snapshot.homeassistant_version))
|
||||||
|
|
||||||
|
# Process Add-ons
|
||||||
|
addon_list = []
|
||||||
for slug in addons:
|
for slug in addons:
|
||||||
addon = self._addons.get(slug)
|
addon = self._addons.get(slug)
|
||||||
if addon:
|
if addon:
|
||||||
tasks.append(snapshot.export_addon(addon))
|
addon_list.append(addon)
|
||||||
else:
|
continue
|
||||||
_LOGGER.warning("Can't restore addon %s", slug)
|
_LOGGER.warning("Can't restore addon %s", snapshot.slug)
|
||||||
|
|
||||||
if tasks:
|
if addon_list:
|
||||||
_LOGGER.info("Partial-Restore %s run %d tasks",
|
_LOGGER.info("Restore %s old add-ons", snapshot.slug)
|
||||||
snapshot.slug, len(tasks))
|
await snapshot.restore_addons(addon_list)
|
||||||
await asyncio.wait(tasks, loop=self._loop)
|
|
||||||
|
|
||||||
# make sure homeassistant run agen
|
# make sure homeassistant run agen
|
||||||
await self._homeassistant.run()
|
if task_hass:
|
||||||
|
_LOGGER.info("Restore %s wait for Home-Assistant",
|
||||||
|
snapshot.slug)
|
||||||
|
await task_hass
|
||||||
|
await self._homeassistant.start()
|
||||||
|
|
||||||
|
except Exception: # pylint: disable=broad-except
|
||||||
|
_LOGGER.exception("Restore %s error", snapshot.slug)
|
||||||
|
return False
|
||||||
|
|
||||||
|
else:
|
||||||
_LOGGER.info("Partial-Restore %s done", snapshot.slug)
|
_LOGGER.info("Partial-Restore %s done", snapshot.slug)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
except (OSError, ValueError, tarfile.TarError) as err:
|
|
||||||
_LOGGER.info("Partial-Restore %s error: %s", slug, err)
|
|
||||||
return False
|
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
self._scheduler.suspend = False
|
self._scheduler.suspend = False
|
||||||
self.lock.release()
|
self.lock.release()
|
||||||
|
@@ -1,23 +1,29 @@
|
|||||||
"""Represent a snapshot file."""
|
"""Represent a snapshot file."""
|
||||||
import asyncio
|
import asyncio
|
||||||
|
from base64 import b64decode, b64encode
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import tarfile
|
import tarfile
|
||||||
from tempfile import TemporaryDirectory
|
from tempfile import TemporaryDirectory
|
||||||
|
|
||||||
|
from Crypto.Cipher import AES
|
||||||
|
from Crypto.Util import Padding
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
from voluptuous.humanize import humanize_error
|
from voluptuous.humanize import humanize_error
|
||||||
|
|
||||||
from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
|
from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
|
||||||
from .utils import remove_folder
|
from .utils import (
|
||||||
|
remove_folder, password_to_key, password_for_validating, key_to_iv)
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
|
ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
|
||||||
ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_IMAGE,
|
ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_IMAGE,
|
||||||
ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT,
|
ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT, ATTR_CRYPTO,
|
||||||
ATTR_LAST_VERSION)
|
ATTR_LAST_VERSION, ATTR_PROTECTED, ATTR_WAIT_BOOT, ATTR_SIZE,
|
||||||
|
CRYPTO_AES128)
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
from ..utils.json import write_json_file
|
from ..utils.json import write_json_file
|
||||||
|
from ..utils.tar import SecureTarFile
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -28,9 +34,11 @@ class Snapshot(CoreSysAttributes):
|
|||||||
def __init__(self, coresys, tar_file):
|
def __init__(self, coresys, tar_file):
|
||||||
"""Initialize a snapshot."""
|
"""Initialize a snapshot."""
|
||||||
self.coresys = coresys
|
self.coresys = coresys
|
||||||
self.tar_file = tar_file
|
self._tarfile = tar_file
|
||||||
self._data = {}
|
self._data = {}
|
||||||
self._tmp = None
|
self._tmp = None
|
||||||
|
self._key = None
|
||||||
|
self._aes = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def slug(self):
|
def slug(self):
|
||||||
@@ -52,11 +60,21 @@ class Snapshot(CoreSysAttributes):
|
|||||||
"""Return snapshot date."""
|
"""Return snapshot date."""
|
||||||
return self._data[ATTR_DATE]
|
return self._data[ATTR_DATE]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def protected(self):
|
||||||
|
"""Return snapshot date."""
|
||||||
|
return self._data.get(ATTR_PROTECTED) is not None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def addons(self):
|
def addons(self):
|
||||||
"""Return snapshot date."""
|
"""Return snapshot date."""
|
||||||
return self._data[ATTR_ADDONS]
|
return self._data[ATTR_ADDONS]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def addon_list(self):
|
||||||
|
"""Return a list of addons slugs."""
|
||||||
|
return [addon_data[ATTR_SLUG] for addon_data in self.addons]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def folders(self):
|
def folders(self):
|
||||||
"""Return list of saved folders."""
|
"""Return list of saved folders."""
|
||||||
@@ -77,89 +95,29 @@ class Snapshot(CoreSysAttributes):
|
|||||||
"""Return snapshot homeassistant version."""
|
"""Return snapshot homeassistant version."""
|
||||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION)
|
return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION)
|
||||||
|
|
||||||
@homeassistant_version.setter
|
|
||||||
def homeassistant_version(self, value):
|
|
||||||
"""Set snapshot homeassistant version."""
|
|
||||||
self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] = value
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def homeassistant_last_version(self):
|
def homeassistant(self):
|
||||||
"""Return snapshot homeassistant last version (custom)."""
|
"""Return snapshot homeassistant data."""
|
||||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_LAST_VERSION)
|
return self._data[ATTR_HOMEASSISTANT]
|
||||||
|
|
||||||
@homeassistant_last_version.setter
|
|
||||||
def homeassistant_last_version(self, value):
|
|
||||||
"""Set snapshot homeassistant last version (custom)."""
|
|
||||||
self._data[ATTR_HOMEASSISTANT][ATTR_LAST_VERSION] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def homeassistant_image(self):
|
|
||||||
"""Return snapshot homeassistant custom image."""
|
|
||||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_IMAGE)
|
|
||||||
|
|
||||||
@homeassistant_image.setter
|
|
||||||
def homeassistant_image(self, value):
|
|
||||||
"""Set snapshot homeassistant custom image."""
|
|
||||||
self._data[ATTR_HOMEASSISTANT][ATTR_IMAGE] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def homeassistant_ssl(self):
|
|
||||||
"""Return snapshot homeassistant api ssl."""
|
|
||||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_SSL)
|
|
||||||
|
|
||||||
@homeassistant_ssl.setter
|
|
||||||
def homeassistant_ssl(self, value):
|
|
||||||
"""Set snapshot homeassistant api ssl."""
|
|
||||||
self._data[ATTR_HOMEASSISTANT][ATTR_SSL] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def homeassistant_port(self):
|
|
||||||
"""Return snapshot homeassistant api port."""
|
|
||||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_PORT)
|
|
||||||
|
|
||||||
@homeassistant_port.setter
|
|
||||||
def homeassistant_port(self, value):
|
|
||||||
"""Set snapshot homeassistant api port."""
|
|
||||||
self._data[ATTR_HOMEASSISTANT][ATTR_PORT] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def homeassistant_password(self):
|
|
||||||
"""Return snapshot homeassistant api password."""
|
|
||||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_PASSWORD)
|
|
||||||
|
|
||||||
@homeassistant_password.setter
|
|
||||||
def homeassistant_password(self, value):
|
|
||||||
"""Set snapshot homeassistant api password."""
|
|
||||||
self._data[ATTR_HOMEASSISTANT][ATTR_PASSWORD] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def homeassistant_watchdog(self):
|
|
||||||
"""Return snapshot homeassistant watchdog options."""
|
|
||||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_WATCHDOG)
|
|
||||||
|
|
||||||
@homeassistant_watchdog.setter
|
|
||||||
def homeassistant_watchdog(self, value):
|
|
||||||
"""Set snapshot homeassistant watchdog options."""
|
|
||||||
self._data[ATTR_HOMEASSISTANT][ATTR_WATCHDOG] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def homeassistant_boot(self):
|
|
||||||
"""Return snapshot homeassistant boot options."""
|
|
||||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_BOOT)
|
|
||||||
|
|
||||||
@homeassistant_boot.setter
|
|
||||||
def homeassistant_boot(self, value):
|
|
||||||
"""Set snapshot homeassistant boot options."""
|
|
||||||
self._data[ATTR_HOMEASSISTANT][ATTR_BOOT] = value
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def size(self):
|
def size(self):
|
||||||
"""Return snapshot size."""
|
"""Return snapshot size."""
|
||||||
if not self.tar_file.is_file():
|
if not self.tarfile.is_file():
|
||||||
return 0
|
return 0
|
||||||
return self.tar_file.stat().st_size / 1048576 # calc mbyte
|
return round(self.tarfile.stat().st_size / 1048576, 2) # calc mbyte
|
||||||
|
|
||||||
def create(self, slug, name, date, sys_type):
|
@property
|
||||||
|
def is_new(self):
|
||||||
|
"""Return True if there is new."""
|
||||||
|
return not self.tarfile.exists()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def tarfile(self):
|
||||||
|
"""Return path to Snapshot tarfile."""
|
||||||
|
return self._tarfile
|
||||||
|
|
||||||
|
def new(self, slug, name, date, sys_type, password=None):
|
||||||
"""Initialize a new snapshot."""
|
"""Initialize a new snapshot."""
|
||||||
# init metadata
|
# init metadata
|
||||||
self._data[ATTR_SLUG] = slug
|
self._data[ATTR_SLUG] = slug
|
||||||
@@ -170,15 +128,52 @@ class Snapshot(CoreSysAttributes):
|
|||||||
# Add defaults
|
# Add defaults
|
||||||
self._data = SCHEMA_SNAPSHOT(self._data)
|
self._data = SCHEMA_SNAPSHOT(self._data)
|
||||||
|
|
||||||
|
# Set password
|
||||||
|
if password:
|
||||||
|
self._key = password_to_key(password)
|
||||||
|
self._aes = AES.new(
|
||||||
|
self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
|
||||||
|
self._data[ATTR_PROTECTED] = password_for_validating(password)
|
||||||
|
self._data[ATTR_CRYPTO] = CRYPTO_AES128
|
||||||
|
|
||||||
|
def set_password(self, password):
|
||||||
|
"""Set the password for a exists snapshot."""
|
||||||
|
if not password:
|
||||||
|
return False
|
||||||
|
|
||||||
|
validating = password_for_validating(password)
|
||||||
|
if validating != self._data[ATTR_PROTECTED]:
|
||||||
|
return False
|
||||||
|
|
||||||
|
self._key = password_to_key(password)
|
||||||
|
self._aes = AES.new(self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _encrypt_data(self, data):
|
||||||
|
"""Make data secure."""
|
||||||
|
if not self._key:
|
||||||
|
return data
|
||||||
|
|
||||||
|
return b64encode(
|
||||||
|
self._aes.encrypt(Padding.pad(data.encode(), 16))).decode()
|
||||||
|
|
||||||
|
def _decrypt_data(self, data):
|
||||||
|
"""Make data readable."""
|
||||||
|
if not self._key:
|
||||||
|
return data
|
||||||
|
|
||||||
|
return Padding.unpad(
|
||||||
|
self._aes.decrypt(b64decode(data)), 16).decode()
|
||||||
|
|
||||||
async def load(self):
|
async def load(self):
|
||||||
"""Read snapshot.json from tar file."""
|
"""Read snapshot.json from tar file."""
|
||||||
if not self.tar_file.is_file():
|
if not self.tarfile.is_file():
|
||||||
_LOGGER.error("No tarfile %s", self.tar_file)
|
_LOGGER.error("No tarfile %s", self.tarfile)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _load_file():
|
def _load_file():
|
||||||
"""Read snapshot.json."""
|
"""Read snapshot.json."""
|
||||||
with tarfile.open(self.tar_file, "r:") as snapshot:
|
with tarfile.open(self.tarfile, "r:") as snapshot:
|
||||||
json_file = snapshot.extractfile("./snapshot.json")
|
json_file = snapshot.extractfile("./snapshot.json")
|
||||||
return json_file.read()
|
return json_file.read()
|
||||||
|
|
||||||
@@ -187,21 +182,21 @@ class Snapshot(CoreSysAttributes):
|
|||||||
raw = await self._loop.run_in_executor(None, _load_file)
|
raw = await self._loop.run_in_executor(None, _load_file)
|
||||||
except (tarfile.TarError, KeyError) as err:
|
except (tarfile.TarError, KeyError) as err:
|
||||||
_LOGGER.error(
|
_LOGGER.error(
|
||||||
"Can't read snapshot tarfile %s: %s", self.tar_file, err)
|
"Can't read snapshot tarfile %s: %s", self.tarfile, err)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# parse data
|
# parse data
|
||||||
try:
|
try:
|
||||||
raw_dict = json.loads(raw)
|
raw_dict = json.loads(raw)
|
||||||
except json.JSONDecodeError as err:
|
except json.JSONDecodeError as err:
|
||||||
_LOGGER.error("Can't read data for %s: %s", self.tar_file, err)
|
_LOGGER.error("Can't read data for %s: %s", self.tarfile, err)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# validate
|
# validate
|
||||||
try:
|
try:
|
||||||
self._data = SCHEMA_SNAPSHOT(raw_dict)
|
self._data = SCHEMA_SNAPSHOT(raw_dict)
|
||||||
except vol.Invalid as err:
|
except vol.Invalid as err:
|
||||||
_LOGGER.error("Can't validate data for %s: %s", self.tar_file,
|
_LOGGER.error("Can't validate data for %s: %s", self.tarfile,
|
||||||
humanize_error(raw_dict, err))
|
humanize_error(raw_dict, err))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -212,13 +207,13 @@ class Snapshot(CoreSysAttributes):
|
|||||||
self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))
|
self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))
|
||||||
|
|
||||||
# create a snapshot
|
# create a snapshot
|
||||||
if not self.tar_file.is_file():
|
if not self.tarfile.is_file():
|
||||||
return self
|
return self
|
||||||
|
|
||||||
# extract a exists snapshot
|
# extract a exists snapshot
|
||||||
def _extract_snapshot():
|
def _extract_snapshot():
|
||||||
"""Extract a snapshot."""
|
"""Extract a snapshot."""
|
||||||
with tarfile.open(self.tar_file, "r:") as tar:
|
with tarfile.open(self.tarfile, "r:") as tar:
|
||||||
tar.extractall(path=self._tmp.name)
|
tar.extractall(path=self._tmp.name)
|
||||||
|
|
||||||
await self._loop.run_in_executor(None, _extract_snapshot)
|
await self._loop.run_in_executor(None, _extract_snapshot)
|
||||||
@@ -226,7 +221,7 @@ class Snapshot(CoreSysAttributes):
|
|||||||
async def __aexit__(self, exception_type, exception_value, traceback):
|
async def __aexit__(self, exception_type, exception_value, traceback):
|
||||||
"""Async context to close a snapshot."""
|
"""Async context to close a snapshot."""
|
||||||
# exists snapshot or exception on build
|
# exists snapshot or exception on build
|
||||||
if self.tar_file.is_file() or exception_type is not None:
|
if self.tarfile.is_file() or exception_type is not None:
|
||||||
self._tmp.cleanup()
|
self._tmp.cleanup()
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -234,72 +229,109 @@ class Snapshot(CoreSysAttributes):
|
|||||||
try:
|
try:
|
||||||
self._data = SCHEMA_SNAPSHOT(self._data)
|
self._data = SCHEMA_SNAPSHOT(self._data)
|
||||||
except vol.Invalid as err:
|
except vol.Invalid as err:
|
||||||
_LOGGER.error("Invalid data for %s: %s", self.tar_file,
|
_LOGGER.error("Invalid data for %s: %s", self.tarfile,
|
||||||
humanize_error(self._data, err))
|
humanize_error(self._data, err))
|
||||||
raise ValueError("Invalid config") from None
|
raise ValueError("Invalid config") from None
|
||||||
|
|
||||||
# new snapshot, build it
|
# new snapshot, build it
|
||||||
def _create_snapshot():
|
def _create_snapshot():
|
||||||
"""Create a new snapshot."""
|
"""Create a new snapshot."""
|
||||||
with tarfile.open(self.tar_file, "w:") as tar:
|
with tarfile.open(self.tarfile, "w:") as tar:
|
||||||
tar.add(self._tmp.name, arcname=".")
|
tar.add(self._tmp.name, arcname=".")
|
||||||
|
|
||||||
if write_json_file(Path(self._tmp.name, "snapshot.json"), self._data):
|
try:
|
||||||
|
write_json_file(Path(self._tmp.name, "snapshot.json"), self._data)
|
||||||
await self._loop.run_in_executor(None, _create_snapshot)
|
await self._loop.run_in_executor(None, _create_snapshot)
|
||||||
else:
|
except (OSError, json.JSONDecodeError) as err:
|
||||||
_LOGGER.error("Can't write snapshot.json")
|
_LOGGER.error("Can't write snapshot: %s", err)
|
||||||
|
finally:
|
||||||
self._tmp.cleanup()
|
self._tmp.cleanup()
|
||||||
|
|
||||||
async def import_addon(self, addon):
|
async def store_addons(self, addon_list=None):
|
||||||
"""Add a addon into snapshot."""
|
"""Add a list of add-ons into snapshot."""
|
||||||
snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))
|
addon_list = addon_list or self._addons.list_installed
|
||||||
|
|
||||||
if not await addon.snapshot(snapshot_file):
|
async def _addon_save(addon):
|
||||||
|
"""Task to store a add-on into snapshot."""
|
||||||
|
addon_file = SecureTarFile(
|
||||||
|
Path(self._tmp.name, f"{addon.slug}.tar.gz"),
|
||||||
|
'w', key=self._key)
|
||||||
|
|
||||||
|
# Take snapshot
|
||||||
|
if not await addon.snapshot(addon_file):
|
||||||
_LOGGER.error("Can't make snapshot from %s", addon.slug)
|
_LOGGER.error("Can't make snapshot from %s", addon.slug)
|
||||||
return False
|
return
|
||||||
|
|
||||||
# store to config
|
# Store to config
|
||||||
self._data[ATTR_ADDONS].append({
|
self._data[ATTR_ADDONS].append({
|
||||||
ATTR_SLUG: addon.slug,
|
ATTR_SLUG: addon.slug,
|
||||||
ATTR_NAME: addon.name,
|
ATTR_NAME: addon.name,
|
||||||
ATTR_VERSION: addon.version_installed,
|
ATTR_VERSION: addon.version_installed,
|
||||||
|
ATTR_SIZE: addon_file.size,
|
||||||
})
|
})
|
||||||
|
|
||||||
return True
|
# Run tasks
|
||||||
|
tasks = [_addon_save(addon) for addon in addon_list]
|
||||||
|
if tasks:
|
||||||
|
await asyncio.wait(tasks, loop=self._loop)
|
||||||
|
|
||||||
async def export_addon(self, addon):
|
async def restore_addons(self, addon_list=None):
|
||||||
"""Restore a addon from snapshot."""
|
"""Restore a list add-on from snapshot."""
|
||||||
snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))
|
if not addon_list:
|
||||||
|
addon_list = []
|
||||||
|
for addon_slug in self.addon_list:
|
||||||
|
addon = self._addons.get(addon_slug)
|
||||||
|
if addon:
|
||||||
|
addon_list.append(addon)
|
||||||
|
|
||||||
if not await addon.restore(snapshot_file):
|
async def _addon_restore(addon):
|
||||||
|
"""Task to restore a add-on into snapshot."""
|
||||||
|
addon_file = SecureTarFile(
|
||||||
|
Path(self._tmp.name, f"{addon.slug}.tar.gz"),
|
||||||
|
'r', key=self._key)
|
||||||
|
|
||||||
|
# If exists inside snapshot
|
||||||
|
if not addon_file.path.exists():
|
||||||
|
_LOGGER.error("Can't find snapshot for %s", addon.slug)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Performe a restore
|
||||||
|
if not await addon.restore(addon_file):
|
||||||
_LOGGER.error("Can't restore snapshot for %s", addon.slug)
|
_LOGGER.error("Can't restore snapshot for %s", addon.slug)
|
||||||
return False
|
return
|
||||||
|
|
||||||
return True
|
# Run tasks
|
||||||
|
tasks = [_addon_restore(addon) for addon in addon_list]
|
||||||
|
if tasks:
|
||||||
|
await asyncio.wait(tasks, loop=self._loop)
|
||||||
|
|
||||||
async def store_folders(self, folder_list=None):
|
async def store_folders(self, folder_list=None):
|
||||||
"""Backup hassio data into snapshot."""
|
"""Backup hassio data into snapshot."""
|
||||||
folder_list = folder_list or ALL_FOLDERS
|
folder_list = set(folder_list or ALL_FOLDERS)
|
||||||
|
|
||||||
def _folder_save(name):
|
def _folder_save(name):
|
||||||
"""Intenal function to snapshot a folder."""
|
"""Intenal function to snapshot a folder."""
|
||||||
slug_name = name.replace("/", "_")
|
slug_name = name.replace("/", "_")
|
||||||
snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
|
tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
|
||||||
origin_dir = Path(self._config.path_hassio, name)
|
origin_dir = Path(self._config.path_hassio, name)
|
||||||
|
|
||||||
|
# Check if exsits
|
||||||
|
if not origin_dir.is_dir():
|
||||||
|
_LOGGER.warning("Can't find snapshot folder %s", name)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Take snapshot
|
||||||
try:
|
try:
|
||||||
_LOGGER.info("Snapshot folder %s", name)
|
_LOGGER.info("Snapshot folder %s", name)
|
||||||
with tarfile.open(snapshot_tar, "w:gz",
|
with SecureTarFile(tar_name, 'w', key=self._key) as tar_file:
|
||||||
compresslevel=1) as tar_file:
|
|
||||||
tar_file.add(origin_dir, arcname=".")
|
tar_file.add(origin_dir, arcname=".")
|
||||||
_LOGGER.info("Snapshot folder %s done", name)
|
|
||||||
|
|
||||||
|
_LOGGER.info("Snapshot folder %s done", name)
|
||||||
self._data[ATTR_FOLDERS].append(name)
|
self._data[ATTR_FOLDERS].append(name)
|
||||||
except (tarfile.TarError, OSError) as err:
|
except (tarfile.TarError, OSError) as err:
|
||||||
_LOGGER.warning("Can't snapshot folder %s: %s", name, err)
|
_LOGGER.warning("Can't snapshot folder %s: %s", name, err)
|
||||||
|
|
||||||
# run tasks
|
# Run tasks
|
||||||
tasks = [self._loop.run_in_executor(None, _folder_save, folder)
|
tasks = [self._loop.run_in_executor(None, _folder_save, folder)
|
||||||
for folder in folder_list]
|
for folder in folder_list]
|
||||||
if tasks:
|
if tasks:
|
||||||
@@ -307,27 +339,33 @@ class Snapshot(CoreSysAttributes):
|
|||||||
|
|
||||||
async def restore_folders(self, folder_list=None):
|
async def restore_folders(self, folder_list=None):
|
||||||
"""Backup hassio data into snapshot."""
|
"""Backup hassio data into snapshot."""
|
||||||
folder_list = folder_list or ALL_FOLDERS
|
folder_list = set(folder_list or self.folders)
|
||||||
|
|
||||||
def _folder_restore(name):
|
def _folder_restore(name):
|
||||||
"""Intenal function to restore a folder."""
|
"""Intenal function to restore a folder."""
|
||||||
slug_name = name.replace("/", "_")
|
slug_name = name.replace("/", "_")
|
||||||
snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
|
tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
|
||||||
origin_dir = Path(self._config.path_hassio, name)
|
origin_dir = Path(self._config.path_hassio, name)
|
||||||
|
|
||||||
# clean old stuff
|
# Check if exists inside snapshot
|
||||||
|
if not tar_name.exists():
|
||||||
|
_LOGGER.warning("Can't find restore folder %s", name)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Clean old stuff
|
||||||
if origin_dir.is_dir():
|
if origin_dir.is_dir():
|
||||||
remove_folder(origin_dir)
|
remove_folder(origin_dir)
|
||||||
|
|
||||||
|
# Performe a restore
|
||||||
try:
|
try:
|
||||||
_LOGGER.info("Restore folder %s", name)
|
_LOGGER.info("Restore folder %s", name)
|
||||||
with tarfile.open(snapshot_tar, "r:gz") as tar_file:
|
with SecureTarFile(tar_name, 'r', key=self._key) as tar_file:
|
||||||
tar_file.extractall(path=origin_dir)
|
tar_file.extractall(path=origin_dir)
|
||||||
_LOGGER.info("Restore folder %s done", name)
|
_LOGGER.info("Restore folder %s done", name)
|
||||||
except (tarfile.TarError, OSError) as err:
|
except (tarfile.TarError, OSError) as err:
|
||||||
_LOGGER.warning("Can't restore folder %s: %s", name, err)
|
_LOGGER.warning("Can't restore folder %s: %s", name, err)
|
||||||
|
|
||||||
# run tasks
|
# Run tasks
|
||||||
tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
|
tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
|
||||||
for folder in folder_list]
|
for folder in folder_list]
|
||||||
if tasks:
|
if tasks:
|
||||||
@@ -335,37 +373,43 @@ class Snapshot(CoreSysAttributes):
|
|||||||
|
|
||||||
def store_homeassistant(self):
|
def store_homeassistant(self):
|
||||||
"""Read all data from homeassistant object."""
|
"""Read all data from homeassistant object."""
|
||||||
self.homeassistant_version = self._homeassistant.version
|
self.homeassistant[ATTR_VERSION] = self._homeassistant.version
|
||||||
self.homeassistant_watchdog = self._homeassistant.watchdog
|
self.homeassistant[ATTR_WATCHDOG] = self._homeassistant.watchdog
|
||||||
self.homeassistant_boot = self._homeassistant.boot
|
self.homeassistant[ATTR_BOOT] = self._homeassistant.boot
|
||||||
|
self.homeassistant[ATTR_WAIT_BOOT] = self._homeassistant.wait_boot
|
||||||
|
|
||||||
# custom image
|
# Custom image
|
||||||
if self._homeassistant.is_custom_image:
|
if self._homeassistant.is_custom_image:
|
||||||
self.homeassistant_image = self._homeassistant.image
|
self.homeassistant[ATTR_IMAGE] = self._homeassistant.image
|
||||||
self.homeassistant_last_version = self._homeassistant.last_version
|
self.homeassistant[ATTR_LAST_VERSION] = \
|
||||||
|
self._homeassistant.last_version
|
||||||
|
|
||||||
# api
|
# API/Proxy
|
||||||
self.homeassistant_port = self._homeassistant.api_port
|
self.homeassistant[ATTR_PORT] = self._homeassistant.api_port
|
||||||
self.homeassistant_ssl = self._homeassistant.api_ssl
|
self.homeassistant[ATTR_SSL] = self._homeassistant.api_ssl
|
||||||
self.homeassistant_password = self._homeassistant.api_password
|
self.homeassistant[ATTR_PASSWORD] = \
|
||||||
|
self._encrypt_data(self._homeassistant.api_password)
|
||||||
|
|
||||||
def restore_homeassistant(self):
|
def restore_homeassistant(self):
|
||||||
"""Write all data to homeassistant object."""
|
"""Write all data to homeassistant object."""
|
||||||
self._homeassistant.watchdog = self.homeassistant_watchdog
|
self._homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
|
||||||
self._homeassistant.boot = self.homeassistant_boot
|
self._homeassistant.boot = self.homeassistant[ATTR_BOOT]
|
||||||
|
self._homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]
|
||||||
|
|
||||||
# custom image
|
# Custom image
|
||||||
if self.homeassistant_image:
|
if self.homeassistant.get(ATTR_IMAGE):
|
||||||
self._homeassistant.image = self.homeassistant_image
|
self._homeassistant.image = self.homeassistant[ATTR_IMAGE]
|
||||||
self._homeassistant.last_version = self.homeassistant_last_version
|
self._homeassistant.last_version = \
|
||||||
|
self.homeassistant[ATTR_LAST_VERSION]
|
||||||
|
|
||||||
# api
|
# API/Proxy
|
||||||
self._homeassistant.api_port = self.homeassistant_port
|
self._homeassistant.api_port = self.homeassistant[ATTR_PORT]
|
||||||
self._homeassistant.api_ssl = self.homeassistant_ssl
|
self._homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
|
||||||
self._homeassistant.api_password = self.homeassistant_password
|
self._homeassistant.api_password = \
|
||||||
|
self._decrypt_data(self.homeassistant[ATTR_PASSWORD])
|
||||||
|
|
||||||
# save
|
# save
|
||||||
self._homeassistant.save()
|
self._homeassistant.save_data()
|
||||||
|
|
||||||
def store_repositories(self):
|
def store_repositories(self):
|
||||||
"""Store repository list into snapshot."""
|
"""Store repository list into snapshot."""
|
||||||
|
@@ -1,6 +1,34 @@
|
|||||||
"""Util addons functions."""
|
"""Util addons functions."""
|
||||||
import hashlib
|
import hashlib
|
||||||
import shutil
|
import shutil
|
||||||
|
import re
|
||||||
|
|
||||||
|
RE_DIGITS = re.compile(r"\d+")
|
||||||
|
|
||||||
|
|
||||||
|
def password_to_key(password):
|
||||||
|
"""Generate a AES Key from password."""
|
||||||
|
password = password.encode()
|
||||||
|
for _ in range(100):
|
||||||
|
password = hashlib.sha256(password).digest()
|
||||||
|
return password[:16]
|
||||||
|
|
||||||
|
|
||||||
|
def password_for_validating(password):
|
||||||
|
"""Generate a SHA256 hash from password."""
|
||||||
|
for _ in range(100):
|
||||||
|
password = hashlib.sha256(password.encode()).hexdigest()
|
||||||
|
try:
|
||||||
|
return str(sum(map(int, RE_DIGITS.findall(password))))[0]
|
||||||
|
except (ValueError, IndexError):
|
||||||
|
return "0"
|
||||||
|
|
||||||
|
|
||||||
|
def key_to_iv(key):
|
||||||
|
"""Generate a iv from Key."""
|
||||||
|
for _ in range(100):
|
||||||
|
key = hashlib.sha256(key).digest()
|
||||||
|
return key[:16]
|
||||||
|
|
||||||
|
|
||||||
def create_slug(name, date_str):
|
def create_slug(name, date_str):
|
||||||
|
@@ -5,35 +5,52 @@ import voluptuous as vol
|
|||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
|
ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
|
||||||
ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_IMAGE,
|
ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_IMAGE,
|
||||||
ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT,
|
ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT, ATTR_SIZE,
|
||||||
ATTR_LAST_VERSION,
|
ATTR_LAST_VERSION, ATTR_WAIT_BOOT, ATTR_PROTECTED, ATTR_CRYPTO,
|
||||||
FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
|
FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
|
||||||
SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
|
SNAPSHOT_FULL, SNAPSHOT_PARTIAL, CRYPTO_AES128)
|
||||||
from ..validate import NETWORK_PORT
|
from ..validate import NETWORK_PORT, REPOSITORIES, DOCKER_IMAGE
|
||||||
|
|
||||||
ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]
|
ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]
|
||||||
|
|
||||||
|
|
||||||
|
def unique_addons(addons_list):
|
||||||
|
"""Validate that a add-on is unique."""
|
||||||
|
single = set([addon[ATTR_SLUG] for addon in addons_list])
|
||||||
|
|
||||||
|
if len(single) != len(addons_list):
|
||||||
|
raise vol.Invalid("Invalid addon list on snapshot!")
|
||||||
|
return addons_list
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
# pylint: disable=no-value-for-parameter
|
||||||
SCHEMA_SNAPSHOT = vol.Schema({
|
SCHEMA_SNAPSHOT = vol.Schema({
|
||||||
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
||||||
vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
|
vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
|
||||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
vol.Required(ATTR_NAME): vol.Coerce(str),
|
||||||
vol.Required(ATTR_DATE): vol.Coerce(str),
|
vol.Required(ATTR_DATE): vol.Coerce(str),
|
||||||
vol.Optional(ATTR_HOMEASSISTANT, default={}): vol.Schema({
|
vol.Inclusive(ATTR_PROTECTED, 'encrypted'):
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
vol.All(vol.Coerce(str), vol.Length(min=1, max=1)),
|
||||||
vol.Optional(ATTR_IMAGE): vol.Coerce(str),
|
vol.Inclusive(ATTR_CRYPTO, 'encrypted'): CRYPTO_AES128,
|
||||||
vol.Optional(ATTR_LAST_VERSION): vol.Coerce(str),
|
vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema({
|
||||||
|
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||||
|
vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
|
||||||
|
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
|
||||||
vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
|
vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
|
||||||
vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
|
vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
|
vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
|
||||||
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
||||||
vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
|
vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_WAIT_BOOT, default=600):
|
||||||
|
vol.All(vol.Coerce(int), vol.Range(min=60)),
|
||||||
}, extra=vol.REMOVE_EXTRA),
|
}, extra=vol.REMOVE_EXTRA),
|
||||||
vol.Optional(ATTR_FOLDERS, default=[]): [vol.In(ALL_FOLDERS)],
|
vol.Optional(ATTR_FOLDERS, default=list):
|
||||||
vol.Optional(ATTR_ADDONS, default=[]): [vol.Schema({
|
vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
|
||||||
|
vol.Optional(ATTR_ADDONS, default=list): vol.All([vol.Schema({
|
||||||
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
||||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
vol.Required(ATTR_NAME): vol.Coerce(str),
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||||
}, extra=vol.REMOVE_EXTRA)],
|
vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float),
|
||||||
vol.Optional(ATTR_REPOSITORIES, default=[]): [vol.Url()],
|
}, extra=vol.REMOVE_EXTRA)], unique_addons),
|
||||||
|
vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
|
||||||
}, extra=vol.ALLOW_EXTRA)
|
}, extra=vol.ALLOW_EXTRA)
|
||||||
|
@@ -21,6 +21,11 @@ class Supervisor(CoreSysAttributes):
|
|||||||
_LOGGER.fatal("Can't setup supervisor docker container!")
|
_LOGGER.fatal("Can't setup supervisor docker container!")
|
||||||
await self.instance.cleanup()
|
await self.instance.cleanup()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def need_update(self):
|
||||||
|
"""Return True if a update is available."""
|
||||||
|
return self.version != self.last_version
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def version(self):
|
def version(self):
|
||||||
"""Return version of running homeassistant."""
|
"""Return version of running homeassistant."""
|
||||||
|
@@ -1,6 +1,5 @@
|
|||||||
"""Multible tasks."""
|
"""Multible tasks."""
|
||||||
import asyncio
|
import asyncio
|
||||||
from datetime import datetime
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from .coresys import CoreSysAttributes
|
from .coresys import CoreSysAttributes
|
||||||
@@ -22,8 +21,6 @@ class Tasks(CoreSysAttributes):
|
|||||||
RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
|
RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
|
||||||
RUN_WATCHDOG_HOMEASSISTANT_API = 300
|
RUN_WATCHDOG_HOMEASSISTANT_API = 300
|
||||||
|
|
||||||
RUN_CLEANUP_API_SESSIONS = 900
|
|
||||||
|
|
||||||
def __init__(self, coresys):
|
def __init__(self, coresys):
|
||||||
"""Initialize Tasks."""
|
"""Initialize Tasks."""
|
||||||
self.coresys = coresys
|
self.coresys = coresys
|
||||||
@@ -32,7 +29,6 @@ class Tasks(CoreSysAttributes):
|
|||||||
|
|
||||||
async def load(self):
|
async def load(self):
|
||||||
"""Add Tasks to scheduler."""
|
"""Add Tasks to scheduler."""
|
||||||
|
|
||||||
self.jobs.add(self._scheduler.register_task(
|
self.jobs.add(self._scheduler.register_task(
|
||||||
self._update_addons, self.RUN_UPDATE_ADDONS))
|
self._update_addons, self.RUN_UPDATE_ADDONS))
|
||||||
self.jobs.add(self._scheduler.register_task(
|
self.jobs.add(self._scheduler.register_task(
|
||||||
@@ -54,12 +50,7 @@ class Tasks(CoreSysAttributes):
|
|||||||
self._watchdog_homeassistant_api,
|
self._watchdog_homeassistant_api,
|
||||||
self.RUN_WATCHDOG_HOMEASSISTANT_API))
|
self.RUN_WATCHDOG_HOMEASSISTANT_API))
|
||||||
|
|
||||||
async def _cleanup_sessions(self):
|
_LOGGER.info("All core tasks are scheduled")
|
||||||
"""Cleanup old api sessions."""
|
|
||||||
now = datetime.now()
|
|
||||||
for session, until_valid in self._config.security_sessions.items():
|
|
||||||
if now >= until_valid:
|
|
||||||
self._config.drop_security_session(session)
|
|
||||||
|
|
||||||
async def _update_addons(self):
|
async def _update_addons(self):
|
||||||
"""Check if a update is available of a addon and update it."""
|
"""Check if a update is available of a addon and update it."""
|
||||||
@@ -83,8 +74,7 @@ class Tasks(CoreSysAttributes):
|
|||||||
|
|
||||||
async def _update_supervisor(self):
|
async def _update_supervisor(self):
|
||||||
"""Check and run update of supervisor hassio."""
|
"""Check and run update of supervisor hassio."""
|
||||||
await self._updater.reload()
|
if not self._supervisor.need_update:
|
||||||
if self._supervisor.last_version == self._supervisor.version:
|
|
||||||
return
|
return
|
||||||
|
|
||||||
# don't perform a update on beta/dev channel
|
# don't perform a update on beta/dev channel
|
||||||
@@ -108,7 +98,7 @@ class Tasks(CoreSysAttributes):
|
|||||||
return
|
return
|
||||||
|
|
||||||
_LOGGER.warning("Watchdog found a problem with Home-Assistant docker!")
|
_LOGGER.warning("Watchdog found a problem with Home-Assistant docker!")
|
||||||
await self._homeassistant.run()
|
await self._homeassistant.start()
|
||||||
|
|
||||||
async def _watchdog_homeassistant_api(self):
|
async def _watchdog_homeassistant_api(self):
|
||||||
"""Create scheduler task for montoring running state of API.
|
"""Create scheduler task for montoring running state of API.
|
||||||
|
@@ -89,4 +89,4 @@ class Updater(JsonConfig, CoreSysAttributes):
|
|||||||
# update versions
|
# update versions
|
||||||
self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
|
self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
|
||||||
self._data[ATTR_HASSIO] = data.get('hassio')
|
self._data[ATTR_HASSIO] = data.get('hassio')
|
||||||
self.save()
|
self.save_data()
|
||||||
|
@@ -8,6 +8,8 @@ import aiohttp
|
|||||||
import async_timeout
|
import async_timeout
|
||||||
import pytz
|
import pytz
|
||||||
|
|
||||||
|
UTC = pytz.utc
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
FREEGEOIP_URL = "https://freegeoip.io/json/"
|
FREEGEOIP_URL = "https://freegeoip.io/json/"
|
||||||
@@ -61,7 +63,7 @@ def parse_datetime(dt_str):
|
|||||||
|
|
||||||
tzinfo = None # type: Optional[dt.tzinfo]
|
tzinfo = None # type: Optional[dt.tzinfo]
|
||||||
if tzinfo_str == 'Z':
|
if tzinfo_str == 'Z':
|
||||||
tzinfo = pytz.utc
|
tzinfo = UTC
|
||||||
elif tzinfo_str is not None:
|
elif tzinfo_str is not None:
|
||||||
offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
|
offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
|
||||||
offset_hours = int(tzinfo_str[1:3])
|
offset_hours = int(tzinfo_str[1:3])
|
||||||
@@ -74,3 +76,8 @@ def parse_datetime(dt_str):
|
|||||||
kws = {k: int(v) for k, v in kws.items() if v is not None}
|
kws = {k: int(v) for k, v in kws.items() if v is not None}
|
||||||
kws['tzinfo'] = tzinfo
|
kws['tzinfo'] = tzinfo
|
||||||
return datetime(**kws)
|
return datetime(**kws)
|
||||||
|
|
||||||
|
|
||||||
|
def utcnow():
|
||||||
|
"""Returns current timestamp including timezone."""
|
||||||
|
return datetime.now(UTC)
|
||||||
|
@@ -10,14 +10,9 @@ _LOGGER = logging.getLogger(__name__)
|
|||||||
|
|
||||||
def write_json_file(jsonfile, data):
|
def write_json_file(jsonfile, data):
|
||||||
"""Write a json file."""
|
"""Write a json file."""
|
||||||
try:
|
|
||||||
json_str = json.dumps(data, indent=2)
|
json_str = json.dumps(data, indent=2)
|
||||||
with jsonfile.open('w') as conf_file:
|
with jsonfile.open('w') as conf_file:
|
||||||
conf_file.write(json_str)
|
conf_file.write(json_str)
|
||||||
except (OSError, json.JSONDecodeError):
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def read_json_file(jsonfile):
|
def read_json_file(jsonfile):
|
||||||
@@ -35,7 +30,18 @@ class JsonConfig(object):
|
|||||||
self._schema = schema
|
self._schema = schema
|
||||||
self._data = {}
|
self._data = {}
|
||||||
|
|
||||||
# init or load data
|
self.read_data()
|
||||||
|
|
||||||
|
def reset_data(self):
|
||||||
|
"""Reset json file to default."""
|
||||||
|
try:
|
||||||
|
self._data = self._schema({})
|
||||||
|
except vol.Invalid as ex:
|
||||||
|
_LOGGER.error("Can't reset %s: %s",
|
||||||
|
self._file, humanize_error(self._data, ex))
|
||||||
|
|
||||||
|
def read_data(self):
|
||||||
|
"""Read json file & validate."""
|
||||||
if self._file.is_file():
|
if self._file.is_file():
|
||||||
try:
|
try:
|
||||||
self._data = read_json_file(self._file)
|
self._data = read_json_file(self._file)
|
||||||
@@ -43,27 +49,33 @@ class JsonConfig(object):
|
|||||||
_LOGGER.warning("Can't read %s", self._file)
|
_LOGGER.warning("Can't read %s", self._file)
|
||||||
self._data = {}
|
self._data = {}
|
||||||
|
|
||||||
# validate
|
# Validate
|
||||||
try:
|
try:
|
||||||
self._data = self._schema(self._data)
|
self._data = self._schema(self._data)
|
||||||
except vol.Invalid as ex:
|
except vol.Invalid as ex:
|
||||||
_LOGGER.error("Can't parse %s: %s",
|
_LOGGER.error("Can't parse %s: %s",
|
||||||
self._file, humanize_error(self._data, ex))
|
self._file, humanize_error(self._data, ex))
|
||||||
# reset data to default
|
|
||||||
|
# Reset data to default
|
||||||
|
_LOGGER.warning("Reset %s to default", self._file)
|
||||||
self._data = self._schema({})
|
self._data = self._schema({})
|
||||||
|
|
||||||
def save(self):
|
def save_data(self):
|
||||||
"""Store data to config file."""
|
"""Store data to config file."""
|
||||||
# validate
|
# Validate
|
||||||
try:
|
try:
|
||||||
self._data = self._schema(self._data)
|
self._data = self._schema(self._data)
|
||||||
except vol.Invalid as ex:
|
except vol.Invalid as ex:
|
||||||
_LOGGER.error("Can't parse data: %s",
|
_LOGGER.error("Can't parse data: %s",
|
||||||
humanize_error(self._data, ex))
|
humanize_error(self._data, ex))
|
||||||
return False
|
|
||||||
|
# Load last valid data
|
||||||
|
_LOGGER.warning("Reset %s to last version", self._file)
|
||||||
|
self.read_data()
|
||||||
|
return
|
||||||
|
|
||||||
# write
|
# write
|
||||||
if not write_json_file(self._file, self._data):
|
try:
|
||||||
_LOGGER.error("Can't store config in %s", self._file)
|
write_json_file(self._file, self._data)
|
||||||
return False
|
except (OSError, json.JSONDecodeError) as err:
|
||||||
return True
|
_LOGGER.error("Can't store config in %s: %s", self._file, err)
|
||||||
|
88
hassio/utils/tar.py
Normal file
88
hassio/utils/tar.py
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
"""Tarfile fileobject handler for encrypted files."""
|
||||||
|
import tarfile
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
from Crypto.Cipher import AES
|
||||||
|
from Crypto.Random import get_random_bytes
|
||||||
|
from Crypto.Util.Padding import pad
|
||||||
|
|
||||||
|
BLOCK_SIZE = 16
|
||||||
|
|
||||||
|
MOD_READ = 'r'
|
||||||
|
MOD_WRITE = 'w'
|
||||||
|
|
||||||
|
|
||||||
|
class SecureTarFile(object):
|
||||||
|
"""Handle encrypted files for tarfile library."""
|
||||||
|
|
||||||
|
def __init__(self, name, mode, key=None, gzip=True):
|
||||||
|
"""Initialize encryption handler."""
|
||||||
|
self._file = None
|
||||||
|
self._mode = mode
|
||||||
|
self._name = name
|
||||||
|
|
||||||
|
# Tarfile options
|
||||||
|
self._tar = None
|
||||||
|
self._tar_mode = f"{mode}|gz" if gzip else f"{mode}|"
|
||||||
|
|
||||||
|
# Encryption/Decription
|
||||||
|
self._aes = None
|
||||||
|
self._key = key
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
"""Start context manager tarfile."""
|
||||||
|
if not self._key:
|
||||||
|
self._tar = tarfile.open(name=str(self._name), mode=self._tar_mode)
|
||||||
|
return self._tar
|
||||||
|
|
||||||
|
# Encrypted/Decryped Tarfile
|
||||||
|
self._file = self._name.open(f"{self._mode}b")
|
||||||
|
|
||||||
|
# Extract IV for CBC
|
||||||
|
if self._mode == MOD_READ:
|
||||||
|
cbc_rand = self._file.read(16)
|
||||||
|
else:
|
||||||
|
cbc_rand = get_random_bytes(16)
|
||||||
|
self._file.write(cbc_rand)
|
||||||
|
self._aes = AES.new(
|
||||||
|
self._key, AES.MODE_CBC, iv=_generate_iv(self._key, cbc_rand))
|
||||||
|
|
||||||
|
self._tar = tarfile.open(fileobj=self, mode=self._tar_mode)
|
||||||
|
return self._tar
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_value, traceback):
|
||||||
|
"""Close file."""
|
||||||
|
if self._tar:
|
||||||
|
self._tar.close()
|
||||||
|
if self._file:
|
||||||
|
self._file.close()
|
||||||
|
|
||||||
|
def write(self, data):
|
||||||
|
"""Write data."""
|
||||||
|
if len(data) % BLOCK_SIZE != 0:
|
||||||
|
data = pad(data, BLOCK_SIZE)
|
||||||
|
self._file.write(self._aes.encrypt(data))
|
||||||
|
|
||||||
|
def read(self, size=0):
|
||||||
|
"""Read data."""
|
||||||
|
return self._aes.decrypt(self._file.read(size))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path(self):
|
||||||
|
"""Return path object of tarfile."""
|
||||||
|
return self._name
|
||||||
|
|
||||||
|
@property
|
||||||
|
def size(self):
|
||||||
|
"""Return snapshot size."""
|
||||||
|
if not self._name.is_file():
|
||||||
|
return 0
|
||||||
|
return round(self._name.stat().st_size / 1048576, 2) # calc mbyte
|
||||||
|
|
||||||
|
|
||||||
|
def _generate_iv(key, salt):
|
||||||
|
"""Generate a iv from data."""
|
||||||
|
temp_iv = key + salt
|
||||||
|
for _ in range(100):
|
||||||
|
temp_iv = hashlib.sha256(temp_iv).digest()
|
||||||
|
return temp_iv[:16]
|
@@ -1,19 +1,23 @@
|
|||||||
"""Validate functions."""
|
"""Validate functions."""
|
||||||
import voluptuous as vol
|
import uuid
|
||||||
|
|
||||||
|
import voluptuous as vol
|
||||||
import pytz
|
import pytz
|
||||||
|
|
||||||
from .const import (
|
from .const import (
|
||||||
ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_SESSIONS, ATTR_PASSWORD, ATTR_TOTP,
|
ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
|
||||||
ATTR_SECURITY, ATTR_BETA_CHANNEL, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
|
ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
|
||||||
ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT, ATTR_HOMEASSISTANT, ATTR_HASSIO,
|
ATTR_PASSWORD, ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT,
|
||||||
ATTR_BOOT, ATTR_LAST_BOOT, ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG,
|
ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG, ATTR_WAIT_BOOT, ATTR_UUID)
|
||||||
ATTR_WAIT_BOOT)
|
|
||||||
|
|
||||||
|
|
||||||
NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
|
NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
|
||||||
ALSA_CHANNEL = vol.Match(r"\d+,\d+")
|
ALSA_CHANNEL = vol.Match(r"\d+,\d+")
|
||||||
WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
|
WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
|
||||||
|
DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
|
||||||
|
|
||||||
|
# pylint: disable=no-value-for-parameter
|
||||||
|
REPOSITORIES = vol.All([vol.Url()], vol.Unique())
|
||||||
|
|
||||||
|
|
||||||
def validate_timezone(timezone):
|
def validate_timezone(timezone):
|
||||||
@@ -59,13 +63,17 @@ DOCKER_PORTS = vol.Schema({
|
|||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
# pylint: disable=no-value-for-parameter
|
||||||
SCHEMA_HASS_CONFIG = vol.Schema({
|
SCHEMA_HASS_CONFIG = vol.Schema({
|
||||||
|
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
|
||||||
|
vol.Match(r"^[0-9a-f]{32}$"),
|
||||||
vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
|
vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
|
||||||
vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
|
vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
|
||||||
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
|
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
|
||||||
vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
|
vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
|
||||||
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
|
||||||
vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
|
vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
|
vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_WAIT_BOOT, default=600):
|
||||||
|
vol.All(vol.Coerce(int), vol.Range(min=60)),
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
}, extra=vol.REMOVE_EXTRA)
|
||||||
|
|
||||||
|
|
||||||
@@ -83,12 +91,7 @@ SCHEMA_HASSIO_CONFIG = vol.Schema({
|
|||||||
vol.Optional(ATTR_LAST_BOOT): vol.Coerce(str),
|
vol.Optional(ATTR_LAST_BOOT): vol.Coerce(str),
|
||||||
vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[
|
vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[
|
||||||
"https://github.com/hassio-addons/repository",
|
"https://github.com/hassio-addons/repository",
|
||||||
]): [vol.Url()],
|
]): REPOSITORIES,
|
||||||
vol.Optional(ATTR_SECURITY, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_TOTP): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_SESSIONS, default={}):
|
|
||||||
vol.Schema({vol.Coerce(str): vol.Coerce(str)}),
|
|
||||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
|
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
|
||||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
|
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
|
||||||
vol.Optional(ATTR_WAIT_BOOT, default=5): WAIT_BOOT,
|
vol.Optional(ATTR_WAIT_BOOT, default=5): WAIT_BOOT,
|
||||||
|
Submodule home-assistant-polymer updated: ea16ebd4f0...1a18ee2755
BIN
misc/hassio.png
BIN
misc/hassio.png
Binary file not shown.
Before Width: | Height: | Size: 42 KiB After Width: | Height: | Size: 37 KiB |
@@ -1 +1 @@
|
|||||||
<mxfile userAgent="Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36" version="6.5.6" editor="www.draw.io" type="device"><diagram name="Page-1">5Vptc6M2EP41/ng3gHj9mPiSy820c5n6Q3sfsVBsNTJyhYid/voKkABZkOBY+KYtmYnR6pVn99ld1l6A5e74laX77a80Q2ThOdlxAb4sPC8OY/G/Erw2At9xG8GG4awR9QQr/DeSQkdKS5yhQhvIKSUc73UhpHmOINdkKWP0oA97okTfdZ9ukCFYwZSY0t9xxrdS6oZJ1/GA8GYrt469sOlYp/B5w2iZy/0WHniqr6Z7l6q15IMW2zSjh54I3C3AklHKm7vdcYlIBa2CrZl3P9LbnpuhnE+Z4DUTXlJSInXikIipt09UrCAOyF8lKOFfJVUdn4paZTdigNjtKD5ERw206DtIYKrenLJdSrrJ4m5TfX5fqX3E2Zqtmg4JS7urd9hijlb7FFbtg7A2MWjLd0S03Oo0mJAlJZTVowXYKIRQyAvO6DPq9Tj1Jc+/kutLvF4Q4+g4CqHbKkbYO6I7xNmrGKImJKCZIm09SKRuD53l+Arobc9oQjkulca6aZfuFCZupM6G9QcM/X3LcaW31WvB0e5CNGGG1vF6CE0QggRkrb7sAhhNBNCzAKBvAPiFwmfELkUOokCQ/trI+SZy3hBywAJyoYHcw9JArXaFqJpRUe9MLscQDXN5HQd+4NjB0A8DHcPQxDBwTAgDCxAmBl4oE3FINinjW7qheUruOumtjmgPPXTE/I9K/DkKZPOH6srFwZq+QDV/yBX+RJy/ygiclpwKUbfxL5Tu5RrNUavzvQ20eBxaMihHRTJ4p2yDeM9uTHUwRFKOX/TVLwFX5RK20fXeQDcB3im+deMRMSweALGfBbp/JdCj0Xxi3UX48xIMN6wSjNMEYlXuEXvBhXAJagOm+h7Sovj2fTTBaMXr0aSjMwP3fbdluKflMgybVEN3aFmA4sy347ZAoLstMJB1uPGA33JtRE3Xm4Nbbo9Yyou13NJ4VbuxeUnkqveOHouiK7EIzOO6NHh1dE/iQtc89VyFwIPfVK9YQgCJYBqGSnyPidpzqm5QnpmLCWFvqcFMfrm0qlgvvlZQUm8cvaxJrPLpRjy6wLByU9dxRSmKn6CtLFR3Rd5A/t56HS1/9224ovDKXHE/O3qQ/+zG8aWBfiKtPmjxwLR4d0Sn1i3enyVUSJ30srCJCPYcTk5zpHmb8xQ2Vl+AJXtp+WpPYdeKPa5ZUrjJMpoXhhqLbbqvbveMQlQU73sn3ZVN9lX34qr9fZMTCt07XhiBxANhEHtx7PhgpqRqyJN5bmB6ssSCI1O1nDmJ0rVOHdWlqYAkU59uc7zoXEAAOfWR4vq9Q5WqneE0Wq3Q0FJO6hdSz1ynobKxTm0U7dNMs5PYJCjk1KxYKX6WO9IMALcVOzAUyKdrRB5pgTmmuRiyppzTnRhAqo7btoitVVbrMna3xg3Bm2oup+fRvCvEnpZu5QYWiHxS0wEDNR0wkJBYqciaNJ5AUifSWOq/x1LX5OgUOk5Ity8PgO97LQshEng/L0SqvXsMPBwOpvcmBO+LWg2SiZDQMrs4Tl6FQInuz3xnIKeP5iovgLcLo9K4P5DEn8mRmTLEXqzt3hyaQ3qj0faDNPFNmjTmaz+S+icmc+pN7YVAMP6tjfNQrkcjIUzZ5fQL62uAfkH1Z4d+CThJJ4boN1TdsxLBopnY17f7yGaWOT9lP8i+YAb2TVZjYJDkK+bbuekxFp2QmwUomocevnppvQo94v9LcEpCnaOR5dgU/idjk/m9+G9oX71qUYbReBXl30s+Vf6dgXyi2f0WqlFG93szcPcP</diagram></mxfile>
|
<mxfile userAgent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36" version="7.9.5" editor="www.draw.io" type="device"><diagram name="Page-1" id="535f6c39-9b73-04c2-941c-82630de90f1a">5VrLcqM4FP0aLzsFiOcycefRVTPVqfFippdYKLYmMvII4cd8/QiQDEKQ4Bicnmp7Yevqybk691zJnoH55vDI4u36d5ogMnOs5DADX2eOY1vAER+F5VhZ3DCoDCuGE9moNizwv0j1lNYcJyjTGnJKCcdb3QhpmiLINVvMGN3rzV4o0WfdxitkGBYwJqb1T5zwtbTaflRXPCG8WsupQ8evKpYxfF0xmqdyvpkDXspXVb2J1VjyQbN1nNB9wwTuZ2DOKOXVt81hjkiBrYKt6vfQU3taN0MpH9JB+mkXkxypFftEdL17oWIEsUB+lKD4/+RUVXzJSpfdigZitoP4EBUl0KJuL4EpalPKNjGpO4tvq+Lz+0LNI9ZWTVVVSFhOszr7NeZosY1hUd6L7SYarfmGiJJdrAYTMqeEsrK1ABv5EAp7xhl9RY0aq3zJ9S/k+B14SdMOMY4ODZPE7xHRDeLsKJqo2ghUXeRe9yLp2329c1wF9LqxaXzZLpabdXUaunaY+CJ91u0/YPjvW4oLvy2OGUebC9GECVqGyy40gQ8ikJz8NS6AwUAAnREAdA0Av1L4itilyEHkCdJfGznXRM7pQg6MgJxvIPc0N1ArQyEqehTUO5PLIUTdXF6GnutZ42Do+p6OoW9i6FkmhN4IEEYGXigROiSLlPE1XdE0Jve19U5HtIEeOmD+V2G+CTxZ/KGqUrGwqs5TxR9yhL8R50epwHHOqTDVE/9G6VaO0Qt1RnMG5fKlyvOYrRDXtknxYG+6gyESc7zTBfgScFUuMTa6zhvoRiLxaeFbFp4Rw+IBELsS6O5ngR705hPLWuHPSzBsv0gw2gnEIt8itsOZCAlqAqbqnuIs+/a9N8E4mZe9SUe9Dez3w5YRnuZz369SDT2gJR4KE3ecsAU8PWyBjqzDDjvilj2GatrOFNyyG8RSUezELY1XZRgbSqJMMIPfFqcCYYBEbA4MlfkBE7WKQVyz1WmkQbbgs8gGpolwmhd0J7Tkoy62A9xAzIe6EKWJOZgwNobqTPjn80sc64Sfpl0qHjSSKzHKl1vx6ALDIppdJ2LFKHyBYyWresRyOtL8U3DS0nx3jIjlX5kr9o2l5wI3dhhemg8MpFWDLilNkcaVN9NmjRHAZITal9dnhDuJ4kifNZK5kRAe7tC+awqYs92Jzx922Kdpk2veTHzAgRoIvd4832d9InK52zrx/rjrrqE1pqduk4SmmeGvbB1vi69bRiHKsvd1RhelwarzIF6lcleHAMFSy/EDEDnA90InDC0XTJRFd2mSY3umJkUjSJK6vJsypNWltuRcmtTJsNck2Sgn2/FClez6THF50JQuV2ei9rlJjVDRUnZyGjfnZ45TUdkYp9wUp6cZtk9Ck6CQU/OKUvEz35CqAbgrqIChQD5eIvJMM8wxTUWTJeWcbkQDUlTcnX610K7Sy98t6jFuCV4VfTk9j+b1zXv7rl5OMAKRW5d4oOMSD3SklqNcwZs0HkBSK9BY6r7HUtvk6BA6XkXzztTxQYqofkH8KZIZtZgGA/f7vRm9CcHbrHSDZCIkNE8u1smrECjS45lrdZzOgqnuk8DbN+Fyc3/gOHYmRybK5RtaW58Bq0U6vWo7jCauSRO1WydXUre1ZdrRdDwJBP0/01lP+bJXCWHMLqefX7466OcV73HoF4FWOtFFv67r3FEULJiIfc19H4yZZU5P2WHs867BvsFu9AySPGK+npoefeqE7MRDwTT0cNWh9Sr0
CH8VcYp8naPBZdrk/xraZP4R4g+0LY5alGHUf4vy/yWfusifgHyiWP/5rXJG/Q9DcP8f</diagram></mxfile>
|
Binary file not shown.
Before Width: | Height: | Size: 36 KiB |
@@ -1 +0,0 @@
|
|||||||
<mxfile userAgent="Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:53.0) Gecko/20100101 Firefox/53.0" version="6.5.8" editor="www.draw.io" type="device"><diagram name="Page-1">5Vxdd5s4EP01fmwOkgCbx9hp2j7sNrvpnnYfiVFsTjDyghwn++tXGMmAxileEB9O+9BjBhjM3GHmzjXKhCw2L58Sf7v+jQU0mmAreJmQmwnGU4eI/zPDa24gyM0NqyQMchMqDPfhv1QaLWndhQFNKwdyxiIebqvGJYtjuuQVm58kbF897JFF1atu/RUFhvulH0Hr9zDg69w6w25h/0zD1VpdGblevufBXz6tEraL5fUmmDwe/uW7N77yJW80XfsB25dM5OOELBLGeP5p87KgURZaFbb8vNs39h6/d0Jjfs4JOD/h2Y928tZvwyTlwnTP/YTLL8lfVWA4fRF+52u+iYQBiY8pT9gTXbCIJcISs1gcOX8Mo0gz+VG4isXmUnwzKuzzZ5rwUIT8Wu7YhEGQXWa+X4ec3m/9ZXbNvcivzCGL+b38Go7aztMGeWIb3rcMRXYV+lIyyTh8omxDefIqDpF7ySw/Q6asKxHaF/gjS9rWJewVkr5MudXRcRF28UFG/jQKBKDwVypipAe/FPUtC2N+uKIznzg3mYUmobhwFtoblvA1W7HYj+4KawcxQhgGyT0Vo5mBINkgSJ/9NB1hkDAiw0XJAVFaiyhdffk6wkDZ7oCBckGg2JbGh1uKs2b2drT0wvXAOGcbsYPGwXXWfDJbxJZPP4uSqK4ryiuZTYNKU4JhK4VFRSChkc/D52rbOhUW6e0uQ7pAwNOeZ1sLbMp2yZLKk8ptRPMjoNMc4aqj/HaBowNIxzs8C7cpwE2ckdLlLgm5uNPbMH5kvaLnDIYenmrPj9sQPuLUODIH3wzCNxVxFtdz/9llrGcexiEvtibkOiNwfpTS7KjpTVtsD085mQd+uqaBPE/slmRilm29hPyH+PzBurIcuf232LauCFH7S5XwxvpZpuQQVDKlyaPfMlNsy60AjK2mmYJrHJnLFA9kip8+ZfsP+WHdfe8+E856/kk/EOqsApOGECJS48gchGqcK2GYUm4Sw8vss7hpoT5GVDlyvM6wg6NhtdGyLQ9ZLAi4G2WF+kHMK+7qULK1gr4VBHTPkkAv6nrJt7b70iFGir1Kj/K4iC6vsWPPUGMHjgzmCxxiq/mS0jQVCfNGvvyvZOk1VxQdQFcWmlbowNRtRQfsMacc0XWNpikHHL2RcgIG/7V0mJxJWyYlFA306lSk5Rv5Jg94oq+mM66egDSqW31xSm16J9OmGTOrcWSwSEF5xMi43xGSA1FL0rTd6NQSODKIJNRvfmfJxodQvmPJGlfZoN2nZo2gEHMZorWDYJQ6UxkR1DsuRLXuN0xw2L8c2brXSGE4Ug+mW6vkHn6gdpqKIbpw7RDcVcc6JtpolGv11I1g3HAcQ+MGcGQQwBOKyBnaNU/E0XhROY4zvn2fGrfKqUZ1wrDK7TSWTXCNI4NJBWWTXOYejb6tiF7fU4jbVIHQpxDgyCB6UF/IZ4Xete3x9GK3aSnXxW3X7kzcPvHrfzdi5SAypVuVKV3itqros1EzhykyxByAoz6FylOvNbx7obI3XqANbNPG70nMahwZrFBQOBizUjkUSZjqM3VTkgAcGYQSihuXoZR5fQobBAobF6KU9RsmqCJcjlLWb6TguD6YUqaSe3h27plSyrzulDJS9ypB70qZeupGwHc9U0oZcGQQwPqf3dsoZflxFy6UkTZlwrBQ5pkSyoAjgzkFf7ovhLLbb1+/3XWfDGfVCnzubGyYCiPLlGAGPRmEESovZcXMCJAX2pqRZUo5Q1Z30hmpW4DRjXSWdYVDLzgcNcu64gVqaSrZRsotEDIlpkFPfapppH6VyftT03ojD/qqvebLjmZ1
ngyWLSjCjFlPG4xEIFOCGvRkDky1TPHEy3+iSooiia2TPOLXeRVw5kqeVWoauKtXAW2oSY1U4LQ1noQ9G4SpuwXsGIRptAqnM2ScoPwzZolz0FBBouMvRTvwOT3WQJ2GywJZEHAzHLrgzIpB54wZ2a0Ys32iOaoHaQDGfHyd+rjQXWld7ZfMqwbaQb+E5Kc6s0mVzeDANsR6LNIy1fCJVDt3CUYXw5lWWWyvYaoRp85Tn8OZA8nbH39+WLCAts2YrtZTnVtuWg9Wem1pysXJTAPcsc8DvAmckPyNHM5z9ZbWo5UOgtvw+UWkzpNBOCFJ/ZKvzv7lJiqtPx8LV3l1lXpNp+VIJTaLv/mWo1b8XT3y8T8=</diagram></mxfile>
|
|
19
setup.py
19
setup.py
@@ -40,15 +40,14 @@ setup(
|
|||||||
],
|
],
|
||||||
include_package_data=True,
|
include_package_data=True,
|
||||||
install_requires=[
|
install_requires=[
|
||||||
'async_timeout',
|
'async_timeout==2.0.0',
|
||||||
'aiohttp',
|
'aiohttp==2.3.10',
|
||||||
'docker',
|
'docker==3.1.0',
|
||||||
'colorlog',
|
'colorlog==3.1.2',
|
||||||
'voluptuous',
|
'voluptuous==0.11.1',
|
||||||
'gitpython',
|
'gitpython==2.1.8',
|
||||||
'pyotp',
|
'pytz==2018.3',
|
||||||
'pyqrcode',
|
'pyudev==0.21.0',
|
||||||
'pytz',
|
'pycryptodome==3.4.11'
|
||||||
'pyudev'
|
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"hassio": "0.80",
|
"hassio": "0.94",
|
||||||
"homeassistant": "0.60.1",
|
"homeassistant": "0.63.3",
|
||||||
"resinos": "1.1",
|
"resinos": "1.1",
|
||||||
"resinhup": "0.3",
|
"resinhup": "0.3",
|
||||||
"generic": "0.3",
|
"generic": "0.3",
|
||||||
|
Reference in New Issue
Block a user