Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-18 13:39:21 +00:00

Compare commits: 218 commits
Commits (SHA1, in page order):

```
8fd1599173 63302b73b0 f591f67a2a cda3184a55 afc811e975 2e169dcb42
34e24e184f 2e4751ed7d 8c82c467d4 f3f6771534 0a75a4dcbc 1a4542fc4e
7e0525749e b33b26018d 66c93e7176 5674d32bad 7a84972770 638f0f5371
dca1b6f1d3 2b0ee109d6 e7430d87d7 9751c1de79 c497167b64 7fb2aca88b
0d544845b1 602eb472f9 f22fa46bdb 4171a28260 55365a631a 547415b30b
cbf79f1fab 31cc1dce82 8a11e6c845 2df4f80aa5 68566ee9e1 fe04b7ec59
38f96d7ddd 2b2edd6e98 361969aca2 e61e7f41f2 75150fd149 bd1c8be1e1
f167197640 f084ecc007 65becbd0ae f38e28a4d9 2998cd94ff 79e2f3e8ab
13291f52f2 4baa80c3de be28a6b012 d94ada6216 b2d7743e06 40324beb72
c02f6913b3 d56af22d5e 1795103086 02e1689dd1 ab4d96331f cb881cba28
44b247f397 8bb43daf91 a7e65613d6 3c04c71401 1353d52bd1 7701457791
b7820bc6a6 df66102de0 4b308d0de1 4448ba886b f39006be01 e5204eef8a
1f07d47fd6 ba352abf0b 2bf440a744 3b26136636 8249f042c0 84bbaeee5f
b7620b7adf 5a80be9fd4 a733886803 834fd29fab fd1caf8aa6 975c9e8061
0b3c5885ec 711b63e2d0 c7b833b5eb fd472b3084 dcbb6a2160 56fa1550d2
e1f97860ee 6ab3fe18d9 7969f3dfd7 6f05b90e4e 3aa53d99d7 3525f5a02f
04514a9f5c 1c915ef4cd b03a2c5c5f 64988b285e 5c69dca7b3 dfda7dc748
cb7710c23f f9b12a2eb2 6a7617faad 05554ccf7e a94e6c5303 d6fc8892db
fa9b3b939e 70685c41be a3209c4bde f3e60f6c28 7798e7cde2 4af92b9d25
eab958860c 09bba96940 a34806d4e2 f00b21dc28 021946e181 6cab017042
5999b48be4 57f3178408 14013ac923 d08343d040 2f9f9c6165 8ab0ed5047
0119b52e11 1382a7b36e 2eeb8bf388 5af3040223 47491ca55b b06ce9b6b4
38284e036d 27a079742d 7f33b3b5aa 261bda82db c39d6357f3 d1b30a0e95
6a74893a30 b61d5625fe 8d468328f3 cd3b382902 99cf44aacd eaa489abec
46f323791d ec72d38220 f5b166a7f0 8afde1e881 f751b0e6fc 3809f20c6a
68390469df 4c122a0630 d06696cd94 8d094d5c70 068c463c98 fc95933098
630137a576 857f346b35 d98b4f039f 8fee52da5e 0f9ad3658b 1155ee07e5
fa687e982e 4e902af937 6455ad14a7 4753c058a3 1567cbfe37 3ed66c802e
980baf23a8 d69af6a62b 863456525f dae49df7b1 282fc03687 f9f7e07c52
12a2ccf0ec a98d76618a 7a59e7392b 446aff3fa6 3272403141 d1f265da9e
4915c935dd e78d935824 934ca64a32 0860e6d202 c3e1c8b58e 44e48095c7
a13eb7841d b5701c5878 803eb0f8c9 58c5ed7ba1 c4d7d671d1 9d88255225
bfbc366f55 0f30a23f3e 7e1bb42bb7 251a43216e 4801b9903c cd5a09938f
14bf834224 8aec943a5c d817e75d98 fbd8abdcd5 ca02977505 6533b57c6d
0a818282d3 ce2f5f9f7a 01f767e66c 106ab924e3 d031594bf9 f2f146063b
5abe7a3fb9 f592971b6e ed2caa0d81 0b04c90b1f 2eac4b8d9b 143a358b0c
fa049066fc 3877dcf355
```
.dockerignore (new file, 9 lines)

```diff
@@ -0,0 +1,9 @@
+# General files
+.git
+.github
+
+# Test related files
+.tox
+
+# Temporary files
+**/__pycache__
```
API.md (179 lines changed)

````diff
@@ -1,10 +1,11 @@
-# HassIO Server
+# Hass.io Server

-## HassIO REST API
+## Hass.io RESTful API

-Interface for HomeAssistant to control things from supervisor.
+Interface for Home Assistant to control things from supervisor.

 On error:

 ```json
 {
     "result": "error",
@@ -12,7 +13,8 @@ On error:
 }
 ```

-On success
+On success:

 ```json
 {
     "result": "ok",
````
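Every endpoint replies with this envelope, so a client needs only one unwrapping helper. A minimal sketch in Python; the base URL `http://hassio` and the `message`/`data` keys are illustrative assumptions, since this excerpt shows only the `result` field:

```python
# Minimal client-side check of the result envelope described above.
# The base URL and the "message"/"data" keys are assumptions.
import requests

def supervisor_get(path: str) -> dict:
    """GET an endpoint and unwrap the {"result": ...} envelope."""
    reply = requests.get("http://hassio" + path, timeout=10).json()
    if reply.get("result") == "ok":
        return reply.get("data", {})
    raise RuntimeError(reply.get("message", "supervisor call failed"))

# usage: supervisor_get("/supervisor/info")
```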
````diff
@@ -20,10 +22,9 @@ On success
 }
 ```

-### HassIO
+### Hass.io

 - GET `/supervisor/ping`

 - GET `/supervisor/info`

 The addons from `addons` are only installed one.
@@ -40,13 +41,11 @@ The addons from `addons` are only installed one.
     "name": "xy bla",
     "slug": "xy",
     "description": "description",
-    "arch": ["armhf", "aarch64", "i386", "amd64"],
     "repository": "12345678|null",
     "version": "LAST_VERSION",
     "installed": "INSTALL_VERSION",
-    "detached": "bool",
-    "build": "bool",
-    "url": "null|url"
+    "logo": "bool",
+    "state": "started|stopped",
 }
 ],
 "addons_repositories": [
@@ -55,12 +54,10 @@ The addons from `addons` are only installed one.
 }
 ```

-- GET `/supervisor/addons`
-
-Get all available addons. Will be delete soon. Look to `/addons`

 - POST `/supervisor/update`

 Optional:

 ```json
 {
     "version": "VERSION"
@@ -68,6 +65,7 @@ Optional:
 ```

 - POST `/supervisor/options`

 ```json
 {
     "beta_channel": "true|false",
@@ -84,11 +82,12 @@ Reload addons/version.

 - GET `/supervisor/logs`

-Output the raw docker log
+Output is the raw docker log.

 ### Security

 - GET `/security/info`

 ```json
 {
     "initialize": "bool",
@@ -97,6 +96,7 @@ Output the raw docker log
 ```

 - POST `/security/options`

 ```json
 {
     "password": "xy"
@@ -104,6 +104,7 @@ Output the raw docker log
 ```

 - POST `/security/totp`

 ```json
 {
     "password": "xy"
@@ -123,6 +124,7 @@ Return QR-Code
 ### Backup/Snapshot

 - GET `/snapshots`

 ```json
 {
     "snapshots": [
@@ -138,6 +140,7 @@ Return QR-Code
 - POST `/snapshots/reload`

 - POST `/snapshots/new/full`

 ```json
 {
     "name": "Optional"
@@ -145,6 +148,7 @@ Return QR-Code
 ```

 - POST `/snapshots/new/partial`

 ```json
 {
     "name": "Optional",
@@ -156,6 +160,7 @@ Return QR-Code
 - POST `/snapshots/reload`

 - GET `/snapshots/{slug}/info`

 ```json
 {
     "slug": "SNAPSHOT ID",
@@ -180,10 +185,9 @@ Return QR-Code
 ```

 - POST `/snapshots/{slug}/remove`

 - POST `/snapshots/{slug}/restore/full`

 - POST `/snapshots/{slug}/restore/partial`

 ```json
 {
     "homeassistant": "bool",
````
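As a usage sketch for the snapshot endpoints above (same assumed base URL and envelope as in the earlier example; `name` is the only documented body field):

```python
# Sketch: trigger a full snapshot via POST /snapshots/new/full.
# Base URL and envelope handling are assumptions, as before.
import requests

def create_full_snapshot(name=None):
    body = {"name": name} if name else {}
    reply = requests.post("http://hassio/snapshots/new/full",
                          json=body, timeout=300)
    return reply.json()  # {"result": "ok|error", ...}
```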
````diff
@@ -193,36 +197,69 @@ Return QR-Code
 ```

 ### Host

 - POST `/host/reload`

 - POST `/host/shutdown`

 - POST `/host/reboot`

 - GET `/host/info`
-See HostControl info command.
 ```json
 {
     "type": "",
     "version": "",
     "last_version": "",
-    "features": ["shutdown", "reboot", "update", "network_info", "network_control"],
+    "features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"],
     "hostname": "",
-    "os": ""
+    "os": "",
+    "audio": {
+        "input": "0,0",
+        "output": "0,0"
+    }
+}
+```
+
+- POST `/host/options`
+
+```json
+{
+    "audio_input": "0,0",
+    "audio_output": "0,0"
 }
 ```

 - POST `/host/update`

 Optional:

 ```json
 {
     "version": "VERSION"
 }
 ```

+- GET `/host/hardware`
+
+```json
+{
+    "serial": ["/dev/xy"],
+    "input": ["Input device name"],
+    "disk": ["/dev/sdax"],
+    "gpio": ["gpiochip0", "gpiochip100"],
+    "audio": {
+        "CARD_ID": {
+            "name": "xy",
+            "type": "microphone",
+            "devices": {
+                "DEV_ID": "type of device"
+            }
+        }
+    }
+}
+```

 ### Network

 - GET `/network/info`

 ```json
 {
     "hostname": ""
````
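`/host/hardware` is new in this commit range, so a short consumer sketch may help; the base URL and the `data` wrapper key are carried-over assumptions from the first sketch:

```python
# Sketch: enumerate devices reported by GET /host/hardware.
import requests

payload = requests.get("http://hassio/host/hardware", timeout=10).json()
data = payload.get("data", payload)  # tolerate bare or wrapped replies

for dev in data.get("serial", []):
    print("serial:", dev)    # e.g. /dev/xy
for dev in data.get("disk", []):
    print("disk:", dev)      # e.g. /dev/sdax
for card_id, card in data.get("audio", {}).items():
    # each card carries a name, a type, and a device map
    print("audio card", card_id, "->", card["name"], card["type"])
```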
````diff
@@ -230,18 +267,14 @@ Optional:
 ```

 - POST `/network/options`

 ```json
 {
     "hostname": "",
-    "mode": "dhcp|fixed",
-    "ssid": "",
-    "ip": "",
-    "netmask": "",
-    "gateway": ""
 }
 ```

-### HomeAssistant
+### Home Assistant

 - GET `/homeassistant/info`

@@ -251,12 +284,18 @@ Optional:
     "last_version": "LAST_VERSION",
     "devices": [""],
     "image": "str",
-    "custom": "bool -> if custom image"
+    "custom": "bool -> if custom image",
+    "boot": "bool",
+    "port": 8123,
+    "ssl": "bool",
+    "watchdog": "bool"
 }
 ```

 - POST `/homeassistant/update`

 Optional:

 ```json
 {
     "version": "VERSION"
@@ -265,26 +304,38 @@ Optional:

 - GET `/homeassistant/logs`

-Output the raw docker log
+Output is the raw Docker log.

 - POST `/homeassistant/restart`
+- POST `/homeassistant/check`
+- POST `/homeassistant/start`
+- POST `/homeassistant/stop`

 - POST `/homeassistant/options`

 ```json
 {
     "devices": [],
     "image": "Optional|null",
-    "last_version": "Optional for custom image|null"
+    "last_version": "Optional for custom image|null",
+    "port": "port for access hass",
+    "ssl": "bool",
+    "password": "",
+    "watchdog": "bool"
 }
 ```

 Image with `null` and last_version with `null` reset this options.

-### REST API addons
+- POST/GET `/homeassistant/api`
+
+Proxy to real home-assistant instance.
+
+### RESTful for API addons

 - GET `/addons`

-Get all available addons
+Get all available addons.

 ```json
 {
@@ -299,7 +350,15 @@ Get all available addons
     "installed": "none|INSTALL_VERSION",
     "detached": "bool",
     "build": "bool",
-    "url": "null|url"
+    "privileged": ["NET_ADMIN", "SYS_ADMIN"],
+    "devices": ["/dev/xy"],
+    "url": "null|url",
+    "logo": "bool",
+    "audio": "bool",
+    "gpio": "bool",
+    "stdin": "bool",
+    "hassio_api": "bool",
+    "homeassistant_api": "bool"
 }
 ],
 "repositories": [
@@ -315,8 +374,8 @@ Get all available addons
 ```

 - POST `/addons/reload`

 - GET `/addons/{addon}/info`

 ```json
 {
     "name": "xy bla",
@@ -332,11 +391,25 @@ Get all available addons
     "build": "bool",
     "options": "{}",
     "network": "{}|null",
-    "host_network": "bool"
+    "host_network": "bool",
+    "privileged": ["NET_ADMIN", "SYS_ADMIN"],
+    "devices": ["/dev/xy"],
+    "logo": "bool",
+    "hassio_api": "bool",
+    "homeassistant_api": "bool",
+    "stdin": "bool",
+    "webui": "null|http(s)://[HOST]:port/xy/zx",
+    "gpio": "bool",
+    "audio": "bool",
+    "audio_input": "null|0,0",
+    "audio_output": "null|0,0"
 }
 ```

+- GET `/addons/{addon}/logo`

 - POST `/addons/{addon}/options`

 ```json
 {
     "boot": "auto|manual",
@@ -345,44 +418,43 @@ Get all available addons
     "CONTAINER": "port|[ip, port]"
     },
     "options": {},
+    "audio_output": "null|0,0",
+    "audio_input": "null|0,0"
 }
 ```

-For reset custom network settings, set it `null`.
+For reset custom network/audio settings, set it `null`.

 - POST `/addons/{addon}/start`

 - POST `/addons/{addon}/stop`

 - POST `/addons/{addon}/install`
-Optional:
-```json
-{
-    "version": "VERSION"
-}
-```

 - POST `/addons/{addon}/uninstall`

 - POST `/addons/{addon}/update`
-Optional:
-```json
-{
-    "version": "VERSION"
-}
-```

 - GET `/addons/{addon}/logs`

-Output the raw docker log
+Output is the raw Docker log.

 - POST `/addons/{addon}/restart`

+- POST `/addons/{addon}/rebuild`
+
+Only supported for local build addons
+
+- POST `/addons/{addon}/stdin`
+
+Write data to add-on stdin

 ## Host Control

-Communicate over unix socket with a host daemon.
+Communicate over UNIX socket with a host daemon.

 - commands

 ```
 # info
 -> {'type', 'version', 'last_version', 'features', 'hostname'}
@@ -401,7 +473,8 @@ Communicate over unix socket with a host daemon.
 # network int route xy
 ```

-features:
+Features:

 - shutdown
 - reboot
 - update
````
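The Host Control command set is line-oriented, so a client can stay tiny. A sketch under loud assumptions: the socket path and the newline framing are guesses for illustration, since the diff documents only the command names and reply shapes.

```python
# Hypothetical client for the host daemon over a UNIX socket. The
# socket path and newline-terminated framing are assumptions.
import socket

def host_command(command: str,
                 path: str = "/var/run/hassio-hc.sock") -> str:
    """Send one command (e.g. "info") and return the raw reply."""
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
        sock.connect(path)
        sock.sendall(command.encode() + b"\n")
        return sock.recv(65536).decode()
```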
Dockerfile (new file, 23 lines)

```diff
@@ -0,0 +1,23 @@
+ARG BUILD_FROM
+FROM $BUILD_FROM
+
+# add env
+ENV LANG C.UTF-8
+
+# setup base
+RUN apk add --no-cache python3 python3-dev \
+        libressl libressl-dev \
+        libffi libffi-dev \
+        musl musl-dev \
+        gcc libstdc++ \
+        git socat \
+    && pip3 install --no-cache-dir --upgrade pip \
+    && pip3 install --no-cache-dir --upgrade cryptography jwcrypto \
+    && apk del python3-dev libressl-dev libffi-dev musl-dev gcc
+
+# install HassIO
+COPY . /usr/src/hassio
+RUN pip3 install --no-cache-dir /usr/src/hassio \
+    && rm -rf /usr/src/hassio
+
+CMD [ "python3", "-m", "hassio" ]
```
LICENSE (BSD 3-Clause replaced by Apache License 2.0)

```diff
@@ -1,29 +1,201 @@
-BSD 3-Clause License
-
-Copyright (c) 2017, Pascal Vizeli
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-[... remaining BSD 3-Clause conditions and warranty disclaimer ...]
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+[... standard Apache License 2.0 text, Sections 1-9 ...]
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+[...]
+
+Copyright 2017 Pascal Vizeli
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
```
README.md

```diff
@@ -1,13 +1,13 @@
-# HassIO
+# Hass.io

 ### First private cloud solution for home automation.

 Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.

 

-[HassIO-Addons](https://github.com/home-assistant/hassio-addons) | [HassIO-Build](https://github.com/home-assistant/hassio-build)
-**HassIO is under active development and is not ready yet for production use.**
+- [Hass.io Addons](https://github.com/home-assistant/hassio-addons)
+- [Hass.io Build](https://github.com/home-assistant/hassio-build)

 ## Installation
```
hassio/__main__.py

```diff
@@ -13,11 +13,12 @@ _LOGGER = logging.getLogger(__name__)
 # pylint: disable=invalid-name
 if __name__ == "__main__":
     bootstrap.initialize_logging()
+    loop = asyncio.get_event_loop()

     if not bootstrap.check_environment():
-        exit(1)
+        sys.exit(1)

-    loop = asyncio.get_event_loop()
+    # init executor pool
     executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
     loop.set_default_executor(executor)

@@ -27,19 +28,20 @@ if __name__ == "__main__":

     bootstrap.migrate_system_env(config)

-    _LOGGER.info("Run Hassio setup")
+    _LOGGER.info("Setup HassIO")
     loop.run_until_complete(hassio.setup())

-    _LOGGER.info("Start Hassio")
     loop.call_soon_threadsafe(loop.create_task, hassio.start())
-    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)
+    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)

-    _LOGGER.info("Run Hassio loop")
-    loop.run_forever()
-
-    _LOGGER.info("Cleanup system")
-    executor.shutdown(wait=False)
-    loop.close()
+    try:
+        _LOGGER.info("Run HassIO")
+        loop.run_forever()
+    finally:
+        _LOGGER.info("Stopping HassIO")
+        loop.run_until_complete(hassio.stop())
+        executor.shutdown(wait=False)
+        loop.close()

     _LOGGER.info("Close Hassio")
-    sys.exit(hassio.exit_code)
+    sys.exit(0)
```
hassio/addons/__init__.py

```diff
@@ -15,11 +15,11 @@ BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
 class AddonManager(object):
     """Manage addons inside HassIO."""

-    def __init__(self, config, loop, dock):
+    def __init__(self, config, loop, docker):
         """Initialize docker base wrapper."""
         self.loop = loop
         self.config = config
-        self.dock = dock
+        self.docker = docker
         self.data = Data(config)
         self.addons = {}
         self.repositories = {}
@@ -78,7 +78,7 @@ class AddonManager(object):

         # don't add built-in repository to config
         if url not in BUILTIN_REPOSITORIES:
-            self.config.addons_repositories = url
+            self.config.add_addon_repository(url)

         tasks = [_add_repository(url) for url in new_rep - old_rep]
         if tasks:
@@ -108,7 +108,7 @@ class AddonManager(object):
         tasks = []
         for addon_slug in add_addons:
             addon = Addon(
-                self.config, self.loop, self.dock, self.data, addon_slug)
+                self.config, self.loop, self.docker, self.data, addon_slug)

             tasks.append(addon.load())
             self.addons[addon_slug] = addon
```
hassio/addons/addon.py

```diff
@@ -12,39 +12,43 @@ import voluptuous as vol
 from voluptuous.humanize import humanize_error

 from .validate import (
-    validate_options, SCHEMA_ADDON_SNAPSHOT, MAP_VOLUME)
+    validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME)
 from ..const import (
     ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
     ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
     ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
     ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
     STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
-    ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK)
+    ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
+    ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
+    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY)
 from .util import check_installed
 from ..dock.addon import DockerAddon
 from ..tools import write_json_file, read_json_file

 _LOGGER = logging.getLogger(__name__)

-RE_VOLUME = re.compile(MAP_VOLUME)
+RE_WEBUI = re.compile(
+    r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
+    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")


 class Addon(object):
     """Hold data for addon inside HassIO."""

-    def __init__(self, config, loop, dock, data, slug):
+    def __init__(self, config, loop, docker, data, slug):
         """Initialize data holder."""
         self.loop = loop
         self.config = config
         self.data = data
         self._id = slug

-        self.addon_docker = DockerAddon(config, loop, dock, self)
+        self.docker = DockerAddon(config, loop, docker, self)

     async def load(self):
         """Async initialize of object."""
         if self.is_installed:
-            await self.addon_docker.attach()
+            await self.docker.attach()

     @property
     def slug(self):
@@ -104,7 +108,7 @@ class Addon(object):
         if self.is_installed:
             return {
                 **self.data.system[self._id][ATTR_OPTIONS],
-                **self.data.user[self._id][ATTR_OPTIONS],
+                **self.data.user[self._id][ATTR_OPTIONS]
             }
         return self.data.cache[self._id][ATTR_OPTIONS]

@@ -130,7 +134,8 @@ class Addon(object):
     @property
     def auto_update(self):
         """Return if auto update is enable."""
-        return self.data.user[self._id][ATTR_AUTO_UPDATE]
+        if ATTR_AUTO_UPDATE in self.data.user.get(self._id, {}):
+            return self.data.user[self._id][ATTR_AUTO_UPDATE]

     @auto_update.setter
     def auto_update(self, value):
@@ -173,8 +178,8 @@ class Addon(object):
     @property
     def ports(self):
         """Return ports of addon."""
-        if self.network_mode != 'bridge' or ATTR_PORTS not in self._mesh:
-            return
+        if self.host_network or ATTR_PORTS not in self._mesh:
+            return None

         if not self.is_installed or \
                 ATTR_NETWORK not in self.data.user[self._id]:
@@ -197,11 +202,40 @@ class Addon(object):
         self.data.save()

     @property
-    def network_mode(self):
-        """Return network mode of addon."""
-        if self._mesh[ATTR_HOST_NETWORK]:
-            return 'host'
-        return 'bridge'
+    def webui(self):
+        """Return URL to webui or None."""
+        if ATTR_WEBUI not in self._mesh:
+            return None
+        webui = RE_WEBUI.match(self._mesh[ATTR_WEBUI])
+
+        # extract arguments
+        t_port = webui.group('t_port')
+        t_proto = webui.group('t_proto')
+        s_prefix = webui.group('s_prefix') or ""
+        s_suffix = webui.group('s_suffix') or ""
+
+        # search host port for this docker port
+        if self.ports is None:
+            port = t_port
+        else:
+            port = self.ports.get("{}/tcp".format(t_port), t_port)
+
+        # for interface config or port lists
+        if isinstance(port, (tuple, list)):
+            port = port[-1]
+
+        # lookup the correct protocol from config
+        if t_proto:
+            proto = 'https' if self.options[t_proto] else 'http'
+        else:
+            proto = s_prefix
+
+        return "{}://[HOST]:{}{}".format(proto, port, s_suffix)
+
+    @property
+    def host_network(self):
+        """Return True if addon run on host network."""
+        return self._mesh[ATTR_HOST_NETWORK]

     @property
     def devices(self):
```
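The new `webui` property above is driven entirely by `RE_WEBUI`. A standalone demo of how the two template forms resolve; the regex is copied from this diff, while the sample templates are made up for illustration:

```python
# Demo of the webui template regex; the sample templates are assumptions.
import re

RE_WEBUI = re.compile(
    r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")

for template in ("http://[HOST]:[PORT:8080]/ui",
                 "[PROTO:ssl]://[HOST]:[PORT:80]/"):
    match = RE_WEBUI.match(template)
    # s_prefix and t_proto are alternatives; exactly one is set per form
    print(match.group("s_prefix"), match.group("t_proto"),
          match.group("t_port"), match.group("s_suffix"))
# prints: http None 8080 /ui
#         None ssl 80 /
```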
@@ -223,11 +257,86 @@ class Addon(object):
|
|||||||
"""Return list of privilege."""
|
"""Return list of privilege."""
|
||||||
return self._mesh.get(ATTR_PRIVILEGED)
|
return self._mesh.get(ATTR_PRIVILEGED)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def legacy(self):
|
||||||
|
"""Return if the add-on don't support hass labels."""
|
||||||
|
return self._mesh.get(ATTR_LEGACY)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def access_hassio_api(self):
|
||||||
|
"""Return True if the add-on access to hassio api."""
|
||||||
|
return self._mesh[ATTR_HASSIO_API]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def access_homeassistant_api(self):
|
||||||
|
"""Return True if the add-on access to Home-Assistant api proxy."""
|
||||||
|
return self._mesh[ATTR_HOMEASSISTANT_API]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def with_stdin(self):
|
||||||
|
"""Return True if the add-on access use stdin input."""
|
||||||
|
return self._mesh[ATTR_STDIN]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def with_gpio(self):
|
||||||
|
"""Return True if the add-on access to gpio interface."""
|
||||||
|
return self._mesh[ATTR_GPIO]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def with_audio(self):
|
||||||
|
"""Return True if the add-on access to audio."""
|
||||||
|
return self._mesh[ATTR_AUDIO]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def audio_output(self):
|
||||||
|
"""Return ALSA config for output or None."""
|
||||||
|
if not self.with_audio:
|
||||||
|
return None
|
||||||
|
|
||||||
|
setting = self.config.audio_output
|
||||||
|
if self.is_installed and ATTR_AUDIO_OUTPUT in self.data.user[self._id]:
|
||||||
|
setting = self.data.user[self._id][ATTR_AUDIO_OUTPUT]
|
||||||
|
return setting
|
||||||
|
|
||||||
|
@audio_output.setter
|
||||||
|
def audio_output(self, value):
|
||||||
|
"""Set/remove custom audio output settings."""
|
||||||
|
if value is None:
|
||||||
|
self.data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
|
||||||
|
else:
|
||||||
|
self.data.user[self._id][ATTR_AUDIO_OUTPUT] = value
|
||||||
|
self.data.save()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def audio_input(self):
|
||||||
|
"""Return ALSA config for input or None."""
|
||||||
|
if not self.with_audio:
|
||||||
|
return
|
||||||
|
|
||||||
|
setting = self.config.audio_input
|
||||||
|
if self.is_installed and ATTR_AUDIO_INPUT in self.data.user[self._id]:
|
||||||
|
setting = self.data.user[self._id][ATTR_AUDIO_INPUT]
|
||||||
|
return setting
|
||||||
|
|
||||||
|
@audio_input.setter
|
||||||
|
def audio_input(self, value):
|
||||||
|
"""Set/remove custom audio input settings."""
|
||||||
|
if value is None:
|
||||||
|
self.data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
|
||||||
|
else:
|
||||||
|
self.data.user[self._id][ATTR_AUDIO_INPUT] = value
|
||||||
|
self.data.save()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def url(self):
|
def url(self):
|
||||||
"""Return url of addon."""
|
"""Return url of addon."""
|
||||||
return self._mesh.get(ATTR_URL)
|
return self._mesh.get(ATTR_URL)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def with_logo(self):
|
||||||
|
"""Return True if a logo exists."""
|
||||||
|
return self.path_logo.exists()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def supported_arch(self):
|
def supported_arch(self):
|
||||||
"""Return list of supported arch."""
|
"""Return list of supported arch."""
|
||||||
@@ -273,15 +382,20 @@ class Addon(object):
|
|||||||
return PurePath(self.config.path_extern_addons_data, self._id)
|
return PurePath(self.config.path_extern_addons_data, self._id)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path_addon_options(self):
|
def path_options(self):
|
||||||
"""Return path to addons options."""
|
"""Return path to addons options."""
|
||||||
return Path(self.path_data, "options.json")
|
return Path(self.path_data, "options.json")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path_addon_location(self):
|
def path_location(self):
|
||||||
"""Return path to this addon."""
|
"""Return path to this addon."""
|
||||||
return Path(self._mesh[ATTR_LOCATON])
|
return Path(self._mesh[ATTR_LOCATON])
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_logo(self):
|
||||||
|
"""Return path to addon logo."""
|
||||||
|
return Path(self.path_location, 'logo.png')
|
||||||
|
|
||||||
def write_options(self):
|
def write_options(self):
|
||||||
"""Return True if addon options is written to data."""
|
"""Return True if addon options is written to data."""
|
||||||
schema = self.schema
|
schema = self.schema
|
||||||
@@ -289,7 +403,7 @@ class Addon(object):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
schema(options)
|
schema(options)
|
||||||
return write_json_file(self.path_addon_options, options)
|
return write_json_file(self.path_options, options)
|
||||||
except vol.Invalid as ex:
|
except vol.Invalid as ex:
|
||||||
_LOGGER.error("Addon %s have wrong options -> %s", self._id,
|
_LOGGER.error("Addon %s have wrong options -> %s", self._id,
|
||||||
humanize_error(options, ex))
|
humanize_error(options, ex))
|
||||||
@@ -305,7 +419,37 @@ class Addon(object):
|
|||||||
return vol.Schema(dict)
|
return vol.Schema(dict)
|
||||||
return vol.Schema(vol.All(dict, validate_options(raw_schema)))
|
return vol.Schema(vol.All(dict, validate_options(raw_schema)))
|
||||||
|
|
||||||
async def install(self, version=None):
|
def test_udpate_schema(self):
|
||||||
|
"""Check if the exists config valid after update."""
|
||||||
|
if not self.is_installed or self.is_detached:
|
||||||
|
return True
|
||||||
|
|
||||||
|
# load next schema
|
||||||
|
new_raw_schema = self.data.cache[self._id][ATTR_SCHEMA]
|
||||||
|
default_options = self.data.cache[self._id][ATTR_OPTIONS]
|
||||||
|
|
||||||
|
# if disabled
|
||||||
|
if isinstance(new_raw_schema, bool):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# merge options
|
||||||
|
options = {
|
||||||
|
**self.data.user[self._id][ATTR_OPTIONS],
|
||||||
|
**default_options,
|
||||||
|
}
|
||||||
|
|
||||||
|
# create voluptuous
|
||||||
|
new_schema = \
|
||||||
|
vol.Schema(vol.All(dict, validate_options(new_raw_schema)))
|
||||||
|
|
||||||
|
# validate
|
||||||
|
try:
|
||||||
|
new_schema(options)
|
||||||
|
except vol.Invalid:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
async def install(self):
|
||||||
"""Install a addon."""
|
"""Install a addon."""
|
||||||
if self.config.arch not in self.supported_arch:
|
if self.config.arch not in self.supported_arch:
|
||||||
_LOGGER.error(
|
_LOGGER.error(
|
||||||
@@ -321,17 +465,16 @@ class Addon(object):
|
|||||||
"Create Home-Assistant addon data folder %s", self.path_data)
|
"Create Home-Assistant addon data folder %s", self.path_data)
|
||||||
self.path_data.mkdir()
|
self.path_data.mkdir()
|
||||||
|
|
||||||
version = version or self.last_version
|
if not await self.docker.install(self.last_version):
|
||||||
if not await self.addon_docker.install(version):
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
self._set_install(version)
|
self._set_install(self.last_version)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
async def uninstall(self):
|
async def uninstall(self):
|
||||||
"""Remove a addon."""
|
"""Remove a addon."""
|
||||||
if not await self.addon_docker.remove():
|
if not await self.docker.remove():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if self.path_data.is_dir():
|
if self.path_data.is_dir():
|
||||||
@@ -347,45 +490,93 @@ class Addon(object):
|
|||||||
if not self.is_installed:
|
if not self.is_installed:
|
||||||
return STATE_NONE
|
return STATE_NONE
|
||||||
|
|
||||||
if await self.addon_docker.is_running():
|
if await self.docker.is_running():
|
||||||
return STATE_STARTED
|
return STATE_STARTED
|
||||||
     return STATE_STOPPED

     @check_installed
-    async def start(self):
-        """Set options and start addon."""
-        return await self.addon_docker.run()
+    def start(self):
+        """Set options and start addon.
+
+        Return a coroutine.
+        """
+        return self.docker.run()

     @check_installed
-    async def stop(self):
-        """Stop addon."""
-        return await self.addon_docker.stop()
+    def stop(self):
+        """Stop addon.
+
+        Return a coroutine.
+        """
+        return self.docker.stop()

     @check_installed
-    async def update(self, version=None):
+    async def update(self):
         """Update addon."""
-        version = version or self.last_version
+        last_state = await self.state()

-        if version == self.version_installed:
+        if self.last_version == self.version_installed:
             _LOGGER.warning(
-                "Addon %s is already installed in %s", self._id, version)
-            return True
+                "No update available for Addon %s", self._id)
+            return False

-        if not await self.addon_docker.update(version):
+        if not await self.docker.update(self.last_version):
             return False
+        self._set_update(self.last_version)

-        self._set_update(version)
+        # restore state
+        if last_state == STATE_STARTED:
+            await self.docker.run()
         return True

     @check_installed
-    async def restart(self):
-        """Restart addon."""
-        return await self.addon_docker.restart()
+    def restart(self):
+        """Restart addon.
+
+        Return a coroutine.
+        """
+        return self.docker.restart()

     @check_installed
-    async def logs(self):
-        """Return addons log output."""
-        return await self.addon_docker.logs()
+    def logs(self):
+        """Return addons log output.
+
+        Return a coroutine.
+        """
+        return self.docker.logs()
+
+    @check_installed
+    async def rebuild(self):
+        """Performe a rebuild of local build addon."""
+        last_state = await self.state()
+
+        if not self.need_build:
+            _LOGGER.error("Can't rebuild a none local build addon!")
+            return False
+
+        # remove docker container but not addon config
+        if not await self.docker.remove():
+            return False
+
+        if not await self.docker.install(self.version_installed):
+            return False
+
+        # restore state
+        if last_state == STATE_STARTED:
+            await self.docker.run()
+        return True
+
+    @check_installed
+    async def write_stdin(self, data):
+        """Write data to add-on stdin.
+
+        Return a coroutine.
+        """
+        if not self.with_stdin:
+            _LOGGER.error("Add-on don't support write to stdin!")
+            return False
+
+        return await self.docker.write_stdin(data)

     @check_installed
     async def snapshot(self, tar_file):
@@ -393,7 +584,7 @@ class Addon(object):
         with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
             # store local image
             if self.need_build and not await \
-                    self.addon_docker.export_image(Path(temp, "image.tar")):
+                    self.docker.export_image(Path(temp, "image.tar")):
                 return False

             data = {
@@ -417,11 +608,13 @@ class Addon(object):
                 snapshot.add(self.path_data, arcname="data")

         try:
+            _LOGGER.info("Build snapshot for addon %s", self._id)
             await self.loop.run_in_executor(None, _create_tar)
         except tarfile.TarError as err:
             _LOGGER.error("Can't write tarfile %s -> %s", tar_file, err)
             return False

+        _LOGGER.info("Finish snapshot for addon %s", self._id)
         return True

     async def restore(self, tar_file):
@@ -458,15 +651,15 @@ class Addon(object):

             # check version / restore image
             version = data[ATTR_VERSION]
-            if version != self.addon_docker.version:
+            if version != self.docker.version:
                 image_file = Path(temp, "image.tar")
                 if image_file.is_file():
-                    await self.addon_docker.import_image(image_file, version)
+                    await self.docker.import_image(image_file, version)
                 else:
-                    if await self.addon_docker.install(version):
-                        await self.addon_docker.cleanup()
+                    if await self.docker.install(version):
+                        await self.docker.cleanup()
             else:
-                await self.addon_docker.stop()
+                await self.docker.stop()

             # restore data
             def _restore_data():
@@ -476,6 +669,7 @@ class Addon(object):
                 shutil.copytree(str(Path(temp, "data")), str(self.path_data))

             try:
+                _LOGGER.info("Restore data for addon %s", self._id)
                 await self.loop.run_in_executor(None, _restore_data)
             except shutil.Error as err:
                 _LOGGER.error("Can't restore origin data -> %s", err)
@@ -485,4 +679,5 @@ class Addon(object):
             if data[ATTR_STATE] == STATE_STARTED:
                 return await self.start()

+        _LOGGER.info("Finish restore for addon %s", self._id)
         return True
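One detail worth calling out in this hunk: start, stop, restart, and logs turn from coroutine functions into plain methods that return a coroutine, so the caller can await, gather, or shield the result without an extra indirection. A minimal sketch of the pattern, with hypothetical stand-in classes rather than the real DockerAddon code:

import asyncio

class FakeDocker:
    """Stand-in for the real docker wrapper."""
    async def run(self):
        await asyncio.sleep(0)  # pretend to start the container
        return True

class FakeAddon:
    def __init__(self):
        self.docker = FakeDocker()

    def start(self):
        """Return a coroutine; scheduling is left to the caller."""
        return self.docker.run()

async def main():
    addon = FakeAddon()
    # the returned coroutine composes directly with asyncio helpers
    ok = await asyncio.shield(addon.start())
    print("started:", ok)

asyncio.run(main())

The diff's own code passes loop= to shield, which reflects the asyncio API of that era; on current Python the argument is gone and the running loop is used implicitly.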
hassio/addons/build.py (new file, 65 lines)
@@ -0,0 +1,65 @@
+"""HassIO addons build environment."""
+from pathlib import Path
+
+from .validate import SCHEMA_BUILD_CONFIG
+from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON
+from ..tools import JsonConfig
+
+
+class AddonBuild(JsonConfig):
+    """Handle build options for addons."""
+
+    def __init__(self, config, addon):
+        """Initialize addon builder."""
+        self.config = config
+        self.addon = addon
+
+        super().__init__(
+            Path(addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
+
+    def save(self):
+        """Ignore save function."""
+        pass
+
+    @property
+    def base_image(self):
+        """Base images for this addon."""
+        return self._data[ATTR_BUILD_FROM][self.config.arch]
+
+    @property
+    def squash(self):
+        """Return True or False if squash is active."""
+        return self._data[ATTR_SQUASH]
+
+    @property
+    def additional_args(self):
+        """Return additional docker build arguments."""
+        return self._data[ATTR_ARGS]
+
+    def get_docker_args(self, version):
+        """Create a dict with docker build arguments."""
+        args = {
+            'path': str(self.addon.path_location),
+            'tag': "{}:{}".format(self.addon.image, version),
+            'pull': True,
+            'forcerm': True,
+            'squash': self.squash,
+            'labels': {
+                'io.hass.version': version,
+                'io.hass.arch': self.config.arch,
+                'io.hass.type': META_ADDON,
+                'io.hass.name': self.addon.name,
+                'io.hass.description': self.addon.description,
+            },
+            'buildargs': {
+                'BUILD_FROM': self.base_image,
+                'BUILD_VERSION': version,
+                'BUILD_ARCH': self.config.arch,
+                **self.additional_args,
+            }
+        }
+
+        if self.addon.url:
+            args['labels']['io.hass.url'] = self.addon.url
+
+        return args
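The dict returned by get_docker_args lines up with the keyword arguments of docker-py's low-level build API. A plausible consumer, sketched here with a hypothetical build_addon helper (the real DockerAddon wiring is not part of this diff):

import docker

def build_addon(build, version):
    """Feed AddonBuild options into a docker-py image build."""
    client = docker.APIClient()
    # path/tag/pull/forcerm/squash/labels/buildargs are all keyword
    # arguments that APIClient.build() accepts
    for chunk in client.build(decode=True, **build.get_docker_args(version)):
        if 'stream' in chunk:
            print(chunk['stream'], end='')

Note that squash=True requires a Docker daemon with experimental features enabled.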
@@ -3,15 +3,13 @@ import copy
 import logging
 import json
 from pathlib import Path
-import re

 import voluptuous as vol
 from voluptuous.humanize import humanize_error

 from .util import extract_hash_from_path
 from .validate import (
-    SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG,
-    MAP_VOLUME)
+    SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG)
 from ..const import (
     FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
@@ -19,8 +17,6 @@ from ..tools import JsonConfig, read_json_file

 _LOGGER = logging.getLogger(__name__)

-RE_VOLUME = re.compile(MAP_VOLUME)
-

 class Data(JsonConfig):
     """Hold data for addons inside HassIO."""
@@ -118,7 +114,7 @@ class Data(JsonConfig):
             addon_config[ATTR_LOCATON] = str(addon.parent)
             self._cache[addon_slug] = addon_config

-        except OSError:
+        except (OSError, json.JSONDecodeError):
             _LOGGER.warning("Can't read %s", addon)

         except vol.Invalid as ex:
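The widened except clause above matters because a syntactically broken JSON file raises json.JSONDecodeError, which is a ValueError and is not caught by an OSError handler. A quick demonstration:

import json

try:
    json.loads("{ this is not json")
except OSError:
    print("never reached: JSONDecodeError is not an OSError")
except json.JSONDecodeError as err:
    print("caught:", err)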
@@ -1,6 +1,7 @@
 """Init file for HassIO addons git."""
 import asyncio
 import logging
+import functools as ft
 from pathlib import Path
 import shutil

@@ -48,7 +49,9 @@ class GitRepo(object):
         try:
             _LOGGER.info("Clone addon %s repository", self.url)
             self.repo = await self.loop.run_in_executor(
-                None, git.Repo.clone_from, self.url, str(self.path))
+                None, ft.partial(
+                    git.Repo.clone_from, self.url, str(self.path),
+                    recursive=True))

         except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                 git.GitCommandError) as err:
@@ -70,7 +73,7 @@ class GitRepo(object):
                 None, self.repo.remotes.origin.pull)

         except (git.InvalidGitRepositoryError, git.NoSuchPathError,
-                git.exc.GitCommandError) as err:
+                git.GitCommandError) as err:
             _LOGGER.error("Can't pull %s repo: %s.", self.url, err)
             return False
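The ft.partial wrapper is needed because loop.run_in_executor only forwards positional arguments; a keyword argument such as recursive=True has to be bound into the callable first. A self-contained illustration with a dummy clone function:

import asyncio
import functools as ft

def clone(url, path, recursive=False):
    return "cloned {} into {} (recursive={})".format(url, path, recursive)

async def main():
    loop = asyncio.get_running_loop()
    # run_in_executor(executor, func, *args) takes no **kwargs,
    # so keyword arguments are bound with functools.partial
    result = await loop.run_in_executor(
        None, ft.partial(clone, "https://example.com/x.git", "/tmp/x",
                         recursive=True))
    print(result)

asyncio.run(main())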
@@ -1,19 +1,27 @@
 """Validate addons options schema."""
+import logging
+import re

 import voluptuous as vol

 from ..const import (
     ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
-    ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER,
-    STARTUP_BEFORE, STARTUP_INITIALIZE, BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA,
-    ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER, ATTR_ARCH, ATTR_DEVICES,
-    ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64,
-    ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_USER, ATTR_STATE, ATTR_SYSTEM,
-    STATE_STARTED, STATE_STOPPED, ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT,
-    ATTR_NETWORK, ATTR_AUTO_UPDATE)
-from ..validate import NETWORK_PORT, DOCKER_PORTS
+    ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE,
+    STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE,
+    BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER,
+    ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
+    ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
+    ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
+    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
+    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT,
+    ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
+    ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY)
+from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL

+_LOGGER = logging.getLogger(__name__)


-MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$"
+RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$")

 V_STR = 'str'
 V_INT = 'int'
@@ -22,17 +30,50 @@ V_BOOL = 'bool'
 V_EMAIL = 'email'
 V_URL = 'url'
 V_PORT = 'port'
+V_MATCH = 'match'

-ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL, V_PORT])
+RE_SCHEMA_ELEMENT = re.compile(
+    r"^(?:"
+    r"|str|bool|email|url|port"
+    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
+    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
+    r"|match\((?P<match>.*)\)"
+    r")\??$"
+)
+
+SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)

 ARCH_ALL = [
     ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
 ]

-PRIVILEGE_ALL = [
-    "NET_ADMIN"
+STARTUP_ALL = [
+    STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
+    STARTUP_APPLICATION
 ]

+PRIVILEGED_ALL = [
+    "NET_ADMIN",
+    "SYS_ADMIN",
+    "SYS_RAWIO"
+]
+
+BASE_IMAGE = {
+    ARCH_ARMHF: "homeassistant/armhf-base:latest",
+    ARCH_AARCH64: "homeassistant/aarch64-base:latest",
+    ARCH_I386: "homeassistant/i386-base:latest",
+    ARCH_AMD64: "homeassistant/amd64-base:latest",
+}
+
+
+def _simple_startup(value):
+    """Simple startup schema."""
+    if value == "before":
+        return STARTUP_SERVICES
+    if value == "after":
+        return STARTUP_APPLICATION
+    return value
+
+
 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_CONFIG = vol.Schema({
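The new RE_SCHEMA_ELEMENT replaces the flat vol.In list so that an option type can carry inline parameters: int(0,100) bounds a number, match(...) embeds a custom regex, and a trailing ? marks the option as optional. The named groups can be probed directly, as this small runnable excerpt of the same regex shows:

import re

RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|str|bool|email|url|port"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r")\??$"
)

for typ in ("int(0,100)", "float(,9.5)", "match(^\\w+$)", "port", "str?"):
    m = RE_SCHEMA_ELEMENT.match(typ)
    print(typ, "->", m.groupdict())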
@@ -43,28 +84,40 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
     vol.Optional(ATTR_URL): vol.Url(),
     vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
     vol.Required(ATTR_STARTUP):
-        vol.In([STARTUP_BEFORE, STARTUP_AFTER, STARTUP_ONCE,
-                STARTUP_INITIALIZE]),
+        vol.All(_simple_startup, vol.In(STARTUP_ALL)),
     vol.Required(ATTR_BOOT):
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_PORTS): DOCKER_PORTS,
+    vol.Optional(ATTR_WEBUI):
+        vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
     vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
     vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
     vol.Optional(ATTR_TMPFS):
         vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
-    vol.Optional(ATTR_MAP, default=[]): [vol.Match(MAP_VOLUME)],
+    vol.Optional(ATTR_MAP, default=[]): [vol.Match(RE_VOLUME)],
     vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
-    vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGE_ALL)],
+    vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
+    vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
+    vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
+    vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
     vol.Required(ATTR_OPTIONS): dict,
-    vol.Required(ATTR_SCHEMA): vol.Any({
-        vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
-            vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
-        ])
-    }, False),
-    vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
+    vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
+        vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
+            vol.Any(
+                SCHEMA_ELEMENT,
+                {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
+            ),
+        ], vol.Schema({
+            vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
+        }))
+    }), False),
+    vol.Optional(ATTR_IMAGE): vol.Match(r"^[\w{}]+/[\-\w{}]+$"),
     vol.Optional(ATTR_TIMEOUT, default=10):
         vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
-}, extra=vol.ALLOW_EXTRA)
+}, extra=vol.REMOVE_EXTRA)


 # pylint: disable=no-value-for-parameter
@@ -72,17 +125,31 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
     vol.Required(ATTR_NAME): vol.Coerce(str),
     vol.Optional(ATTR_URL): vol.Url(),
     vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
-}, extra=vol.ALLOW_EXTRA)
+}, extra=vol.REMOVE_EXTRA)


+# pylint: disable=no-value-for-parameter
+SCHEMA_BUILD_CONFIG = vol.Schema({
+    vol.Optional(ATTR_BUILD_FROM, default=BASE_IMAGE): vol.Schema({
+        vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
+    }),
+    vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
+    vol.Optional(ATTR_ARGS, default={}): vol.Schema({
+        vol.Coerce(str): vol.Coerce(str)
+    }),
+})
+
+
 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_USER = vol.Schema({
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-    vol.Required(ATTR_OPTIONS): dict,
+    vol.Optional(ATTR_OPTIONS, default={}): dict,
     vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
     vol.Optional(ATTR_BOOT):
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
+    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
+    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
 })
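Under SCHEMA_BUILD_CONFIG, a minimal build.json only needs to override what differs from the defaults; BASE_IMAGE fills in the rest per architecture. A hedged sketch of a validated document, with the schema rebuilt locally in simplified form and the concrete key names ('build_from', 'squash', 'args') assumed to match the ATTR_* constants:

import voluptuous as vol

# simplified local stand-in for the schema above, for illustration only
SCHEMA_BUILD_CONFIG = vol.Schema({
    vol.Optional('build_from', default={}): {str: str},
    vol.Optional('squash', default=False): vol.Boolean(),
    vol.Optional('args', default={}): {str: str},
})

build_json = {
    'build_from': {'amd64': 'homeassistant/amd64-base:latest'},
    'args': {'APP_VERSION': '1.2.3'},
}
# squash is filled in as False; build_from and args pass through
print(SCHEMA_BUILD_CONFIG(build_json))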
@@ -118,14 +185,19 @@ def validate_options(raw_schema):

         # read options
         for key, value in struct.items():
+            # Ignore unknown options / remove from list
             if key not in raw_schema:
-                raise vol.Invalid("Unknown options {}.".format(key))
+                _LOGGER.warning("Unknown options %s", key)
+                continue

             typ = raw_schema[key]
             try:
                 if isinstance(typ, list):
-                    # nested value
-                    options[key] = _nested_validate(typ[0], value, key)
+                    # nested value list
+                    options[key] = _nested_validate_list(typ[0], value, key)
+                elif isinstance(typ, dict):
+                    # nested value dict
+                    options[key] = _nested_validate_dict(typ, value, key)
                 else:
                     # normal value
                     options[key] = _single_validate(typ, value, key)
@@ -133,6 +205,7 @@ def validate_options(raw_schema):
                 raise vol.Invalid(
                     "Type error for {}.".format(key)) from None

+        _check_missing_options(raw_schema, options, 'root')
         return options

     return validate
@@ -141,49 +214,82 @@ def validate_options(raw_schema):
 # pylint: disable=no-value-for-parameter
 def _single_validate(typ, value, key):
     """Validate a single element."""
-    try:
-        # if required argument
-        if value is None:
-            raise vol.Invalid("Missing required option '{}'.".format(key))
-
-        if typ == V_STR:
-            return str(value)
-        elif typ == V_INT:
-            return int(value)
-        elif typ == V_FLOAT:
-            return float(value)
-        elif typ == V_BOOL:
-            return vol.Boolean()(value)
-        elif typ == V_EMAIL:
-            return vol.Email()(value)
-        elif typ == V_URL:
-            return vol.Url()(value)
-        elif typ == V_PORT:
-            return NETWORK_PORT(value)
-
-        raise vol.Invalid("Fatal error for {} type {}.".format(key, typ))
-    except ValueError:
-        raise vol.Invalid(
-            "Type {} error for '{}' on {}.".format(typ, value, key)) from None
+    # if required argument
+    if value is None:
+        raise vol.Invalid("Missing required option '{}'.".format(key))
+
+    # parse extend data from type
+    match = RE_SCHEMA_ELEMENT.match(typ)
+
+    # prepare range
+    range_args = {}
+    for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
+        group_value = match.group(group_name)
+        if group_value:
+            range_args[group_name[2:]] = float(group_value)
+
+    if typ.startswith(V_STR):
+        return str(value)
+    elif typ.startswith(V_INT):
+        return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
+    elif typ.startswith(V_FLOAT):
+        return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
+    elif typ.startswith(V_BOOL):
+        return vol.Boolean()(value)
+    elif typ.startswith(V_EMAIL):
+        return vol.Email()(value)
+    elif typ.startswith(V_URL):
+        return vol.Url()(value)
+    elif typ.startswith(V_PORT):
+        return NETWORK_PORT(value)
+    elif typ.startswith(V_MATCH):
+        return vol.Match(match.group('match'))(str(value))
+
+    raise vol.Invalid("Fatal error for {} type {}".format(key, typ))


-def _nested_validate(typ, data_list, key):
+def _nested_validate_list(typ, data_list, key):
     """Validate nested items."""
     options = []

     for element in data_list:
-        # dict list
+        # Nested?
         if isinstance(typ, dict):
-            c_options = {}
-            for c_key, c_value in element.items():
-                if c_key not in typ:
-                    raise vol.Invalid(
-                        "Unknown nested options {}.".format(c_key))
-
-                c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
-
+            c_options = _nested_validate_dict(typ, element, key)
             options.append(c_options)
-        # normal list
         else:
             options.append(_single_validate(typ, element, key))

     return options


+def _nested_validate_dict(typ, data_dict, key):
+    """Validate nested items."""
+    options = {}
+
+    for c_key, c_value in data_dict.items():
+        # Ignore unknown options / remove from list
+        if c_key not in typ:
+            _LOGGER.warning("Unknown options %s", c_key)
+            continue
+
+        # Nested?
+        if isinstance(typ[c_key], list):
+            options[c_key] = _nested_validate_list(typ[c_key][0],
+                                                   c_value, c_key)
+        else:
+            options[c_key] = _single_validate(typ[c_key], c_value, c_key)
+
+    _check_missing_options(typ, options, key)
+    return options
+
+
+def _check_missing_options(origin, exists, root):
+    """Check if all options are exists."""
+    missing = set(origin) - set(exists)
+    for miss_opt in missing:
+        if isinstance(origin[miss_opt], str) and \
+                origin[miss_opt].endswith("?"):
+            continue
+        raise vol.Invalid(
+            "Missing option {} in {}".format(miss_opt, root))
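Taken together, validate_options now walks three shapes: scalar types via _single_validate, lists via _nested_validate_list, and dicts via _nested_validate_dict, with _check_missing_options enforcing every key not suffixed by "?". A condensed, self-contained toy version of that walk (no voluptuous, type coercion omitted) to make the control flow visible:

def validate(schema, data):
    """Toy version of the nested option walk."""
    out = {}
    for key, value in data.items():
        if key not in schema:
            continue  # unknown options are skipped with a warning
        typ = schema[key]
        if isinstance(typ, list):
            out[key] = [validate(typ[0], v) if isinstance(typ[0], dict)
                        else v for v in value]
        elif isinstance(typ, dict):
            out[key] = validate(typ, value)
        else:
            out[key] = value
    # options ending in "?" are optional; everything else must exist
    missing = {k for k in schema if k not in out and
               not (isinstance(schema[k], str) and schema[k].endswith("?"))}
    if missing:
        raise ValueError("missing options: {}".format(missing))
    return out

schema = {'name': 'str', 'port': 'port', 'extra': 'str?',
          'targets': [{'host': 'str', 'ssl': 'bool?'}]}
data = {'name': 'demo', 'port': 8080,
        'targets': [{'host': 'a'}, {'host': 'b', 'ssl': True}]}
print(validate(schema, data))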
@@ -28,14 +28,16 @@ class RestAPI(object):
         self._handler = None
         self.server = None

-    def register_host(self, host_control):
+    def register_host(self, host_control, hardware):
         """Register hostcontrol function."""
-        api_host = APIHost(self.config, self.loop, host_control)
+        api_host = APIHost(self.config, self.loop, host_control, hardware)

         self.webapp.router.add_get('/host/info', api_host.info)
+        self.webapp.router.add_get('/host/hardware', api_host.hardware)
         self.webapp.router.add_post('/host/reboot', api_host.reboot)
         self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
         self.webapp.router.add_post('/host/update', api_host.update)
+        self.webapp.router.add_post('/host/options', api_host.options)

     def register_network(self, host_control):
         """Register network function."""
@@ -45,16 +47,14 @@ class RestAPI(object):
         self.webapp.router.add_post('/network/options', api_net.options)

     def register_supervisor(self, supervisor, snapshots, addons, host_control,
-                            websession):
+                            updater):
         """Register supervisor function."""
         api_supervisor = APISupervisor(
             self.config, self.loop, supervisor, snapshots, addons,
-            host_control, websession)
+            host_control, updater)

         self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
         self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
-        self.webapp.router.add_get(
-            '/supervisor/addons', api_supervisor.available_addons)
         self.webapp.router.add_post(
             '/supervisor/update', api_supervisor.update)
         self.webapp.router.add_post(
@@ -68,10 +68,17 @@ class RestAPI(object):
         api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)

         self.webapp.router.add_get('/homeassistant/info', api_hass.info)
+        self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
         self.webapp.router.add_post('/homeassistant/options', api_hass.options)
         self.webapp.router.add_post('/homeassistant/update', api_hass.update)
         self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
-        self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
+        self.webapp.router.add_post('/homeassistant/stop', api_hass.stop)
+        self.webapp.router.add_post('/homeassistant/start', api_hass.start)
+        self.webapp.router.add_post('/homeassistant/check', api_hass.check)
+        self.webapp.router.add_post(
+            '/homeassistant/api/{path:.+}', api_hass.api)
+        self.webapp.router.add_get(
+            '/homeassistant/api/{path:.+}', api_hass.api)

     def register_addons(self, addons):
         """Register homeassistant function."""
@@ -93,7 +100,11 @@ class RestAPI(object):
             '/addons/{addon}/update', api_addons.update)
         self.webapp.router.add_post(
             '/addons/{addon}/options', api_addons.options)
+        self.webapp.router.add_post(
+            '/addons/{addon}/rebuild', api_addons.rebuild)
         self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
+        self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
+        self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)

     def register_security(self):
         """Register security function."""
@@ -155,5 +166,5 @@ class RestAPI(object):
         await self.webapp.shutdown()

         if self._handler:
-            await self._handler.finish_connections(60)
+            await self._handler.shutdown(60)
         await self.webapp.cleanup()
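For readers unfamiliar with the routing style used here: aiohttp resolves segments like {addon} or {path:.+} at request time and exposes them through match_info. A tiny standalone illustration of the same mechanism:

from aiohttp import web

async def addon_logs(request):
    # '{addon}' in the route becomes a match_info entry
    slug = request.match_info['addon']
    return web.Response(text="logs for " + slug)

app = web.Application()
app.router.add_get('/addons/{addon}/logs', addon_logs)
# web.run_app(app)  # serve on :8080; GET /addons/core_ssh/logs -> 200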
@@ -11,7 +11,10 @@ from ..const import (
     ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
     ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG,
     ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
-    ATTR_INSTALLED, BOOT_AUTO, BOOT_MANUAL)
+    ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
+    ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
+    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
+    CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
 from ..validate import DOCKER_PORTS

 _LOGGER = logging.getLogger(__name__)
@@ -48,6 +51,14 @@ class APIAddons(object):

         return addon

+    @staticmethod
+    def _pretty_devices(addon):
+        """Return a simplified device list."""
+        dev_list = addon.devices
+        if not dev_list:
+            return
+        return [row.split(':')[0] for row in dev_list]
+
     @api_process
     async def list(self, request):
         """Return all addons / repositories ."""
@@ -63,7 +74,15 @@ class APIAddons(object):
                 ATTR_DETACHED: addon.is_detached,
                 ATTR_REPOSITORY: addon.repository,
                 ATTR_BUILD: addon.need_build,
+                ATTR_PRIVILEGED: addon.privileged,
+                ATTR_DEVICES: self._pretty_devices(addon),
                 ATTR_URL: addon.url,
+                ATTR_LOGO: addon.with_logo,
+                ATTR_STDIN: addon.with_stdin,
+                ATTR_HASSIO_API: addon.access_hassio_api,
+                ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
+                ATTR_AUDIO: addon.with_audio,
+                ATTR_GPIO: addon.with_gpio,
             })

         data_repositories = []
@@ -82,9 +101,10 @@ class APIAddons(object):
         }

     @api_process
-    def reload(self, request):
+    async def reload(self, request):
         """Reload all addons data."""
-        return self.addons.reload()
+        await asyncio.shield(self.addons.reload(), loop=self.loop)
+        return True

     @api_process
     async def info(self, request):
@@ -105,7 +125,18 @@ class APIAddons(object):
             ATTR_DETACHED: addon.is_detached,
             ATTR_BUILD: addon.need_build,
             ATTR_NETWORK: addon.ports,
-            ATTR_HOST_NETWORK: addon.network_mode == 'host',
+            ATTR_HOST_NETWORK: addon.host_network,
+            ATTR_PRIVILEGED: addon.privileged,
+            ATTR_DEVICES: self._pretty_devices(addon),
+            ATTR_LOGO: addon.with_logo,
+            ATTR_WEBUI: addon.webui,
+            ATTR_STDIN: addon.with_stdin,
+            ATTR_HASSIO_API: addon.access_hassio_api,
+            ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
+            ATTR_GPIO: addon.with_gpio,
+            ATTR_AUDIO: addon.with_audio,
+            ATTR_AUDIO_INPUT: addon.audio_input,
+            ATTR_AUDIO_OUTPUT: addon.audio_output,
         }

     @api_process
@@ -127,27 +158,27 @@ class APIAddons(object):
             addon.auto_update = body[ATTR_AUTO_UPDATE]
         if ATTR_NETWORK in body:
             addon.ports = body[ATTR_NETWORK]
+        if ATTR_AUDIO_INPUT in body:
+            addon.audio_input = body[ATTR_AUDIO_INPUT]
+        if ATTR_AUDIO_OUTPUT in body:
+            addon.audio_output = body[ATTR_AUDIO_OUTPUT]

         return True

     @api_process
-    async def install(self, request):
+    def install(self, request):
         """Install addon."""
-        body = await api_validate(SCHEMA_VERSION, request)
         addon = self._extract_addon(request, check_installed=False)
-        version = body.get(ATTR_VERSION)
-
-        return await asyncio.shield(
-            addon.install(version=version), loop=self.loop)
+        return asyncio.shield(addon.install(), loop=self.loop)

     @api_process
-    async def uninstall(self, request):
+    def uninstall(self, request):
         """Uninstall addon."""
         addon = self._extract_addon(request)
-        return await asyncio.shield(addon.uninstall(), loop=self.loop)
+        return asyncio.shield(addon.uninstall(), loop=self.loop)

     @api_process
-    async def start(self, request):
+    def start(self, request):
         """Start addon."""
         addon = self._extract_addon(request)

@@ -158,32 +189,61 @@ class APIAddons(object):
         except vol.Invalid as ex:
             raise RuntimeError(humanize_error(options, ex)) from None

-        return await asyncio.shield(addon.start(), loop=self.loop)
+        return asyncio.shield(addon.start(), loop=self.loop)

     @api_process
-    async def stop(self, request):
+    def stop(self, request):
         """Stop addon."""
         addon = self._extract_addon(request)
-        return await asyncio.shield(addon.stop(), loop=self.loop)
+        return asyncio.shield(addon.stop(), loop=self.loop)

     @api_process
-    async def update(self, request):
+    def update(self, request):
         """Update addon."""
-        body = await api_validate(SCHEMA_VERSION, request)
         addon = self._extract_addon(request)
-        version = body.get(ATTR_VERSION)

-        return await asyncio.shield(
-            addon.update(version=version), loop=self.loop)
+        if addon.last_version == addon.version_installed:
+            raise RuntimeError("No update available!")
+
+        return asyncio.shield(addon.update(), loop=self.loop)

     @api_process
-    async def restart(self, request):
+    def restart(self, request):
         """Restart addon."""
         addon = self._extract_addon(request)
-        return await asyncio.shield(addon.restart(), loop=self.loop)
+        return asyncio.shield(addon.restart(), loop=self.loop)

-    @api_process_raw
+    @api_process
+    def rebuild(self, request):
+        """Rebuild local build addon."""
+        addon = self._extract_addon(request)
+        if not addon.need_build:
+            raise RuntimeError("Only local build addons are supported")
+
+        return asyncio.shield(addon.rebuild(), loop=self.loop)
+
+    @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
         """Return logs from addon."""
         addon = self._extract_addon(request)
         return addon.logs()
+
+    @api_process_raw(CONTENT_TYPE_PNG)
+    async def logo(self, request):
+        """Return logo from addon."""
+        addon = self._extract_addon(request, check_installed=False)
+        if not addon.with_logo:
+            raise RuntimeError("No image found!")
+
+        with addon.path_logo.open('rb') as png:
+            return png.read()
+
+    @api_process
+    async def stdin(self, request):
+        """Write to stdin of addon."""
+        addon = self._extract_addon(request)
+        if not addon.with_stdin:
+            raise RuntimeError("STDIN not supported by addons")
+
+        data = await request.read()
+        return await asyncio.shield(addon.write_stdin(data), loop=self.loop)
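Most of these handlers now hand a shielded future back to the api_process wrapper. The point of asyncio.shield is that cancelling the outer await (for example, the HTTP client disconnecting) does not cancel the wrapped operation, so an install or update runs to completion. A demonstration of that semantic:

import asyncio

async def install():
    await asyncio.sleep(0.5)
    print("install finished despite the cancelled request")
    return True

async def handler():
    # only the outer await is cancelled; install() keeps running
    return await asyncio.shield(install())

async def main():
    task = asyncio.ensure_future(handler())
    await asyncio.sleep(0.1)
    task.cancel()  # simulate a client disconnect
    await asyncio.sleep(1)

asyncio.run(main())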
@@ -2,21 +2,34 @@
 import asyncio
 import logging

+import aiohttp
+from aiohttp import web
+from aiohttp.web_exceptions import HTTPBadGateway
+from aiohttp.hdrs import CONTENT_TYPE
+import async_timeout
 import voluptuous as vol

 from .util import api_process, api_process_raw, api_validate
 from ..const import (
-    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM)
-from ..validate import HASS_DEVICES
+    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
+    ATTR_BOOT, ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG,
+    CONTENT_TYPE_BINARY, HEADER_HA_ACCESS)
+from ..validate import HASS_DEVICES, NETWORK_PORT

 _LOGGER = logging.getLogger(__name__)


+# pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema({
     vol.Optional(ATTR_DEVICES): HASS_DEVICES,
+    vol.Optional(ATTR_BOOT): vol.Boolean(),
     vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
     vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
         vol.Any(None, vol.Coerce(str)),
+    vol.Optional(ATTR_PORT): NETWORK_PORT,
+    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
+    vol.Optional(ATTR_SSL): vol.Boolean(),
+    vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
 })

 SCHEMA_VERSION = vol.Schema({
@@ -33,6 +46,45 @@ class APIHomeAssistant(object):
         self.loop = loop
         self.homeassistant = homeassistant

+    async def homeassistant_proxy(self, path, request):
+        """Return a client request with proxy origin for Home-Assistant."""
+        url = "{}/api/{}".format(self.homeassistant.api_url, path)
+
+        try:
+            data = None
+            headers = {}
+            method = getattr(
+                self.homeassistant.websession, request.method.lower())
+
+            # read data
+            with async_timeout.timeout(10, loop=self.loop):
+                data = await request.read()
+
+            if data:
+                headers.update({CONTENT_TYPE: request.content_type})
+
+            # need api password?
+            if self.homeassistant.api_password:
+                headers = {HEADER_HA_ACCESS: self.homeassistant.api_password}
+
+            # reset headers
+            if not headers:
+                headers = None
+
+            client = await method(
+                url, data=data, headers=headers, timeout=300
+            )
+
+            return client
+
+        except aiohttp.ClientError as err:
+            _LOGGER.error("Client error on api %s request %s.", path, err)
+
+        except asyncio.TimeoutError:
+            _LOGGER.error("Client timeout error on api request %s.", path)
+
+        raise HTTPBadGateway()
+
     @api_process
     async def info(self, request):
         """Return host information."""
@@ -42,6 +94,10 @@ class APIHomeAssistant(object):
             ATTR_IMAGE: self.homeassistant.image,
             ATTR_DEVICES: self.homeassistant.devices,
             ATTR_CUSTOM: self.homeassistant.is_custom_image,
+            ATTR_BOOT: self.homeassistant.boot,
+            ATTR_PORT: self.homeassistant.api_port,
+            ATTR_SSL: self.homeassistant.api_ssl,
+            ATTR_WATCHDOG: self.homeassistant.watchdog,
         }

     @api_process
@@ -56,33 +112,71 @@ class APIHomeAssistant(object):
             self.homeassistant.set_custom(
                 body[ATTR_IMAGE], body[ATTR_LAST_VERSION])

+        if ATTR_BOOT in body:
+            self.homeassistant.boot = body[ATTR_BOOT]
+
+        if ATTR_PORT in body:
+            self.homeassistant.api_port = body[ATTR_PORT]
+
+        if ATTR_PASSWORD in body:
+            self.homeassistant.api_password = body[ATTR_PASSWORD]
+
+        if ATTR_SSL in body:
+            self.homeassistant.api_ssl = body[ATTR_SSL]
+
+        if ATTR_WATCHDOG in body:
+            self.homeassistant.watchdog = body[ATTR_WATCHDOG]
+
         return True

     @api_process
     async def update(self, request):
         """Update homeassistant."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.config.last_homeassistant)
+        version = body.get(ATTR_VERSION, self.homeassistant.last_version)

-        if self.homeassistant.in_progress:
-            raise RuntimeError("Other task is in progress")
+        if version == self.homeassistant.version:
+            raise RuntimeError("Version {} is already in use".format(version))

         return await asyncio.shield(
             self.homeassistant.update(version), loop=self.loop)

     @api_process
-    async def restart(self, request):
+    def stop(self, request):
+        """Stop homeassistant."""
+        return asyncio.shield(self.homeassistant.stop(), loop=self.loop)
+
+    @api_process
+    def start(self, request):
+        """Start homeassistant."""
+        return asyncio.shield(self.homeassistant.run(), loop=self.loop)
+
+    @api_process
+    def restart(self, request):
         """Restart homeassistant."""
-        if self.homeassistant.in_progress:
-            raise RuntimeError("Other task is in progress")
+        return asyncio.shield(self.homeassistant.restart(), loop=self.loop)

-        return await asyncio.shield(
-            self.homeassistant.restart(), loop=self.loop)
-
-    @api_process_raw
+    @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
-        """Return homeassistant docker logs.
-
-        Return a coroutine.
-        """
+        """Return homeassistant docker logs."""
         return self.homeassistant.logs()
+
+    @api_process
+    async def check(self, request):
+        """Check config of homeassistant."""
+        code, message = await self.homeassistant.check_config()
+        if not code:
+            raise RuntimeError(message)
+
+        return True
+
+    async def api(self, request):
+        """Proxy API request to Home-Assistant."""
+        path = request.match_info.get('path')
+
+        client = await self.homeassistant_proxy(path, request)
+        return web.Response(
+            body=await client.read(),
+            status=client.status,
+            content_type=client.content_type
+        )
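With the new pass-through routes registered earlier, anything that can reach the Supervisor API can reach the Home Assistant REST API through it. A hypothetical client-side call, where the hassio hostname and unauthenticated access are assumptions made for the sketch:

import asyncio
import aiohttp

async def fetch_states():
    async with aiohttp.ClientSession() as session:
        # forwarded by APIHomeAssistant.api() to <home-assistant>/api/states
        async with session.get(
                "http://hassio/homeassistant/api/states") as resp:
            print(resp.status, await resp.text())

asyncio.run(fetch_states())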
@@ -7,7 +7,9 @@ import voluptuous as vol
 from .util import api_process_hostcontrol, api_process, api_validate
 from ..const import (
     ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
-    ATTR_OS)
+    ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
+    ATTR_AUDIO_OUTPUT, ATTR_GPIO)
+from ..validate import ALSA_CHANNEL

 _LOGGER = logging.getLogger(__name__)

@@ -15,15 +17,21 @@ SCHEMA_VERSION = vol.Schema({
     vol.Optional(ATTR_VERSION): vol.Coerce(str),
 })

+SCHEMA_OPTIONS = vol.Schema({
+    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
+    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
+})
+
+
 class APIHost(object):
     """Handle rest api for host functions."""

-    def __init__(self, config, loop, host_control):
+    def __init__(self, config, loop, host_control, hardware):
         """Initialize host rest api part."""
         self.config = config
         self.loop = loop
         self.host_control = host_control
+        self.local_hw = hardware

     @api_process
     async def info(self, request):
@@ -37,6 +45,18 @@ class APIHost(object):
             ATTR_OS: self.host_control.os_info,
         }

+    @api_process
+    async def options(self, request):
+        """Process host options."""
+        body = await api_validate(SCHEMA_OPTIONS, request)
+
+        if ATTR_AUDIO_OUTPUT in body:
+            self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
+        if ATTR_AUDIO_INPUT in body:
+            self.config.audio_input = body[ATTR_AUDIO_INPUT]
+
+        return True
+
     @api_process_hostcontrol
     def reboot(self, request):
         """Reboot host."""
@@ -54,7 +74,18 @@ class APIHost(object):
         version = body.get(ATTR_VERSION, self.host_control.last_version)

         if version == self.host_control.version:
-            raise RuntimeError("Version is already in use")
+            raise RuntimeError("Version {} is already in use".format(version))

         return await asyncio.shield(
             self.host_control.update(version=version), loop=self.loop)
+
+    @api_process
+    async def hardware(self, request):
+        """Return local hardware infos."""
+        return {
+            ATTR_SERIAL: list(self.local_hw.serial_devices),
+            ATTR_INPUT: list(self.local_hw.input_devices),
+            ATTR_DISK: list(self.local_hw.disk_devices),
+            ATTR_GPIO: list(self.local_hw.gpio_devices),
+            ATTR_AUDIO: self.local_hw.audio_devices,
+        }
@@ -98,5 +98,5 @@ class APISecurity(object):
         session = hashlib.sha256(os.urandom(54)).hexdigest()

         # store session
-        self.config.security_sessions = (session, valid_until)
+        self.config.add_security_session(session, valid_until)
         return {ATTR_SESSION: session}
@@ -63,9 +63,10 @@ class APISnapshots(object):
         }

     @api_process
-    def reload(self, request):
+    async def reload(self, request):
         """Reload snapshot list."""
-        return asyncio.shield(self.snapshots.reload(), loop=self.loop)
+        await asyncio.shield(self.snapshots.reload(), loop=self.loop)
+        return True

     @api_process
     async def info(self, request):
@@ -110,10 +111,10 @@ class APISnapshots(object):
             self.snapshots.do_snapshot_partial(**body), loop=self.loop)

     @api_process
-    async def restore_full(self, request):
+    def restore_full(self, request):
         """Full-Restore a snapshot."""
         snapshot = self._extract_snapshot(request)
-        return await asyncio.shield(
+        return asyncio.shield(
             self.snapshots.do_restore_full(snapshot), loop=self.loop)

     @api_process
@@ -124,7 +125,8 @@ class APISnapshots(object):

         return await asyncio.shield(
             self.snapshots.do_restore_partial(snapshot, **body),
-            loop=self.loop)
+            loop=self.loop
+        )

     @api_process
     async def remove(self, request):
@@ -6,12 +6,11 @@ import voluptuous as vol
|
|||||||
|
|
||||||
from .util import api_process, api_process_raw, api_validate
|
from .util import api_process, api_process_raw, api_validate
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL,
|
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
|
||||||
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES,
|
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
|
||||||
ATTR_REPOSITORY, ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED,
|
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
|
||||||
ATTR_DETACHED, ATTR_SOURCE, ATTR_MAINTAINER, ATTR_URL, ATTR_ARCH,
|
ATTR_STATE, CONTENT_TYPE_BINARY)
|
||||||
ATTR_BUILD, ATTR_TIMEZONE)
|
from ..validate import validate_timezone
|
||||||
from ..tools import validate_timezone
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -31,7 +30,7 @@ class APISupervisor(object):
|
|||||||
"""Handle rest api for supervisor functions."""
|
"""Handle rest api for supervisor functions."""
|
||||||
|
|
||||||
def __init__(self, config, loop, supervisor, snapshots, addons,
|
def __init__(self, config, loop, supervisor, snapshots, addons,
|
||||||
-                 host_control, websession):
+                 host_control, updater):
         """Initialize supervisor rest api part."""
         self.config = config
         self.loop = loop
@@ -39,43 +38,7 @@ class APISupervisor(object):
         self.addons = addons
         self.snapshots = snapshots
         self.host_control = host_control
-        self.websession = websession
+        self.updater = updater
 
-    def _addons_list(self, only_installed=False):
-        """Return a list of addons."""
-        data = []
-        for addon in self.addons.list_addons:
-            if only_installed and not addon.is_installed:
-                continue
-
-            data.append({
-                ATTR_NAME: addon.name,
-                ATTR_SLUG: addon.slug,
-                ATTR_DESCRIPTON: addon.description,
-                ATTR_VERSION: addon.last_version,
-                ATTR_INSTALLED: addon.version_installed,
-                ATTR_ARCH: addon.supported_arch,
-                ATTR_DETACHED: addon.is_detached,
-                ATTR_REPOSITORY: addon.repository,
-                ATTR_BUILD: addon.need_build,
-                ATTR_URL: addon.url,
-            })
-
-        return data
-
-    def _repositories_list(self):
-        """Return a list of addons repositories."""
-        data = []
-        for repository in self.addons.list_repositories:
-            data.append({
-                ATTR_SLUG: repository.slug,
-                ATTR_NAME: repository.name,
-                ATTR_SOURCE: repository.source,
-                ATTR_URL: repository.url,
-                ATTR_MAINTAINER: repository.maintainer,
-            })
-
-        return data
-
     @api_process
     async def ping(self, request):
@@ -85,31 +48,37 @@ class APISupervisor(object):
     @api_process
     async def info(self, request):
         """Return host information."""
+        list_addons = []
+        for addon in self.addons.list_addons:
+            if addon.is_installed:
+                list_addons.append({
+                    ATTR_NAME: addon.name,
+                    ATTR_SLUG: addon.slug,
+                    ATTR_DESCRIPTON: addon.description,
+                    ATTR_STATE: await addon.state(),
+                    ATTR_VERSION: addon.last_version,
+                    ATTR_INSTALLED: addon.version_installed,
+                    ATTR_REPOSITORY: addon.repository,
+                    ATTR_LOGO: addon.with_logo,
+                })
+
         return {
             ATTR_VERSION: HASSIO_VERSION,
-            ATTR_LAST_VERSION: self.config.last_hassio,
-            ATTR_BETA_CHANNEL: self.config.upstream_beta,
+            ATTR_LAST_VERSION: self.updater.version_hassio,
+            ATTR_BETA_CHANNEL: self.updater.beta_channel,
             ATTR_ARCH: self.config.arch,
             ATTR_TIMEZONE: self.config.timezone,
-            ATTR_ADDONS: self._addons_list(only_installed=True),
+            ATTR_ADDONS: list_addons,
            ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
         }
 
-    @api_process
-    async def available_addons(self, request):
-        """Return information for all available addons."""
-        return {
-            ATTR_ADDONS: self._addons_list(),
-            ATTR_REPOSITORIES: self._repositories_list(),
-        }
-
     @api_process
     async def options(self, request):
         """Set supervisor options."""
         body = await api_validate(SCHEMA_OPTIONS, request)
 
         if ATTR_BETA_CHANNEL in body:
-            self.config.upstream_beta = body[ATTR_BETA_CHANNEL]
+            self.updater.beta_channel = body[ATTR_BETA_CHANNEL]
 
         if ATTR_TIMEZONE in body:
             self.config.timezone = body[ATTR_TIMEZONE]
@@ -124,10 +93,10 @@ class APISupervisor(object):
     async def update(self, request):
         """Update supervisor OS."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.config.last_hassio)
+        version = body.get(ATTR_VERSION, self.updater.version_hassio)
 
         if version == self.supervisor.version:
-            raise RuntimeError("Version is already in use")
+            raise RuntimeError("Version {} is already in use".format(version))
 
         return await asyncio.shield(
             self.supervisor.update(version), loop=self.loop)
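The update endpoint wraps the long-running `self.supervisor.update(version)` in `asyncio.shield`, so a cancelled HTTP request does not abort the update mid-flight. A minimal standalone sketch of that semantics (the names here are illustrative, not from the repository):

```python
import asyncio

async def long_update():
    """Stand-in for a supervisor update that must not be aborted midway."""
    await asyncio.sleep(2)
    return "updated"

async def handler():
    # shield(): if this coroutine is cancelled, the inner task keeps running
    return await asyncio.shield(long_update())

async def main():
    task = asyncio.ensure_future(handler())
    await asyncio.sleep(0.1)
    task.cancel()             # simulate an aborted API request
    await asyncio.sleep(2.5)  # the shielded update still runs to completion

asyncio.get_event_loop().run_until_complete(main())
```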
@@ -138,7 +107,7 @@
         tasks = [
             self.addons.reload(),
             self.snapshots.reload(),
-            self.config.fetch_update_infos(self.websession),
+            self.updater.fetch_data(),
             self.host_control.load()
         ]
         results, _ = await asyncio.shield(
@@ -150,10 +119,7 @@
 
         return True
 
-    @api_process_raw
+    @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
-        """Return supervisor docker logs.
-
-        Return a coroutine.
-        """
+        """Return supervisor docker logs."""
         return self.supervisor.logs()
@@ -9,7 +9,8 @@ import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
 from ..const import (
-    JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR)
+    JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
+    CONTENT_TYPE_BINARY)
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -65,25 +66,27 @@ def api_process_hostcontrol(method):
     return wrap_hostcontrol
 
 
-def api_process_raw(method):
-    """Wrap function with raw output to rest api."""
-    async def wrap_api(api, *args, **kwargs):
-        """Return api information."""
-        try:
-            message = await method(api, *args, **kwargs)
-        except RuntimeError as err:
-            message = str(err).encode()
-
-        return web.Response(body=message)
-
-    return wrap_api
+def api_process_raw(content):
+    """Wrap content_type into function."""
+    def wrap_method(method):
+        """Wrap function with raw output to rest api."""
+        async def wrap_api(api, *args, **kwargs):
+            """Return api information."""
+            try:
+                msg_data = await method(api, *args, **kwargs)
+                msg_type = content
+            except RuntimeError as err:
+                msg_data = str(err).encode()
+                msg_type = CONTENT_TYPE_BINARY
+
+            return web.Response(body=msg_data, content_type=msg_type)
+
+        return wrap_api
+    return wrap_method
 
 
 def api_return_error(message=None):
     """Return a API error message."""
-    if message:
-        _LOGGER.error(message)
-
     return web.json_response({
         JSON_RESULT: RESULT_ERROR,
         JSON_MESSAGE: message,
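The key change here is that `api_process_raw` turns from a plain decorator into a decorator factory: it is first called with a content type and only then wraps the handler, so the response's `content_type` is captured in a closure. A minimal sketch of the same two-level pattern, independent of aiohttp:

```python
import functools

def process_raw(content_type):
    """Decorator factory: capture content_type, then wrap the handler."""
    def wrap_method(method):
        @functools.wraps(method)
        async def wrap_api(*args, **kwargs):
            data = await method(*args, **kwargs)
            return (data, content_type)  # stand-in for web.Response(...)
        return wrap_api
    return wrap_method

@process_raw('application/octet-stream')
async def logs():
    return b"log output"
```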
@@ -2,6 +2,7 @@
 import logging
 import os
 import signal
+import shutil
 from pathlib import Path
 
 from colorlog import ColoredFormatter
@@ -100,6 +101,7 @@ def initialize_logging():
 
 def check_environment():
     """Check if all environment are exists."""
+    # check environment variables
     for key in ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME',
                 'HOMEASSISTANT_REPOSITORY'):
         try:
@@ -108,29 +110,35 @@
         _LOGGER.fatal("Can't find %s in env!", key)
         return False
 
+    # check docker socket
     if not SOCKET_DOCKER.is_socket():
         _LOGGER.fatal("Can't find docker socket!")
         return False
 
+    # check socat exec
+    if not shutil.which('socat'):
+        _LOGGER.fatal("Can't find socat program!")
+        return False
+
     return True
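The startup check now combines three probes: required environment variables, the Docker unix socket, and the `socat` binary that the new DNS forwarder depends on. A condensed sketch of the same pattern (the variable names are taken from the diff; the socket path matches `SOCKET_DOCKER` above):

```python
import os
import shutil
from pathlib import Path

def check_environment(keys=('SUPERVISOR_SHARE', 'SUPERVISOR_NAME')):
    # every required variable must be present in the environment
    for key in keys:
        if key not in os.environ:
            return False
    # the docker endpoint must exist as a unix socket
    if not Path("/var/run/docker.sock").is_socket():
        return False
    # socat must be on PATH, or DNS forwarding cannot start
    return shutil.which('socat') is not None
```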
 
 
-def reg_signal(loop, hassio):
+def reg_signal(loop):
     """Register SIGTERM, SIGKILL to stop system."""
     try:
         loop.add_signal_handler(
-            signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
+            signal.SIGTERM, lambda: loop.call_soon(loop.stop))
     except (ValueError, RuntimeError):
         _LOGGER.warning("Could not bind to SIGTERM")
 
     try:
         loop.add_signal_handler(
-            signal.SIGHUP, lambda: loop.create_task(hassio.stop()))
+            signal.SIGHUP, lambda: loop.call_soon(loop.stop))
     except (ValueError, RuntimeError):
         _LOGGER.warning("Could not bind to SIGHUP")
 
     try:
         loop.add_signal_handler(
-            signal.SIGINT, lambda: loop.create_task(hassio.stop()))
+            signal.SIGINT, lambda: loop.call_soon(loop.stop))
     except (ValueError, RuntimeError):
         _LOGGER.warning("Could not bind to SIGINT")
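The signal handlers no longer schedule `hassio.stop()` as a task; they simply stop the event loop, and shutdown work runs after `run_forever()` returns. A minimal demonstration of that pattern (Linux-only signals, matching the platform this repo targets):

```python
import asyncio
import signal

loop = asyncio.get_event_loop()
for sig in (signal.SIGTERM, signal.SIGHUP, signal.SIGINT):
    try:
        # call_soon(loop.stop) defers the stop to the next loop iteration
        loop.add_signal_handler(sig, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        pass  # e.g. not on the main thread, or unsupported platform

# loop.run_forever() returns once a signal fires; cleanup runs afterwards
```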
hassio/config.py
@@ -4,55 +4,29 @@ import logging
 import os
 from pathlib import Path, PurePath
 
-import voluptuous as vol
-
-from .const import FILE_HASSIO_CONFIG, HASSIO_DATA
-from .tools import fetch_last_versions, JsonConfig, validate_timezone
+from .const import (
+    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
+    ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
+    ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT)
+from .tools import JsonConfig, parse_datetime
+from .validate import SCHEMA_HASSIO_CONFIG
 
 _LOGGER = logging.getLogger(__name__)
 
-DATETIME_FORMAT = "%Y%m%d %H:%M:%S"
-
 HOMEASSISTANT_CONFIG = PurePath("homeassistant")
-HOMEASSISTANT_LAST = 'homeassistant_last'
 
 HASSIO_SSL = PurePath("ssl")
-HASSIO_LAST = 'hassio_last'
 
 ADDONS_CORE = PurePath("addons/core")
 ADDONS_LOCAL = PurePath("addons/local")
 ADDONS_GIT = PurePath("addons/git")
 ADDONS_DATA = PurePath("addons/data")
-ADDONS_CUSTOM_LIST = 'addons_custom_list'
 
 BACKUP_DATA = PurePath("backup")
 SHARE_DATA = PurePath("share")
 TMP_DATA = PurePath("tmp")
 
-UPSTREAM_BETA = 'upstream_beta'
-API_ENDPOINT = 'api_endpoint'
-TIMEZONE = 'timezone'
-
-SECURITY_INITIALIZE = 'security_initialize'
-SECURITY_TOTP = 'security_totp'
-SECURITY_PASSWORD = 'security_password'
-SECURITY_SESSIONS = 'security_sessions'
-
-
-# pylint: disable=no-value-for-parameter
-SCHEMA_CONFIG = vol.Schema({
-    vol.Optional(UPSTREAM_BETA, default=False): vol.Boolean(),
-    vol.Optional(API_ENDPOINT): vol.Coerce(str),
-    vol.Optional(TIMEZONE, default='UTC'): validate_timezone,
-    vol.Optional(HOMEASSISTANT_LAST): vol.Coerce(str),
-    vol.Optional(HASSIO_LAST): vol.Coerce(str),
-    vol.Optional(ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
-    vol.Optional(SECURITY_INITIALIZE, default=False): vol.Boolean(),
-    vol.Optional(SECURITY_TOTP): vol.Coerce(str),
-    vol.Optional(SECURITY_PASSWORD): vol.Coerce(str),
-    vol.Optional(SECURITY_SESSIONS, default={}):
-        {vol.Coerce(str): vol.Coerce(str)},
-}, extra=vol.REMOVE_EXTRA)
+DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()
 
 
 class CoreConfig(JsonConfig):
@@ -60,64 +34,35 @@ class CoreConfig(JsonConfig):
 
     def __init__(self):
         """Initialize config object."""
-        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_CONFIG)
+        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
         self.arch = None
 
-    async def fetch_update_infos(self, websession):
-        """Read current versions from web."""
-        last = await fetch_last_versions(websession, beta=self.upstream_beta)
-
-        if last:
-            self._data.update({
-                HOMEASSISTANT_LAST: last.get('homeassistant'),
-                HASSIO_LAST: last.get('hassio'),
-            })
-            self.save()
-            return True
-
-        return False
-
-    @property
-    def api_endpoint(self):
-        """Return IP address of api endpoint."""
-        return self._data[API_ENDPOINT]
-
-    @api_endpoint.setter
-    def api_endpoint(self, value):
-        """Store IP address of api endpoint."""
-        self._data[API_ENDPOINT] = value
-
-    @property
-    def upstream_beta(self):
-        """Return True if we run in beta upstream."""
-        return self._data[UPSTREAM_BETA]
-
-    @upstream_beta.setter
-    def upstream_beta(self, value):
-        """Set beta upstream mode."""
-        self._data[UPSTREAM_BETA] = bool(value)
-        self.save()
-
     @property
     def timezone(self):
         """Return system timezone."""
-        return self._data[TIMEZONE]
+        return self._data[ATTR_TIMEZONE]
 
     @timezone.setter
     def timezone(self, value):
         """Set system timezone."""
-        self._data[TIMEZONE] = value
+        self._data[ATTR_TIMEZONE] = value
         self.save()
 
     @property
-    def last_homeassistant(self):
-        """Actual version of homeassistant."""
-        return self._data.get(HOMEASSISTANT_LAST)
-
-    @property
-    def last_hassio(self):
-        """Actual version of hassio."""
-        return self._data.get(HASSIO_LAST)
+    def last_boot(self):
+        """Return last boot datetime."""
+        boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)
+
+        boot_time = parse_datetime(boot_str)
+        if not boot_time:
+            return datetime.utcfromtimestamp(1)
+        return boot_time
+
+    @last_boot.setter
+    def last_boot(self, value):
+        """Set last boot datetime."""
+        self._data[ATTR_LAST_BOOT] = value.isoformat()
+        self.save()
 
     @property
     def path_hassio(self):
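`last_boot` persists a datetime as ISO-8601 text in the JSON config and parses it back on read, falling back to a sentinel epoch value when the stored string is missing or unparseable. A sketch of that round trip; `parse_iso` here is a hedged stand-in for the repo's `parse_datetime` helper, which is not shown in this diff:

```python
from datetime import datetime

DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()

def parse_iso(raw):
    """Stand-in for the repo's parse_datetime helper."""
    try:
        return datetime.strptime(raw, "%Y-%m-%dT%H:%M:%S.%f")
    except (ValueError, TypeError):
        return None

data = {}
# setter side: persist as text so it survives the JSON config file
data['last_boot'] = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%f")

# getter side: parse back, fall back to a sentinel when unparseable
boot = parse_iso(data.get('last_boot', DEFAULT_BOOT_TIME))
if boot is None:
    boot = datetime.utcfromtimestamp(1)
```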
@@ -207,73 +152,95 @@ class CoreConfig(JsonConfig):
     @property
     def addons_repositories(self):
         """Return list of addons custom repositories."""
-        return self._data[ADDONS_CUSTOM_LIST]
+        return self._data[ATTR_ADDONS_CUSTOM_LIST]
 
-    @addons_repositories.setter
-    def addons_repositories(self, repo):
+    def add_addon_repository(self, repo):
         """Add a custom repository to list."""
-        if repo in self._data[ADDONS_CUSTOM_LIST]:
+        if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
             return
 
-        self._data[ADDONS_CUSTOM_LIST].append(repo)
+        self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
         self.save()
 
     def drop_addon_repository(self, repo):
         """Remove a custom repository from list."""
-        if repo not in self._data[ADDONS_CUSTOM_LIST]:
+        if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
             return
 
-        self._data[ADDONS_CUSTOM_LIST].remove(repo)
+        self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
         self.save()
 
     @property
     def security_initialize(self):
         """Return is security was initialize."""
-        return self._data[SECURITY_INITIALIZE]
+        return self._data[ATTR_SECURITY]
 
     @security_initialize.setter
     def security_initialize(self, value):
         """Set is security initialize."""
-        self._data[SECURITY_INITIALIZE] = value
+        self._data[ATTR_SECURITY] = value
         self.save()
 
     @property
     def security_totp(self):
         """Return the TOTP key."""
-        return self._data.get(SECURITY_TOTP)
+        return self._data.get(ATTR_TOTP)
 
     @security_totp.setter
     def security_totp(self, value):
         """Set the TOTP key."""
-        self._data[SECURITY_TOTP] = value
+        self._data[ATTR_TOTP] = value
         self.save()
 
     @property
     def security_password(self):
         """Return the password key."""
-        return self._data.get(SECURITY_PASSWORD)
+        return self._data.get(ATTR_PASSWORD)
 
     @security_password.setter
     def security_password(self, value):
         """Set the password key."""
-        self._data[SECURITY_PASSWORD] = value
+        self._data[ATTR_PASSWORD] = value
         self.save()
 
     @property
     def security_sessions(self):
         """Return api sessions."""
-        return {session: datetime.strptime(until, DATETIME_FORMAT) for
-                session, until in self._data[SECURITY_SESSIONS].items()}
+        return {
+            session: parse_datetime(until) for
+            session, until in self._data[ATTR_SESSIONS].items()
+        }
 
-    @security_sessions.setter
-    def security_sessions(self, value):
+    def add_security_session(self, session, valid):
         """Set the a new session."""
-        session, valid = value
-        if valid is None:
-            self._data[SECURITY_SESSIONS].pop(session, None)
-        else:
-            self._data[SECURITY_SESSIONS].update(
-                {session: valid.strftime(DATETIME_FORMAT)}
-            )
+        self._data[ATTR_SESSIONS].update(
+            {session: valid.isoformat()}
+        )
+        self.save()
+
+    def drop_security_session(self, session):
+        """Delete the a session."""
+        self._data[ATTR_SESSIONS].pop(session, None)
+        self.save()
+
+    @property
+    def audio_output(self):
+        """Return ALSA audio output card,dev."""
+        return self._data.get(ATTR_AUDIO_OUTPUT)
+
+    @audio_output.setter
+    def audio_output(self, value):
+        """Set ALSA audio output card,dev."""
+        self._data[ATTR_AUDIO_OUTPUT] = value
+        self.save()
+
+    @property
+    def audio_input(self):
+        """Return ALSA audio input card,dev."""
+        return self._data.get(ATTR_AUDIO_INPUT)
+
+    @audio_input.setter
+    def audio_input(self, value):
+        """Set ALSA audio input card,dev."""
+        self._data[ATTR_AUDIO_INPUT] = value
         self.save()
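The old session setter abused a `(session, valid)` tuple, with `valid=None` meaning "delete"; it is split here into explicit `add_security_session` / `drop_security_session` methods, with values stored as ISO strings and parsed back on read. A small sketch of the same dict-based pattern:

```python
from datetime import datetime, timedelta

sessions = {}

def add_session(token, valid_until):
    sessions[token] = valid_until.isoformat()

def drop_session(token):
    sessions.pop(token, None)  # silently ignore unknown tokens

add_session('abc123', datetime.utcnow() + timedelta(seconds=900))
drop_session('abc123')
```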
@@ -1,12 +1,11 @@
 """Const file for HassIO."""
 from pathlib import Path
+from ipaddress import ip_network
 
-HASSIO_VERSION = '0.43'
+HASSIO_VERSION = '0.73'
 
 URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
-                      'hassio/master/version.json')
-URL_HASSIO_VERSION_BETA = ('https://raw.githubusercontent.com/home-assistant/'
-                           'hassio/dev/version.json')
+                      'hassio/{}/version.json')
 
 URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
 
@@ -17,18 +16,22 @@ RUN_UPDATE_SUPERVISOR_TASKS = 29100
 RUN_UPDATE_ADDONS_TASKS = 57600
 RUN_RELOAD_ADDONS_TASKS = 28800
 RUN_RELOAD_SNAPSHOTS_TASKS = 72000
-RUN_WATCHDOG_HOMEASSISTANT = 15
+RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
+RUN_WATCHDOG_HOMEASSISTANT_API = 300
 RUN_CLEANUP_API_SESSIONS = 900
 
-RESTART_EXIT_CODE = 100
-
 FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
 FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
 FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
+FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
 
 SOCKET_DOCKER = Path("/var/run/docker.sock")
 SOCKET_HC = Path("/var/run/hassio-hc.sock")
 
+DOCKER_NETWORK = 'hassio'
+DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
+DOCKER_NETWORK_RANGE = ip_network('172.30.33.0/24')
+
 LABEL_VERSION = 'io.hass.version'
 LABEL_ARCH = 'io.hass.arch'
 LABEL_TYPE = 'io.hass.type'
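The new constants pin the internal `hassio` Docker network to a fixed subnet; keeping them as `ip_network` objects makes it straightforward to derive a gateway and per-container addresses. A sketch of what those objects give you:

```python
from ipaddress import ip_network

DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
DOCKER_NETWORK_RANGE = ip_network('172.30.33.0/24')

# first usable host of the /23 is a natural gateway address
gateway = next(DOCKER_NETWORK_MASK.hosts())  # 172.30.32.1

# the /24 range nests inside the /23 and can hand out container IPs
assert DOCKER_NETWORK_RANGE.subnet_of(DOCKER_NETWORK_MASK)  # Python 3.7+
addresses = list(DOCKER_NETWORK_RANGE.hosts())[:3]
```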
@@ -44,16 +47,24 @@ JSON_MESSAGE = 'message'
 RESULT_ERROR = 'error'
 RESULT_OK = 'ok'
 
+CONTENT_TYPE_BINARY = 'application/octet-stream'
+CONTENT_TYPE_PNG = 'image/png'
+CONTENT_TYPE_JSON = 'application/json'
+HEADER_HA_ACCESS = 'x-ha-access'
+
+ATTR_WATCHDOG = 'watchdog'
 ATTR_DATE = 'date'
 ATTR_ARCH = 'arch'
 ATTR_HOSTNAME = 'hostname'
 ATTR_TIMEZONE = 'timezone'
+ATTR_ARGS = 'args'
 ATTR_OS = 'os'
 ATTR_TYPE = 'type'
 ATTR_SOURCE = 'source'
 ATTR_FEATURES = 'features'
 ATTR_ADDONS = 'addons'
 ATTR_VERSION = 'version'
+ATTR_LAST_BOOT = 'last_boot'
 ATTR_LAST_VERSION = 'last_version'
 ATTR_BETA_CHANNEL = 'beta_channel'
 ATTR_NAME = 'name'
@@ -62,13 +73,18 @@ ATTR_DESCRIPTON = 'description'
 ATTR_STARTUP = 'startup'
 ATTR_BOOT = 'boot'
 ATTR_PORTS = 'ports'
+ATTR_PORT = 'port'
+ATTR_SSL = 'ssl'
 ATTR_MAP = 'map'
+ATTR_WEBUI = 'webui'
 ATTR_OPTIONS = 'options'
 ATTR_INSTALLED = 'installed'
 ATTR_DETACHED = 'detached'
 ATTR_STATE = 'state'
 ATTR_SCHEMA = 'schema'
 ATTR_IMAGE = 'image'
+ATTR_LOGO = 'logo'
+ATTR_STDIN = 'stdin'
 ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
 ATTR_REPOSITORY = 'repository'
 ATTR_REPOSITORIES = 'repositories'
@@ -78,6 +94,7 @@ ATTR_PASSWORD = 'password'
 ATTR_TOTP = 'totp'
 ATTR_INITIALIZE = 'initialize'
 ATTR_SESSION = 'session'
+ATTR_SESSIONS = 'sessions'
 ATTR_LOCATON = 'location'
 ATTR_BUILD = 'build'
 ATTR_DEVICES = 'devices'
@@ -90,16 +107,33 @@ ATTR_USER = 'user'
 ATTR_SYSTEM = 'system'
 ATTR_SNAPSHOTS = 'snapshots'
 ATTR_HOMEASSISTANT = 'homeassistant'
+ATTR_HASSIO = 'hassio'
+ATTR_HASSIO_API = 'hassio_api'
+ATTR_HOMEASSISTANT_API = 'homeassistant_api'
 ATTR_FOLDERS = 'folders'
 ATTR_SIZE = 'size'
 ATTR_TYPE = 'type'
 ATTR_TIMEOUT = 'timeout'
 ATTR_AUTO_UPDATE = 'auto_update'
 ATTR_CUSTOM = 'custom'
+ATTR_AUDIO = 'audio'
+ATTR_AUDIO_INPUT = 'audio_input'
+ATTR_AUDIO_OUTPUT = 'audio_output'
+ATTR_INPUT = 'input'
+ATTR_OUTPUT = 'output'
+ATTR_DISK = 'disk'
+ATTR_SERIAL = 'serial'
+ATTR_SECURITY = 'security'
+ATTR_BUILD_FROM = 'build_from'
+ATTR_SQUASH = 'squash'
+ATTR_GPIO = 'gpio'
+ATTR_LEGACY = 'ATTR_LEGACY'
+ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'
 
 STARTUP_INITIALIZE = 'initialize'
-STARTUP_BEFORE = 'before'
-STARTUP_AFTER = 'after'
+STARTUP_SYSTEM = 'system'
+STARTUP_SERVICES = 'services'
+STARTUP_APPLICATION = 'application'
 STARTUP_ONCE = 'once'
 
 BOOT_AUTO = 'auto'
@@ -3,23 +3,28 @@ import asyncio
 import logging
 
 import aiohttp
-import docker
 
 from .addons import AddonManager
 from .api import RestAPI
 from .host_control import HostControl
 from .const import (
-    SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
-    RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
-    RUN_CLEANUP_API_SESSIONS, STARTUP_AFTER, STARTUP_BEFORE,
-    STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS, RUN_UPDATE_ADDONS_TASKS)
+    RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
+    RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT_DOCKER,
+    RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
+    STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
+    RUN_UPDATE_ADDONS_TASKS)
+from .hardware import Hardware
 from .homeassistant import HomeAssistant
 from .scheduler import Scheduler
+from .dock import DockerAPI
 from .dock.supervisor import DockerSupervisor
+from .dns import DNSForward
 from .snapshots import SnapshotsManager
+from .updater import Updater
 from .tasks import (
-    hassio_update, homeassistant_watchdog, api_sessions_cleanup, addons_update)
-from .tools import get_local_ip, fetch_timezone
+    hassio_update, homeassistant_watchdog_docker, api_sessions_cleanup,
+    addons_update)
+from .tools import fetch_timezone
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -33,23 +38,26 @@ class HassIO(object):
         self.loop = loop
         self.config = config
         self.websession = aiohttp.ClientSession(loop=loop)
+        self.updater = Updater(config, loop, self.websession)
         self.scheduler = Scheduler(loop)
         self.api = RestAPI(config, loop)
-        self.dock = docker.DockerClient(
-            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
+        self.hardware = Hardware()
+        self.docker = DockerAPI()
+        self.dns = DNSForward()
 
         # init basic docker container
-        self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)
+        self.supervisor = DockerSupervisor(
+            config, loop, self.docker, self.stop)
 
         # init homeassistant
         self.homeassistant = HomeAssistant(
-            config, loop, self.dock, self.websession)
+            config, loop, self.docker, self.updater)
 
         # init HostControl
         self.host_control = HostControl(loop)
 
         # init addon system
-        self.addons = AddonManager(config, loop, self.dock)
+        self.addons = AddonManager(config, loop, self.docker)
 
         # init snapshot system
         self.snapshots = SnapshotsManager(
@@ -59,15 +67,12 @@ class HassIO(object):
         """Setup HassIO orchestration."""
         # supervisor
         if not await self.supervisor.attach():
-            _LOGGER.fatal("Can't attach to supervisor docker container!")
+            _LOGGER.fatal("Can't setup supervisor docker container!")
             await self.supervisor.cleanup()
 
         # set running arch
         self.config.arch = self.supervisor.arch
 
-        # set api endpoint
-        self.config.api_endpoint = await get_local_ip(self.loop)
-
         # update timezone
         if self.config.timezone == 'UTC':
             self.config.timezone = await fetch_timezone(self.websession)
@@ -80,11 +85,11 @@ class HassIO(object):
             self.host_control.load, RUN_UPDATE_INFO_TASKS)
 
         # rest api views
-        self.api.register_host(self.host_control)
+        self.api.register_host(self.host_control, self.hardware)
         self.api.register_network(self.host_control)
         self.api.register_supervisor(
             self.supervisor, self.snapshots, self.addons, self.host_control,
-            self.websession)
+            self.updater)
         self.api.register_homeassistant(self.homeassistant)
         self.api.register_addons(self.addons)
         self.api.register_security()
@@ -110,13 +115,16 @@ class HassIO(object):
 
         # schedule self update task
         self.scheduler.register_task(
-            hassio_update(self.config, self.supervisor, self.websession),
+            hassio_update(self.supervisor, self.updater),
             RUN_UPDATE_SUPERVISOR_TASKS)
 
         # schedule snapshot update tasks
         self.scheduler.register_task(
             self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
 
+        # start dns forwarding
+        self.loop.create_task(self.dns.start())
+
         # start addon mark as initialize
         await self.addons.auto_boot(STARTUP_INITIALIZE)
 
@@ -125,47 +133,58 @@ class HassIO(object):
         # on release channel, try update itself
         # on beta channel, only read new versions
         await asyncio.wait(
-            [hassio_update(self.config, self.supervisor, self.websession)()],
+            [hassio_update(self.supervisor, self.updater)()],
             loop=self.loop
         )
 
         # start api
         await self.api.start()
-        _LOGGER.info("Start hassio api on %s", self.config.api_endpoint)
+        _LOGGER.info("Start hassio api on %s", self.docker.network.supervisor)
 
         try:
             # HomeAssistant is already running / supervisor have only reboot
-            if await self.homeassistant.is_running():
+            if self.hardware.last_boot == self.config.last_boot:
                 _LOGGER.info("HassIO reboot detected")
                 return
 
-            # start addon mark as before
-            await self.addons.auto_boot(STARTUP_BEFORE)
+            # start addon mark as system
+            await self.addons.auto_boot(STARTUP_SYSTEM)
+
+            # start addon mark as services
+            await self.addons.auto_boot(STARTUP_SERVICES)
 
             # run HomeAssistant
-            await self.homeassistant.run()
+            if self.homeassistant.boot:
+                await self.homeassistant.run()
 
-            # start addon mark as after
-            await self.addons.auto_boot(STARTUP_AFTER)
+            # start addon mark as application
+            await self.addons.auto_boot(STARTUP_APPLICATION)
+
+            # store new last boot
+            self.config.last_boot = self.hardware.last_boot
 
         finally:
             # schedule homeassistant watchdog
             self.scheduler.register_task(
-                homeassistant_watchdog(self.loop, self.homeassistant),
-                RUN_WATCHDOG_HOMEASSISTANT)
+                homeassistant_watchdog_docker(self.loop, self.homeassistant),
+                RUN_WATCHDOG_HOMEASSISTANT_DOCKER)
+
+            # self.scheduler.register_task(
+            #     homeassistant_watchdog_api(self.loop, self.homeassistant),
+            #     RUN_WATCHDOG_HOMEASSISTANT_API)
 
         # If landingpage / run upgrade in background
         if self.homeassistant.version == 'landingpage':
            self.loop.create_task(self.homeassistant.install())
 
-    async def stop(self, exit_code=0):
+    async def stop(self):
         """Stop a running orchestration."""
         # don't process scheduler anymore
         self.scheduler.suspend = True
 
         # process stop tasks
         self.websession.close()
-        await self.api.stop()
+        self.homeassistant.websession.close()
 
-        self.exit_code = exit_code
-        self.loop.stop()
+        # process async stop tasks
+        await asyncio.wait([self.api.stop(), self.dns.stop()], loop=self.loop)
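Reboot detection now compares the kernel boot time reported by the host (`self.hardware.last_boot`) with the boot time persisted in the config; equality means the supervisor container restarted without a host reboot, so add-ons and Home Assistant are left running. The `Hardware` class is not part of this diff; reading `btime` from `/proc/stat` is one plausible source for such a value, sketched here as an assumption:

```python
from datetime import datetime, timezone

def host_last_boot():
    """Read the kernel boot time; /proc/stat's btime is one plausible source."""
    with open('/proc/stat') as stat:
        for line in stat:
            if line.startswith('btime'):
                return datetime.fromtimestamp(int(line.split()[1]), timezone.utc)
    return None

stored_boot = None  # would be loaded from config.json by the supervisor
current_boot = host_last_boot()
if current_boot == stored_boot:
    print("supervisor-only restart detected; skip autostart")
else:
    pass  # full host reboot: start add-ons, then persist current_boot
```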
hassio/dns.py (new file)
@@ -0,0 +1,40 @@
+"""Setup the internal DNS service for host applications."""
+import asyncio
+import logging
+import shlex
+
+_LOGGER = logging.getLogger(__name__)
+
+COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
+
+
+class DNSForward(object):
+    """Manage DNS forwarding to internal DNS."""
+
+    def __init__(self):
+        """Initialize DNS forwarding."""
+        self.proc = None
+
+    async def start(self):
+        """Start DNS forwarding."""
+        try:
+            self.proc = await asyncio.create_subprocess_exec(
+                *shlex.split(COMMAND),
+                stdin=asyncio.subprocess.DEVNULL,
+                stdout=asyncio.subprocess.DEVNULL,
+                stderr=asyncio.subprocess.DEVNULL,
+            )
+        except OSError as err:
+            _LOGGER.error("Can't start DNS forwarding -> %s", err)
+        else:
+            _LOGGER.info("Start DNS port forwarding for host add-ons")
+
+    async def stop(self):
+        """Stop DNS forwarding."""
+        if not self.proc:
+            _LOGGER.warning("DNS forwarding is not running!")
+            return
+
+        self.proc.kill()
+        await self.proc.wait()
+        _LOGGER.info("Stop DNS forwarding")
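The socat invocation is the heart of this file: `UDP-RECVFROM:53,fork` accepts each incoming DNS datagram and forks a child per packet, while `UDP-SENDTO:127.0.0.11:53` relays it to Docker's embedded DNS resolver, so host-network add-ons can resolve names on the internal `hassio` network. A minimal usage sketch, assuming the package layout introduced by this diff:

```python
import asyncio
from hassio.dns import DNSForward  # module path as introduced in this diff

loop = asyncio.get_event_loop()
dns = DNSForward()

loop.run_until_complete(dns.start())  # spawns the socat subprocess
# ... supervisor runs ...
loop.run_until_complete(dns.stop())   # kills socat and reaps the process
```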
@@ -1,353 +1,108 @@
 """Init file for HassIO docker object."""
-import asyncio
 from contextlib import suppress
 import logging
 
 import docker
 
-from ..const import LABEL_VERSION, LABEL_ARCH
+from .network import DockerNetwork
+from ..const import SOCKET_DOCKER
 
 _LOGGER = logging.getLogger(__name__)
 
 
-class DockerBase(object):
-    """Docker hassio wrapper."""
+class DockerAPI(object):
+    """Docker hassio wrapper.
 
-    def __init__(self, config, loop, dock, image=None, timeout=30):
+    This class is not AsyncIO safe!
+    """
+
+    def __init__(self):
         """Initialize docker base wrapper."""
-        self.config = config
-        self.loop = loop
-        self.dock = dock
-        self.image = image
-        self.timeout = timeout
-        self.version = None
-        self.arch = None
-        self._lock = asyncio.Lock(loop=loop)
+        self.docker = docker.DockerClient(
+            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
+        self.network = DockerNetwork(self.docker)
 
     @property
-    def name(self):
-        """Return name of docker container."""
-        return None
+    def images(self):
+        """Return api images."""
+        return self.docker.images
 
     @property
-    def in_progress(self):
-        """Return True if a task is in progress."""
-        return self._lock.locked()
+    def containers(self):
+        """Return api containers."""
+        return self.docker.containers
 
-    def process_metadata(self, metadata, force=False):
-        """Read metadata and set it to object."""
-        # read image
-        if not self.image:
-            self.image = metadata['Config']['Image']
+    @property
+    def api(self):
+        """Return api containers."""
+        return self.docker.api
 
-        # read version
-        need_version = force or not self.version
-        if need_version and LABEL_VERSION in metadata['Config']['Labels']:
-            self.version = metadata['Config']['Labels'][LABEL_VERSION]
-        elif need_version:
-            _LOGGER.warning("Can't read version from %s", self.name)
-
-        # read arch
-        need_arch = force or not self.arch
-        if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
-            self.arch = metadata['Config']['Labels'][LABEL_ARCH]
-
-    async def install(self, tag):
-        """Pull docker image."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute install while a task is in progress")
-            return False
-
-        async with self._lock:
-            return await self.loop.run_in_executor(None, self._install, tag)
-
-    def _install(self, tag):
-        """Pull docker image.
+    def run(self, image, **kwargs):
+        """Create a docker and run it.
 
         Need run inside executor.
         """
+        name = kwargs.get('name', image)
+        network_mode = kwargs.get('network_mode')
+        hostname = kwargs.get('hostname')
+
+        # setup network
+        if network_mode:
+            kwargs['dns'] = [str(self.network.supervisor)]
+        else:
+            kwargs['network'] = None
+
+        # create container
         try:
-            _LOGGER.info("Pull image %s tag %s.", self.image, tag)
-            image = self.dock.images.pull("{}:{}".format(self.image, tag))
-
-            image.tag(self.image, tag='latest')
-            self.process_metadata(image.attrs, force=True)
-        except docker.errors.APIError as err:
-            _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
+            container = self.docker.containers.create(image, **kwargs)
+        except docker.errors.DockerException as err:
+            _LOGGER.error("Can't create container from %s -> %s", name, err)
             return False
 
-        _LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
-        return True
-
-    def exists(self):
-        """Return True if docker image exists in local repo.
-
-        Return a Future.
-        """
-        return self.loop.run_in_executor(None, self._exists)
-
-    def _exists(self):
-        """Return True if docker image exists in local repo.
-
-        Need run inside executor.
-        """
-        try:
-            self.dock.images.get(self.image)
-        except docker.errors.DockerException:
-            return False
-
-        return True
-
-    def is_running(self):
-        """Return True if docker is Running.
-
-        Return a Future.
-        """
-        return self.loop.run_in_executor(None, self._is_running)
-
-    def _is_running(self):
-        """Return True if docker is Running.
-
-        Need run inside executor.
-        """
-        try:
-            container = self.dock.containers.get(self.name)
-            image = self.dock.images.get(self.image)
-        except docker.errors.DockerException:
-            return False
-
-        # container is not running
-        if container.status != 'running':
-            return False
-
-        # we run on a old image, stop and start it
-        if container.image.id != image.id:
-            return False
-
-        return True
-
-    async def attach(self):
-        """Attach to running docker container."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute attach while a task is in progress")
-            return False
-
-        async with self._lock:
-            return await self.loop.run_in_executor(None, self._attach)
-
-    def _attach(self):
-        """Attach to running docker container.
-
-        Need run inside executor.
-        """
-        try:
-            if self.image:
-                obj_data = self.dock.images.get(self.image).attrs
-            else:
-                obj_data = self.dock.containers.get(self.name).attrs
-        except docker.errors.DockerException:
-            return False
-
-        self.process_metadata(obj_data)
-        _LOGGER.info(
-            "Attach to image %s with version %s", self.image, self.version)
+        # attach network
+        if not network_mode:
+            alias = [hostname] if hostname else None
+            if self.network.attach_container(container, alias=alias):
+                self.network.detach_default_bridge(container)
+            else:
+                _LOGGER.warning("Can't attach %s to hassio-net!", name)
+
+        # run container
+        try:
+            container.start()
+        except docker.errors.DockerException as err:
+            _LOGGER.error("Can't start %s -> %s", name, err)
+            return False
 
         return True
 
-    async def run(self):
-        """Run docker image."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute run while a task is in progress")
-            return False
-
-        async with self._lock:
-            return await self.loop.run_in_executor(None, self._run)
-
-    def _run(self):
-        """Run docker image.
+    def run_command(self, image, command=None, **kwargs):
+        """Create a temporary container and run command.
 
         Need run inside executor.
         """
-        raise NotImplementedError()
+        stdout = kwargs.get('stdout', True)
+        stderr = kwargs.get('stderr', True)
 
-    async def stop(self):
-        """Stop/remove docker container."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute stop while a task is in progress")
-            return False
-
-        async with self._lock:
-            await self.loop.run_in_executor(None, self._stop)
-            return True
-
-    def _stop(self):
-        """Stop/remove and remove docker container.
-
-        Need run inside executor.
-        """
+        _LOGGER.info("Run command '%s' on %s", command, image)
         try:
-            container = self.dock.containers.get(self.name)
-        except docker.errors.DockerException:
-            return
+            container = self.docker.containers.run(
+                image,
+                command=command,
+                network=self.network.name,
+                **kwargs
+            )
 
-        if container.status == 'running':
-            _LOGGER.info("Stop %s docker application", self.image)
-            with suppress(docker.errors.DockerException):
-                container.stop(timeout=self.timeout)
+            # wait until command is done
+            exit_code = container.wait()
+            output = container.logs(stdout=stdout, stderr=stderr)
 
+        except docker.errors.DockerException as err:
+            _LOGGER.error("Can't execute command -> %s", err)
+            return (None, b"")
+
+        # cleanup container
         with suppress(docker.errors.DockerException):
-            _LOGGER.info("Clean %s docker application", self.image)
             container.remove(force=True)
 
-    async def remove(self):
-        """Remove docker images."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute remove while a task is in progress")
-            return False
-
-        async with self._lock:
-            return await self.loop.run_in_executor(None, self._remove)
-
-    def _remove(self):
-        """remove docker images.
-
-        Need run inside executor.
-        """
-        # cleanup container
-        self._stop()
-
-        _LOGGER.info(
-            "Remove docker %s with latest and %s", self.image, self.version)
-
-        try:
-            with suppress(docker.errors.ImageNotFound):
-                self.dock.images.remove(
-                    image="{}:latest".format(self.image), force=True)
-
-            with suppress(docker.errors.ImageNotFound):
-                self.dock.images.remove(
-                    image="{}:{}".format(self.image, self.version), force=True)
-
-        except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't remove image %s -> %s", self.image, err)
-            return False
-
-        # clean metadata
-        self.version = None
-        self.arch = None
-        return True
-
-    async def update(self, tag):
-        """Update a docker image."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute update while a task is in progress")
-            return False
-
-        async with self._lock:
-            return await self.loop.run_in_executor(None, self._update, tag)
-
-    def _update(self, tag):
-        """Update a docker image.
-
-        Need run inside executor.
-        """
-        was_running = self._is_running()
-
-        _LOGGER.info(
-            "Update docker %s with %s:%s", self.version, self.image, tag)
-
-        # update docker image
-        if not self._install(tag):
-            return False
-
-        # run or cleanup container
-        if was_running:
-            self._run()
-        else:
-            self._stop()
-
-        # cleanup images
-        self._cleanup()
-
-        return True
-
-    async def logs(self):
-        """Return docker logs of container."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute logs while a task is in progress")
-            return b""
-
-        async with self._lock:
-            return await self.loop.run_in_executor(None, self._logs)
-
-    def _logs(self):
-        """Return docker logs of container.
-
-        Need run inside executor.
-        """
-        try:
-            container = self.dock.containers.get(self.name)
-        except docker.errors.DockerException:
-            return b""
-
-        try:
-            return container.logs(tail=100, stdout=True, stderr=True)
-        except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't grap logs from %s -> %s", self.image, err)
-
-    async def restart(self):
-        """Restart docker container."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute restart while a task is in progress")
-            return False
-
-        async with self._lock:
-            return await self.loop.run_in_executor(None, self._restart)
-
-    def _restart(self):
-        """Restart docker container.
-
-        Need run inside executor.
-        """
-        try:
-            container = self.dock.containers.get(self.name)
-        except docker.errors.DockerException:
-            return False
-
-        _LOGGER.info("Restart %s", self.image)
-
-        try:
-            container.restart(timeout=self.timeout)
-        except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't restart %s -> %s", self.image, err)
-            return False
-
-        return True
-
-    async def cleanup(self):
-        """Check if old version exists and cleanup."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute cleanup while a task is in progress")
-            return False
-
-        async with self._lock:
-            await self.loop.run_in_executor(None, self._cleanup)
-
-    def _cleanup(self):
-        """Check if old version exists and cleanup.
-
-        Need run inside executor.
-        """
-        try:
-            latest = self.dock.images.get(self.image)
-        except docker.errors.DockerException:
-            _LOGGER.warning("Can't find %s for cleanup", self.image)
-            return
-
-        for image in self.dock.images.list(name=self.image):
-            if latest.id == image.id:
-                continue
-
-            with suppress(docker.errors.DockerException):
-                _LOGGER.info("Cleanup docker images: %s", image.tags)
-                self.dock.images.remove(image.id, force=True)
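The per-container `DockerBase` wrapper moves out of this file (into `dock/interface.py`, per the next diff) and is replaced by a single `DockerAPI` facade: `run` wraps create / attach-network / start and returns a bool, while `run_command` returns an `(exit_code, logs)` tuple from a throwaway container. A hedged usage sketch — the image names are illustrative only:

```python
from hassio.dock import DockerAPI  # module path as in this diff

docker_api = DockerAPI()

# long-running container attached to the internal hassio network
ok = docker_api.run(
    'homeassistant/amd64-addon-example',  # illustrative image name
    name='addon_example',
    hostname='example',
    detach=True,
)

# one-shot command in a temporary container; logs come back as bytes
exit_code, output = docker_api.run_command('alpine:3.6', command='uname -a')
```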
@@ -1,43 +1,96 @@
 """Init file for HassIO addon docker object."""
 import logging
-from pathlib import Path
-import shutil
+import os
 
 import docker
 import requests
 
-from . import DockerBase
-from .util import dockerfile_template
+from .interface import DockerInterface
+from .util import docker_process
+from ..addons.build import AddonBuild
 from ..const import (
-    META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
+    MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
 
 _LOGGER = logging.getLogger(__name__)
 
+AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
 
-class DockerAddon(DockerBase):
+
+class DockerAddon(DockerInterface):
     """Docker hassio wrapper for HomeAssistant."""
 
-    def __init__(self, config, loop, dock, addon):
+    def __init__(self, config, loop, api, addon):
         """Initialize docker homeassistant wrapper."""
         super().__init__(
-            config, loop, dock, image=addon.image, timeout=addon.timeout)
+            config, loop, api, image=addon.image, timeout=addon.timeout)
         self.addon = addon
 
+    def process_metadata(self, metadata, force=False):
+        """Use addon data instead meta data with legacy."""
+        if not self.addon.legacy:
+            return super().process_metadata(metadata, force=force)
+
+        # set meta data
+        if not self.version or force:
+            if force:  # called on install/update/build
+                self.version = self.addon.last_version
+            else:
+                self.version = self.addon.version_installed
+
+        if not self.arch:
+            self.arch = self.config.arch
+
     @property
     def name(self):
         """Return name of docker container."""
         return "addon_{}".format(self.addon.slug)
 
+    @property
+    def hostname(self):
+        """Return slug/id of addon."""
+        return self.addon.slug.replace('_', '-')
+
     @property
     def environment(self):
         """Return environment for docker add-on."""
         addon_env = self.addon.environment or {}
+        if self.addon.with_audio:
+            addon_env.update({
+                'ALSA_OUTPUT': self.addon.audio_output,
+                'ALSA_INPUT': self.addon.audio_input,
+            })
 
         return {
             **addon_env,
             'TZ': self.config.timezone,
         }
 
+    @property
+    def devices(self):
+        """Return needed devices."""
+        devices = self.addon.devices or []
+
+        # use audio devices
+        if self.addon.with_audio and AUDIO_DEVICE not in devices:
+            devices.append(AUDIO_DEVICE)
+
+        # Return None if no devices is present
+        if devices:
+            return devices
+        return None
+
+    @property
+    def ports(self):
+        """Filter None from addon ports."""
+        if not self.addon.ports:
+            return None
+
+        return {
+            container_port: host_port
+            for container_port, host_port in self.addon.ports.items()
+            if host_port
+        }
+
     @property
     def tmpfs(self):
         """Return tmpfs for docker add-on."""
@@ -46,6 +99,21 @@ class DockerAddon(DockerBase):
             return {"/tmpfs": "{}".format(options)}
         return None
 
+    @property
+    def network_mapping(self):
+        """Return hosts mapping."""
+        return {
+            'homeassistant': self.docker.network.gateway,
+            'hassio': self.docker.network.supervisor,
+        }
+
+    @property
+    def network_mode(self):
+        """Return network mode for addon."""
+        if self.addon.host_network:
+            return 'host'
+        return None
+
     @property
     def volumes(self):
         """Generate volumes for mappings."""
@@ -56,6 +124,7 @@ class DockerAddon(DockerBase):
 
         addon_mapping = self.addon.map_volumes
 
+        # setup config mappings
         if MAP_CONFIG in addon_mapping:
             volumes.update({
                 str(self.config.path_extern_config): {
@@ -86,6 +155,17 @@ class DockerAddon(DockerBase):
                     'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
                 }})
 
+        # init other hardware mappings
+        if self.addon.with_gpio:
+            volumes.update({
+                '/sys/class/gpio': {
+                    'bind': '/sys/class/gpio', 'mode': "rw"
+                },
+                '/sys/devices/platform/soc': {
+                    'bind': '/sys/devices/platform/soc', 'mode': "rw"
+                },
+            })
+
         return volumes
 
     def _run(self):
@@ -103,27 +183,27 @@ class DockerAddon(DockerBase):
         if not self.addon.write_options():
             return False

-        try:
-            self.dock.containers.run(
-                self.image,
-                name=self.name,
-                detach=True,
-                network_mode=self.addon.network_mode,
-                ports=self.addon.ports,
-                devices=self.addon.devices,
-                cap_add=self.addon.privileged,
-                environment=self.environment,
-                volumes=self.volumes,
-                tmpfs=self.tmpfs
-            )
-
-        except docker.errors.DockerException as err:
-            _LOGGER.error("Can't run %s -> %s", self.image, err)
-            return False
-
-        _LOGGER.info(
-            "Start docker addon %s with version %s", self.image, self.version)
-        return True
+        ret = self.docker.run(
+            self.image,
+            name=self.name,
+            hostname=self.hostname,
+            detach=True,
+            stdin_open=self.addon.with_stdin,
+            network_mode=self.network_mode,
+            ports=self.ports,
+            extra_hosts=self.network_mapping,
+            devices=self.devices,
+            cap_add=self.addon.privileged,
+            environment=self.environment,
+            volumes=self.volumes,
+            tmpfs=self.tmpfs
+        )
+
+        if ret:
+            _LOGGER.info("Start docker addon %s with version %s",
+                         self.image, self.version)
+
+        return ret
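Editor's note: `self.docker.run` replaces direct `containers.run` calls with a central wrapper whose definition is outside this excerpt. A minimal sketch of such a wrapper, offered only as orientation (the class body below is an assumption; only the call shape is taken from the diff):

import logging

import docker

_LOGGER = logging.getLogger(__name__)


class DockerAPI(object):
    """Sketch of a central docker wrapper (assumption, not this PR's code)."""

    def __init__(self):
        self.docker = docker.from_env()

    @property
    def containers(self):
        """Expose the container collection of the underlying client."""
        return self.docker.containers

    def run(self, image, **kwargs):
        """Create and run a container; return it, or None on error."""
        name = kwargs.get('name', image)
        try:
            container = self.docker.containers.run(image, **kwargs)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", name, err)
            return None
        return container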
     def _install(self, tag):
         """Pull docker image or build it.

@@ -140,55 +220,26 @@ class DockerAddon(DockerBase):

         Need run inside executor.
         """
-        build_dir = Path(self.config.path_tmp, self.addon.slug)
-        try:
-            # prepare temporary addon build folder
-            try:
-                source = self.addon.path_addon_location
-                shutil.copytree(str(source), str(build_dir))
-            except shutil.Error as err:
-                _LOGGER.error("Can't copy %s to temporary build folder -> %s",
-                              source, err)
-                return False
-
-            # prepare Dockerfile
-            try:
-                dockerfile_template(
-                    Path(build_dir, 'Dockerfile'), self.config.arch,
-                    tag, META_ADDON)
-            except OSError as err:
-                _LOGGER.error("Can't prepare dockerfile -> %s", err)
-
-            # run docker build
-            try:
-                build_tag = "{}:{}".format(self.image, tag)
-
-                _LOGGER.info("Start build %s on %s", build_tag, build_dir)
-                image = self.dock.images.build(
-                    path=str(build_dir), tag=build_tag, pull=True)
-
-                image.tag(self.image, tag='latest')
-                self.process_metadata(image.attrs, force=True)
-
-            except (docker.errors.DockerException, TypeError) as err:
-                _LOGGER.error("Can't build %s -> %s", build_tag, err)
-                return False
-
-            _LOGGER.info("Build %s done", build_tag)
-            return True
-
-        finally:
-            shutil.rmtree(str(build_dir), ignore_errors=True)
-
-    async def export_image(self, path):
-        """Export current images into a tar file."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute export while a task is in progress")
-            return False
-
-        async with self._lock:
-            return await self.loop.run_in_executor(
-                None, self._export_image, path)
+        build_env = AddonBuild(self.config, self.addon)
+
+        _LOGGER.info("Start build %s:%s", self.image, tag)
+        try:
+            image = self.docker.images.build(**build_env.get_docker_args(tag))
+
+            image.tag(self.image, tag='latest')
+            self.process_metadata(image.attrs, force=True)
+
+        except (docker.errors.DockerException) as err:
+            _LOGGER.error("Can't build %s:%s -> %s", self.image, tag, err)
+            return False
+
+        _LOGGER.info("Build %s:%s done", self.image, tag)
+        return True
+
+    @docker_process
+    def export_image(self, path):
+        """Export current images into a tar file."""
+        return self.loop.run_in_executor(None, self._export_image, path)

     def _export_image(self, tar_file):
         """Export current images into a tar file.
@@ -196,7 +247,7 @@ class DockerAddon(DockerBase):
         Need run inside executor.
         """
         try:
-            image = self.dock.api.get_image(self.image)
+            image = self.docker.api.get_image(self.image)
         except docker.errors.DockerException as err:
             _LOGGER.error("Can't fetch image %s -> %s", self.image, err)
             return False
@@ -212,15 +263,10 @@ class DockerAddon(DockerBase):
         _LOGGER.info("Export image %s to %s", self.image, tar_file)
         return True

-    async def import_image(self, path, tag):
+    @docker_process
+    def import_image(self, path, tag):
         """Import a tar file as image."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute import while a task is in progress")
-            return False
-
-        async with self._lock:
-            return await self.loop.run_in_executor(
-                None, self._import_image, path, tag)
+        return self.loop.run_in_executor(None, self._import_image, path, tag)

     def _import_image(self, tar_file, tag):
         """Import a tar file as image.
@@ -229,9 +275,9 @@ class DockerAddon(DockerBase):
         """
         try:
             with tar_file.open("rb") as read_tar:
-                self.dock.api.load_image(read_tar)
+                self.docker.api.load_image(read_tar)

-            image = self.dock.images.get(self.image)
+            image = self.docker.images.get(self.image)
             image.tag(self.image, tag=tag)
         except (docker.errors.DockerException, OSError) as err:
             _LOGGER.error("Can't import image %s -> %s", self.image, err)
@@ -250,3 +296,35 @@ class DockerAddon(DockerBase):
         """
         self._stop()
         return self._run()
+
+    @docker_process
+    def write_stdin(self, data):
+        """Write to add-on stdin."""
+        return self.loop.run_in_executor(None, self._write_stdin, data)
+
+    def _write_stdin(self, data):
+        """Write to add-on stdin.
+
+        Need run inside executor.
+        """
+        if not self._is_running():
+            return False
+
+        try:
+            # load needed docker objects
+            container = self.docker.containers.get(self.name)
+            socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
+        except docker.errors.DockerException as err:
+            _LOGGER.error("Can't attach to %s stdin -> %s", self.name, err)
+            return False
+
+        try:
+            # write to stdin
+            data += b"\n"
+            os.write(socket.fileno(), data)
+            socket.close()
+        except OSError as err:
+            _LOGGER.error("Can't write to %s stdin -> %s", self.name, err)
+            return False
+
+        return True
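For context, a hypothetical caller of the new `write_stdin` API (the handler name and error handling below are made up for illustration; `addon` is a DockerAddon instance from this diff):

async def api_stdin(addon, request_body: bytes):
    """Forward raw bytes to the add-on's stdin."""
    # write_stdin is wrapped by @docker_process, so it is awaitable and
    # returns False when the data could not be delivered
    if not await addon.write_stdin(request_body):
        raise RuntimeError("Add-on did not accept stdin data")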
@@ -3,19 +3,19 @@ import logging

 import docker

-from . import DockerBase
+from .interface import DockerInterface

 _LOGGER = logging.getLogger(__name__)

 HASS_DOCKER_NAME = 'homeassistant'


-class DockerHomeAssistant(DockerBase):
+class DockerHomeAssistant(DockerInterface):
     """Docker hassio wrapper for HomeAssistant."""

-    def __init__(self, config, loop, dock, data):
+    def __init__(self, config, loop, api, data):
         """Initialize docker homeassistant wrapper."""
-        super().__init__(config, loop, dock, image=data.image)
+        super().__init__(config, loop, api, image=data.image)
         self.data = data

     @property
@@ -46,31 +46,68 @@ class DockerHomeAssistant(DockerBase):
         # cleanup
         self._stop()

-        try:
-            self.dock.containers.run(
-                self.image,
-                name=self.name,
-                detach=True,
-                privileged=True,
-                devices=self.devices,
-                network_mode='host',
-                environment={
-                    'HASSIO': self.config.api_endpoint,
-                    'TZ': self.config.timezone,
-                },
-                volumes={
-                    str(self.config.path_extern_config):
-                        {'bind': '/config', 'mode': 'rw'},
-                    str(self.config.path_extern_ssl):
-                        {'bind': '/ssl', 'mode': 'ro'},
-                    str(self.config.path_extern_share):
-                        {'bind': '/share', 'mode': 'rw'},
-                })
-
-        except docker.errors.DockerException as err:
-            _LOGGER.error("Can't run %s -> %s", self.image, err)
-            return False
-
-        _LOGGER.info(
-            "Start homeassistant %s with version %s", self.image, self.version)
-        return True
+        ret = self.docker.run(
+            self.image,
+            name=self.name,
+            hostname=self.name,
+            detach=True,
+            privileged=True,
+            devices=self.devices,
+            network_mode='host',
+            environment={
+                'HASSIO': self.docker.network.supervisor,
+                'TZ': self.config.timezone,
+            },
+            volumes={
+                str(self.config.path_extern_config):
+                    {'bind': '/config', 'mode': 'rw'},
+                str(self.config.path_extern_ssl):
+                    {'bind': '/ssl', 'mode': 'ro'},
+                str(self.config.path_extern_share):
+                    {'bind': '/share', 'mode': 'rw'},
+            }
+        )
+
+        if ret:
+            _LOGGER.info("Start homeassistant %s with version %s",
+                         self.image, self.version)
+
+        return ret
+
+    def _execute_command(self, command):
+        """Create a temporary container and run command.
+
+        Need run inside executor.
+        """
+        return self.docker.run_command(
+            self.image,
+            command,
+            detach=True,
+            stdout=True,
+            stderr=True,
+            environment={
+                'TZ': self.config.timezone,
+            },
+            volumes={
+                str(self.config.path_extern_config):
+                    {'bind': '/config', 'mode': 'ro'},
+                str(self.config.path_extern_ssl):
+                    {'bind': '/ssl', 'mode': 'ro'},
+            }
+        )
+
+    def is_initialize(self):
+        """Return True if docker container exists."""
+        return self.loop.run_in_executor(None, self._is_initialize)
+
+    def _is_initialize(self):
+        """Return True if docker container exists.
+
+        Need run inside executor.
+        """
+        try:
+            self.docker.containers.get(self.name)
+        except docker.errors.DockerException:
+            return False
+
+        return True
hassio/dock/interface.py (new file, 327 lines)
@@ -0,0 +1,327 @@
+"""Interface class for HassIO docker object."""
+import asyncio
+from contextlib import suppress
+import logging
+
+import docker
+
+from .util import docker_process
+from ..const import LABEL_VERSION, LABEL_ARCH
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class DockerInterface(object):
+    """Docker hassio interface."""
+
+    def __init__(self, config, loop, api, image=None, timeout=30):
+        """Initialize docker base wrapper."""
+        self.config = config
+        self.loop = loop
+        self.docker = api
+
+        self.image = image
+        self.timeout = timeout
+        self.version = None
+        self.arch = None
+        self._lock = asyncio.Lock(loop=loop)
+
+    @property
+    def name(self):
+        """Return name of docker container."""
+        return None
+
+    @property
+    def in_progress(self):
+        """Return True if a task is in progress."""
+        return self._lock.locked()
+
+    def process_metadata(self, metadata, force=False):
+        """Read metadata and set it to object."""
+        # read image
+        if not self.image:
+            self.image = metadata['Config']['Image']
+
+        # read version
+        need_version = force or not self.version
+        if need_version and LABEL_VERSION in metadata['Config']['Labels']:
+            self.version = metadata['Config']['Labels'][LABEL_VERSION]
+        elif need_version:
+            _LOGGER.warning("Can't read version from %s", self.name)
+
+        # read arch
+        need_arch = force or not self.arch
+        if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
+            self.arch = metadata['Config']['Labels'][LABEL_ARCH]
+
+    @docker_process
+    def install(self, tag):
+        """Pull docker image."""
+        return self.loop.run_in_executor(None, self._install, tag)
+
+    def _install(self, tag):
+        """Pull docker image.
+
+        Need run inside executor.
+        """
+        try:
+            _LOGGER.info("Pull image %s tag %s.", self.image, tag)
+            image = self.docker.images.pull("{}:{}".format(self.image, tag))
+
+            image.tag(self.image, tag='latest')
+            self.process_metadata(image.attrs, force=True)
+        except docker.errors.APIError as err:
+            _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
+            return False
+
+        _LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
+        return True
+
+    def exists(self):
+        """Return True if docker image exists in local repo."""
+        return self.loop.run_in_executor(None, self._exists)
+
+    def _exists(self):
+        """Return True if docker image exists in local repo.
+
+        Need run inside executor.
+        """
+        try:
+            self.docker.images.get(self.image)
+        except docker.errors.DockerException:
+            return False
+
+        return True
+
+    def is_running(self):
+        """Return True if docker is Running.
+
+        Return a Future.
+        """
+        return self.loop.run_in_executor(None, self._is_running)
+
+    def _is_running(self):
+        """Return True if docker is Running.
+
+        Need run inside executor.
+        """
+        try:
+            container = self.docker.containers.get(self.name)
+            image = self.docker.images.get(self.image)
+        except docker.errors.DockerException:
+            return False
+
+        # container is not running
+        if container.status != 'running':
+            return False
+
+        # we run on a old image, stop and start it
+        if container.image.id != image.id:
+            return False
+
+        return True
+
+    @docker_process
+    def attach(self):
+        """Attach to running docker container."""
+        return self.loop.run_in_executor(None, self._attach)
+
+    def _attach(self):
+        """Attach to running docker container.
+
+        Need run inside executor.
+        """
+        try:
+            if self.image:
+                obj_data = self.docker.images.get(self.image).attrs
+            else:
+                obj_data = self.docker.containers.get(self.name).attrs
+        except docker.errors.DockerException:
+            return False
+
+        self.process_metadata(obj_data)
+        _LOGGER.info(
+            "Attach to image %s with version %s", self.image, self.version)
+
+        return True
+
+    @docker_process
+    def run(self):
+        """Run docker image."""
+        return self.loop.run_in_executor(None, self._run)
+
+    def _run(self):
+        """Run docker image.
+
+        Need run inside executor.
+        """
+        raise NotImplementedError()
+
+    @docker_process
+    def stop(self):
+        """Stop/remove docker container."""
+        return self.loop.run_in_executor(None, self._stop)
+
+    def _stop(self):
+        """Stop/remove and remove docker container.
+
+        Need run inside executor.
+        """
+        try:
+            container = self.docker.containers.get(self.name)
+        except docker.errors.DockerException:
+            return False
+
+        if container.status == 'running':
+            _LOGGER.info("Stop %s docker application", self.image)
+            with suppress(docker.errors.DockerException):
+                container.stop(timeout=self.timeout)
+
+        with suppress(docker.errors.DockerException):
+            _LOGGER.info("Clean %s docker application", self.image)
+            container.remove(force=True)
+
+        return True
+
+    @docker_process
+    def remove(self):
+        """Remove docker images."""
+        return self.loop.run_in_executor(None, self._remove)
+
+    def _remove(self):
+        """remove docker images.
+
+        Need run inside executor.
+        """
+        # cleanup container
+        self._stop()
+
+        _LOGGER.info(
+            "Remove docker %s with latest and %s", self.image, self.version)
+
+        try:
+            with suppress(docker.errors.ImageNotFound):
+                self.docker.images.remove(
+                    image="{}:latest".format(self.image), force=True)
+
+            with suppress(docker.errors.ImageNotFound):
+                self.docker.images.remove(
+                    image="{}:{}".format(self.image, self.version), force=True)
+
+        except docker.errors.DockerException as err:
+            _LOGGER.warning("Can't remove image %s -> %s", self.image, err)
+            return False
+
+        # clean metadata
+        self.version = None
+        self.arch = None
+
+        return True
+
+    @docker_process
+    def update(self, tag):
+        """Update a docker image."""
+        return self.loop.run_in_executor(None, self._update, tag)
+
+    def _update(self, tag):
+        """Update a docker image.
+
+        Need run inside executor.
+        """
+        _LOGGER.info(
+            "Update docker %s with %s:%s", self.version, self.image, tag)
+
+        # update docker image
+        if not self._install(tag):
+            return False
+
+        # stop container & cleanup
+        self._stop()
+        self._cleanup()
+
+        return True
+
+    def logs(self):
+        """Return docker logs of container.
+
+        Return a Future.
+        """
+        return self.loop.run_in_executor(None, self._logs)
+
+    def _logs(self):
+        """Return docker logs of container.
+
+        Need run inside executor.
+        """
+        try:
+            container = self.docker.containers.get(self.name)
+        except docker.errors.DockerException:
+            return b""
+
+        try:
+            return container.logs(tail=100, stdout=True, stderr=True)
+        except docker.errors.DockerException as err:
+            _LOGGER.warning("Can't grap logs from %s -> %s", self.image, err)
+
+    @docker_process
+    def restart(self):
+        """Restart docker container."""
+        return self.loop.run_in_executor(None, self._restart)
+
+    def _restart(self):
+        """Restart docker container.
+
+        Need run inside executor.
+        """
+        try:
+            container = self.docker.containers.get(self.name)
+        except docker.errors.DockerException:
+            return False
+
+        _LOGGER.info("Restart %s", self.image)
+
+        try:
+            container.restart(timeout=self.timeout)
+        except docker.errors.DockerException as err:
+            _LOGGER.warning("Can't restart %s -> %s", self.image, err)
+            return False
+
+        return True
+
+    @docker_process
+    def cleanup(self):
+        """Check if old version exists and cleanup."""
+        return self.loop.run_in_executor(None, self._cleanup)
+
+    def _cleanup(self):
+        """Check if old version exists and cleanup.
+
+        Need run inside executor.
+        """
+        try:
+            latest = self.docker.images.get(self.image)
+        except docker.errors.DockerException:
+            _LOGGER.warning("Can't find %s for cleanup", self.image)
+            return False
+
+        for image in self.docker.images.list(name=self.image):
+            if latest.id == image.id:
+                continue
+
+            with suppress(docker.errors.DockerException):
+                _LOGGER.info("Cleanup docker images: %s", image.tags)
+                self.docker.images.remove(image.id, force=True)
+
+        return True
+
+    @docker_process
+    def execute_command(self, command):
+        """Create a temporary container and run command."""
+        return self.loop.run_in_executor(None, self._execute_command, command)
+
+    def _execute_command(self, command):
+        """Create a temporary container and run command.
+
+        Need run inside executor.
+        """
+        raise NotImplementedError()
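As a reading aid, a minimal sketch of how a concrete wrapper plugs into this interface; the subclass and its container name are hypothetical, only the `_run` override point comes from the file above:

class DockerExample(DockerInterface):
    """Sketch of a concrete DockerInterface subclass (hypothetical)."""

    @property
    def name(self):
        """Return name of docker container."""
        return "example"

    def _run(self):
        """Run docker image (executor thread)."""
        if self._is_running():
            return True

        # make sure no stale container blocks the name
        self._stop()
        return self.docker.run(self.image, name=self.name, detach=True)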
hassio/dock/network.py (new file, 89 lines)
@@ -0,0 +1,89 @@
+"""Internal network manager for HassIO."""
+import logging
+
+import docker
+
+from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class DockerNetwork(object):
+    """Internal HassIO Network."""
+
+    def __init__(self, dock):
+        """Initialize internal hassio network."""
+        self.docker = dock
+        self.network = self._get_network()
+
+    @property
+    def name(self):
+        """Return name of network."""
+        return DOCKER_NETWORK
+
+    @property
+    def containers(self):
+        """Return of connected containers from network."""
+        return self.network.containers
+
+    @property
+    def gateway(self):
+        """Return gateway of the network."""
+        return DOCKER_NETWORK_MASK[1]
+
+    @property
+    def supervisor(self):
+        """Return supervisor of the network."""
+        return DOCKER_NETWORK_MASK[2]
+
+    def _get_network(self):
+        """Get HassIO network."""
+        try:
+            return self.docker.networks.get(DOCKER_NETWORK)
+        except docker.errors.NotFound:
+            _LOGGER.info("Can't find HassIO network, create new network")
+
+        ipam_pool = docker.types.IPAMPool(
+            subnet=str(DOCKER_NETWORK_MASK),
+            gateway=str(self.gateway),
+            iprange=str(DOCKER_NETWORK_RANGE)
+        )
+
+        ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
+
+        return self.docker.networks.create(
+            DOCKER_NETWORK, driver='bridge', ipam=ipam_config, options={
+                "com.docker.network.bridge.name": DOCKER_NETWORK,
+            })
+
+    def attach_container(self, container, alias=None, ipv4=None):
+        """Attach container to hassio network.
+
+        Need run inside executor.
+        """
+        ipv4 = str(ipv4) if ipv4 else None
+
+        try:
+            self.network.connect(container, aliases=alias, ipv4_address=ipv4)
+        except docker.errors.APIError as err:
+            _LOGGER.error("Can't link container to hassio-net -> %s", err)
+            return False
+
+        self.network.reload()
+        return True
+
+    def detach_default_bridge(self, container):
+        """Detach default docker bridge.
+
+        Need run inside executor.
+        """
+        try:
+            default_network = self.docker.networks.get('bridge')
+            default_network.disconnect(container)
+
+        except docker.errors.NotFound:
+            return
+
+        except docker.errors.APIError as err:
+            _LOGGER.warning(
+                "Can't disconnect container from default -> %s", err)
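The `gateway`/`supervisor` properties work because `ipaddress.ip_network` objects support integer indexing into their address range; a quick standalone illustration (the /23 subnet is an example value, not taken from this excerpt):

import ipaddress

net = ipaddress.ip_network('172.30.32.0/23')  # example subnet
print(net[1])  # 172.30.32.1 -> would be the gateway address
print(net[2])  # 172.30.32.2 -> would be the supervisor address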
@@ -2,18 +2,20 @@
 import logging
 import os

-from . import DockerBase
-from ..const import RESTART_EXIT_CODE
+import docker
+
+from .interface import DockerInterface
+from .util import docker_process

 _LOGGER = logging.getLogger(__name__)


-class DockerSupervisor(DockerBase):
+class DockerSupervisor(DockerInterface):
     """Docker hassio wrapper for HomeAssistant."""

-    def __init__(self, config, loop, dock, stop_callback, image=None):
+    def __init__(self, config, loop, api, stop_callback, image=None):
         """Initialize docker base wrapper."""
-        super().__init__(config, loop, dock, image=image)
+        super().__init__(config, loop, api, image=image)
         self.stop_callback = stop_callback

     @property
@@ -21,20 +23,38 @@ class DockerSupervisor(DockerBase):
         """Return name of docker container."""
         return os.environ['SUPERVISOR_NAME']

+    def _attach(self):
+        """Attach to running docker container.
+
+        Need run inside executor.
+        """
+        try:
+            container = self.docker.containers.get(self.name)
+        except docker.errors.DockerException:
+            return False
+
+        self.process_metadata(container.attrs)
+        _LOGGER.info("Attach to supervisor %s with version %s",
+                     self.image, self.version)
+
+        # if already attach
+        if container in self.docker.network.containers:
+            return True
+
+        # attach to network
+        return self.docker.network.attach_container(
+            container, alias=['hassio'], ipv4=self.docker.network.supervisor)
+
+    @docker_process
     async def update(self, tag):
         """Update a supervisor docker image."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute update while a task is in progress")
-            return False
-
         _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)

-        async with self._lock:
-            if await self.loop.run_in_executor(None, self._install, tag):
-                self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
-                return True
+        if await self.loop.run_in_executor(None, self._install, tag):
+            self.loop.call_later(1, self.loop.stop)
+            return True

         return False

     async def run(self):
         """Run docker image."""
@@ -1,42 +1,20 @@
 """HassIO docker utilitys."""
-import re
+import logging

-from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
+_LOGGER = logging.getLogger(__name__)

-HASSIO_BASE_IMAGE = {
-    ARCH_ARMHF: "homeassistant/armhf-base:latest",
-    ARCH_AARCH64: "homeassistant/aarch64-base:latest",
-    ARCH_I386: "homeassistant/i386-base:latest",
-    ARCH_AMD64: "homeassistant/amd64-base:latest",
-}
-
-TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")
-
-
-def dockerfile_template(dockerfile, arch, version, meta_type):
-    """Prepare a Hass.IO dockerfile."""
-    buff = []
-    hassio_image = HASSIO_BASE_IMAGE[arch]
-    custom_image = re.compile(r"^#{}:FROM".format(arch))
-
-    # read docker
-    with dockerfile.open('r') as dock_input:
-        for line in dock_input:
-            line = TMPL_IMAGE.sub(hassio_image, line)
-            line = custom_image.sub("FROM", line)
-            buff.append(line)
-
-    # add metadata
-    buff.append(create_metadata(version, arch, meta_type))
-
-    # write docker
-    with dockerfile.open('w') as dock_output:
-        dock_output.writelines(buff)
-
-
-def create_metadata(version, arch, meta_type):
-    """Generate docker label layer for hassio."""
-    return ('LABEL io.hass.version="{}" '
-            'io.hass.arch="{}" '
-            'io.hass.type="{}"').format(version, arch, meta_type)
+
+# pylint: disable=protected-access
+def docker_process(method):
+    """Wrap function with only run once."""
+    async def wrap_api(api, *args, **kwargs):
+        """Return api wrapper."""
+        if api._lock.locked():
+            _LOGGER.error(
+                "Can't excute %s while a task is in progress", method.__name__)
+            return False
+
+        async with api._lock:
+            return await method(api, *args, **kwargs)
+
+    return wrap_api
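A small self-contained demo of the locking behavior `docker_process` implements, assuming the decorator above is importable from `hassio.dock.util` (the `FakeDocker` class is made up; the `loop=` arguments match the 2017-era asyncio style used in this diff):

import asyncio

from hassio.dock.util import docker_process


class FakeDocker:
    """Hypothetical object that only mimics the _lock attribute."""

    def __init__(self, loop):
        self._lock = asyncio.Lock(loop=loop)

    @docker_process
    async def slow_task(self):
        await asyncio.sleep(0.1)
        return True


async def main(loop):
    api = FakeDocker(loop)
    # the second call starts while the first still holds the lock,
    # so it is rejected and returns False instead of running twice
    results = await asyncio.gather(api.slow_task(), api.slow_task())
    print(results)  # expected: [True, False]

loop = asyncio.get_event_loop()
loop.run_until_complete(main(loop))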
hassio/hardware.py (new file, 120 lines)
@@ -0,0 +1,120 @@
+"""Read hardware info from system."""
+from datetime import datetime
+import logging
+from pathlib import Path
+import re
+
+import pyudev
+
+from .const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES
+
+_LOGGER = logging.getLogger(__name__)
+
+ASOUND_CARDS = Path("/proc/asound/cards")
+RE_CARDS = re.compile(r"(\d+) \[(\w*) *\]: (.*\w)")
+
+ASOUND_DEVICES = Path("/proc/asound/devices")
+RE_DEVICES = re.compile(r"\[.*(\d+)- (\d+).*\]: ([\w ]*)")
+
+PROC_STAT = Path("/proc/stat")
+RE_BOOT_TIME = re.compile(r"btime (\d+)")
+
+GPIO_DEVICES = Path("/sys/class/gpio")
+
+
+class Hardware(object):
+    """Represent a interface to procfs, sysfs and udev."""
+
+    def __init__(self):
+        """Init hardware object."""
+        self.context = pyudev.Context()
+
+    @property
+    def serial_devices(self):
+        """Return all serial and connected devices."""
+        dev_list = set()
+        for device in self.context.list_devices(subsystem='tty'):
+            if 'ID_VENDOR' in device:
+                dev_list.add(device.device_node)
+
+        return dev_list
+
+    @property
+    def input_devices(self):
+        """Return all input devices."""
+        dev_list = set()
+        for device in self.context.list_devices(subsystem='input'):
+            if 'NAME' in device:
+                dev_list.add(device['NAME'].replace('"', ''))
+
+        return dev_list
+
+    @property
+    def disk_devices(self):
+        """Return all disk devices."""
+        dev_list = set()
+        for device in self.context.list_devices(subsystem='block'):
+            if device.device_node.startswith('/dev/sd'):
+                dev_list.add(device.device_node)
+
+        return dev_list
+
+    @property
+    def audio_devices(self):
+        """Return all available audio interfaces."""
+        try:
+            with ASOUND_CARDS.open('r') as cards_file:
+                cards = cards_file.read()
+            with ASOUND_DEVICES.open('r') as devices_file:
+                devices = devices_file.read()
+        except OSError as err:
+            _LOGGER.error("Can't read asound data -> %s", err)
+            return
+
+        audio_list = {}
+
+        # parse cards
+        for match in RE_CARDS.finditer(cards):
+            audio_list[match.group(1)] = {
+                ATTR_NAME: match.group(3),
+                ATTR_TYPE: match.group(2),
+                ATTR_DEVICES: {},
+            }
+
+        # parse devices
+        for match in RE_DEVICES.finditer(devices):
+            try:
+                audio_list[match.group(1)][ATTR_DEVICES][match.group(2)] = \
+                    match.group(3)
+            except KeyError:
+                _LOGGER.warning("Wrong audio device found %s", match.group(0))
+                continue
+
+        return audio_list
+
+    @property
+    def gpio_devices(self):
+        """Return list of GPIO interface on device."""
+        dev_list = set()
+        for interface in GPIO_DEVICES.glob("gpio*"):
+            dev_list.add(interface.name)
+
+        return dev_list
+
+    @property
+    def last_boot(self):
+        """Return last boot time."""
+        try:
+            with PROC_STAT.open("r") as stat_file:
+                stats = stat_file.read()
+        except OSError as err:
+            _LOGGER.error("Can't read stat data -> %s", err)
+            return
+
+        # parse stat file
+        found = RE_BOOT_TIME.search(stats)
+        if not found:
+            _LOGGER.error("Can't found last boot time!")
+            return
+
+        return datetime.utcfromtimestamp(int(found.group(1)))
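A standalone illustration of the `/proc/stat` parsing behind `Hardware.last_boot`; the sample file content below is invented for the example:

from datetime import datetime
import re

RE_BOOT_TIME = re.compile(r"btime (\d+)")

# /proc/stat contains a "btime" line with the boot time as a unix timestamp
sample = "cpu  2255 34 2290 22625563\nbtime 1496169442\nprocesses 9631\n"
found = RE_BOOT_TIME.search(sample)
if found:
    print(datetime.utcfromtimestamp(int(found.group(1))))  # 2017-05-30 ...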
@@ -2,27 +2,38 @@
 import asyncio
 import logging
 import os
+import re
+
+import aiohttp
+from aiohttp.hdrs import CONTENT_TYPE
+import async_timeout

 from .const import (
     FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
-    ATTR_VERSION)
+    ATTR_VERSION, ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
+    HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
 from .dock.homeassistant import DockerHomeAssistant
-from .tools import JsonConfig
+from .tools import JsonConfig, convert_to_ascii
 from .validate import SCHEMA_HASS_CONFIG

 _LOGGER = logging.getLogger(__name__)

+RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
+

 class HomeAssistant(JsonConfig):
     """Hass core object for handle it."""

-    def __init__(self, config, loop, dock, websession):
+    def __init__(self, config, loop, docker, updater):
         """Initialize hass object."""
         super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
         self.config = config
         self.loop = loop
-        self.websession = websession
-        self.docker = DockerHomeAssistant(config, loop, dock, self)
+        self.updater = updater
+        self.docker = DockerHomeAssistant(config, loop, docker, self)
+        self.api_ip = docker.network.gateway
+        self.websession = aiohttp.ClientSession(
+            connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)

     async def prepare(self):
         """Prepare HomeAssistant object."""
@@ -35,6 +46,57 @@ class HomeAssistant(JsonConfig):
         else:
             await self.docker.attach()

+    @property
+    def api_port(self):
+        """Return network port to home-assistant instance."""
+        return self._data[ATTR_PORT]
+
+    @api_port.setter
+    def api_port(self, value):
+        """Set network port for home-assistant instance."""
+        self._data[ATTR_PORT] = value
+        self.save()
+
+    @property
+    def api_password(self):
+        """Return password for home-assistant instance."""
+        return self._data.get(ATTR_PASSWORD)
+
+    @api_password.setter
+    def api_password(self, value):
+        """Set password for home-assistant instance."""
+        self._data[ATTR_PASSWORD] = value
+        self.save()
+
+    @property
+    def api_ssl(self):
+        """Return if we need ssl to home-assistant instance."""
+        return self._data[ATTR_SSL]
+
+    @api_ssl.setter
+    def api_ssl(self, value):
+        """Set SSL for home-assistant instance."""
+        self._data[ATTR_SSL] = value
+        self.save()
+
+    @property
+    def api_url(self):
+        """Return API url to Home-Assistant."""
+        return "{}://{}:{}".format(
+            'https' if self.api_ssl else 'http', self.api_ip, self.api_port
+        )
+
+    @property
+    def watchdog(self):
+        """Return True if the watchdog should protect Home-Assistant."""
+        return self._data[ATTR_WATCHDOG]
+
+    @watchdog.setter
+    def watchdog(self, value):
+        """Return True if the watchdog should protect Home-Assistant."""
+        self._data[ATTR_WATCHDOG] = value
+        self.save()
+
     @property
     def version(self):
         """Return version of running homeassistant."""
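A quick illustration of what the new `api_url` property produces; the values below are examples only:

class _HassExample:
    """Stand-in carrying only the three attributes api_url reads."""
    api_ssl = False
    api_ip = '172.30.32.1'   # example gateway address
    api_port = 8123

    @property
    def api_url(self):
        return "{}://{}:{}".format(
            'https' if self.api_ssl else 'http', self.api_ip, self.api_port)

print(_HassExample().api_url)  # http://172.30.32.1:8123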
@@ -45,7 +107,7 @@ class HomeAssistant(JsonConfig):
         """Return last available version of homeassistant."""
         if self.is_custom_image:
             return self._data.get(ATTR_LAST_VERSION)
-        return self.config.last_homeassistant
+        return self.updater.version_homeassistant

     @property
     def image(self):
@@ -70,6 +132,17 @@ class HomeAssistant(JsonConfig):
         self._data[ATTR_DEVICES] = value
         self.save()

+    @property
+    def boot(self):
+        """Return True if home-assistant boot is enabled."""
+        return self._data[ATTR_BOOT]
+
+    @boot.setter
+    def boot(self, value):
+        """Set home-assistant boot options."""
+        self._data[ATTR_BOOT] = value
+        self.save()
+
     def set_custom(self, image, version):
         """Set a custom image for homeassistant."""
         # reset
@@ -95,13 +168,16 @@ class HomeAssistant(JsonConfig):
             _LOGGER.warning("Fails install landingpage, retry after 60sec")
             await asyncio.sleep(60, loop=self.loop)

+        # run landingpage after installation
+        await self.docker.run()
+
     async def install(self):
         """Install a landingpage."""
         _LOGGER.info("Setup HomeAssistant")
         while True:
             # read homeassistant tag and install it
             if not self.last_version:
-                await self.config.fetch_update_infos(self.websession)
+                await self.updater.fetch_data()

             tag = self.last_version
             if tag and await self.docker.install(tag):
@@ -109,17 +185,26 @@ class HomeAssistant(JsonConfig):
             _LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
             await asyncio.sleep(60, loop=self.loop)

-        # store version
+        # finishing
         _LOGGER.info("HomeAssistant docker now installed")
+        if self.boot:
+            await self.docker.run()
         await self.docker.cleanup()

-    def update(self, version=None):
-        """Update HomeAssistant version.
-
-        Return a coroutine.
-        """
+    async def update(self, version=None):
+        """Update HomeAssistant version."""
         version = version or self.last_version
-        return self.docker.update(version)
+        running = await self.docker.is_running()
+
+        if version == self.docker.version:
+            _LOGGER.warning("Version %s is already installed", version)
+            return False
+
+        try:
+            return await self.docker.update(version)
+        finally:
+            if running:
+                await self.docker.run()

     def run(self):
         """Run HomeAssistant docker.
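The try/finally in the new `update()` guarantees that a previously running container is started again even if the update fails or raises; the same pattern in isolation (names are placeholders):

async def update_with_restore(docker, version):
    """Restart-guarantee pattern extracted for illustration."""
    running = await docker.is_running()
    try:
        return await docker.update(version)
    finally:
        # executes on success, on failure, and on exception
        if running:
            await docker.run()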
@@ -156,7 +241,50 @@ class HomeAssistant(JsonConfig):
         """
         return self.docker.is_running()

+    def is_initialize(self):
+        """Return True if a docker container is exists.
+
+        Return a coroutine.
+        """
+        return self.docker.is_initialize()
+
     @property
     def in_progress(self):
         """Return True if a task is in progress."""
         return self.docker.in_progress
+
+    async def check_config(self):
+        """Run homeassistant config check."""
+        exit_code, log = await self.docker.execute_command(
+            "python3 -m homeassistant -c /config --script check_config"
+        )
+
+        # if not valid
+        if exit_code is None:
+            return (False, "")
+
+        # parse output
+        log = convert_to_ascii(log)
+        if exit_code != 0 or RE_YAML_ERROR.search(log):
+            return (False, log)
+        return (True, log)
+
+    async def check_api_state(self):
+        """Check if Home-Assistant up and running."""
+        url = "{}/api/".format(self.api_url)
+        header = {CONTENT_TYPE: CONTENT_TYPE_JSON}
+
+        if self.api_password:
+            header.update({HEADER_HA_ACCESS: self.api_password})
+
+        try:
+            async with async_timeout.timeout(30, loop=self.loop):
+                async with self.websession.get(url, headers=header) as request:
+                    status = request.status
+
+        except (asyncio.TimeoutError, aiohttp.ClientError):
+            return False
+
+        if status not in (200, 201):
+            _LOGGER.warning("Home-Assistant API config missmatch")
+
+        return True
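A self-contained version of the `check_api_state` probe for reference; the URL and the `X-HA-Access` header name are example assumptions in the spirit of that era's Home Assistant API:

import asyncio

import aiohttp
import async_timeout


async def probe(url='http://172.30.32.1:8123/api/', password=None):
    """Return True when the Home Assistant API answers (sketch)."""
    headers = {'Content-Type': 'application/json'}
    if password:
        headers['X-HA-Access'] = password  # legacy API-password header

    async with aiohttp.ClientSession() as session:
        try:
            async with async_timeout.timeout(30):
                async with session.get(url, headers=headers) as req:
                    return req.status in (200, 201)
        except (asyncio.TimeoutError, aiohttp.ClientError):
            return False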
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -197,6 +197,8 @@ class SnapshotsManager(object):
             await snapshot.restore_folders()

             # start homeassistant restore
+            _LOGGER.info("Full-Restore %s restore Home-Assistant",
+                         snapshot.slug)
             snapshot.restore_homeassistant(self.homeassistant)
             task_hass = self.loop.create_task(
                 self.homeassistant.update(snapshot.homeassistant_version))
@@ -279,6 +281,8 @@ class SnapshotsManager(object):
             await snapshot.restore_folders(folders)

         if homeassistant:
+            _LOGGER.info("Partial-Restore %s restore Home-Assistant",
+                         snapshot.slug)
             snapshot.restore_homeassistant(self.homeassistant)
             tasks.append(self.homeassistant.update(
                 snapshot.homeassistant_version))
@@ -14,7 +14,7 @@ from .util import remove_folder
 from ..const import (
     ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
     ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_DEVICES,
-    ATTR_IMAGE)
+    ATTR_IMAGE, ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT)
 from ..tools import write_json_file

 _LOGGER = logging.getLogger(__name__)
@@ -101,6 +101,56 @@ class Snapshot(object):
         """Set snapshot homeassistant custom image."""
         self._data[ATTR_HOMEASSISTANT][ATTR_IMAGE] = value

+    @property
+    def homeassistant_ssl(self):
+        """Return snapshot homeassistant api ssl."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_SSL)
+
+    @homeassistant_ssl.setter
+    def homeassistant_ssl(self, value):
+        """Set snapshot homeassistant api ssl."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_SSL] = value
+
+    @property
+    def homeassistant_port(self):
+        """Return snapshot homeassistant api port."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_PORT)
+
+    @homeassistant_port.setter
+    def homeassistant_port(self, value):
+        """Set snapshot homeassistant api port."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_PORT] = value
+
+    @property
+    def homeassistant_password(self):
+        """Return snapshot homeassistant api password."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_PASSWORD)
+
+    @homeassistant_password.setter
+    def homeassistant_password(self, value):
+        """Set snapshot homeassistant api password."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_PASSWORD] = value
+
+    @property
+    def homeassistant_watchdog(self):
+        """Return snapshot homeassistant watchdog options."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_WATCHDOG)
+
+    @homeassistant_watchdog.setter
+    def homeassistant_watchdog(self, value):
+        """Set snapshot homeassistant watchdog options."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_WATCHDOG] = value
+
+    @property
+    def homeassistant_boot(self):
+        """Return snapshot homeassistant boot options."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_BOOT)
+
+    @homeassistant_boot.setter
+    def homeassistant_boot(self, value):
+        """Set snapshot homeassistant boot options."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_BOOT] = value
+
     @property
     def size(self):
         """Return snapshot size."""
@@ -126,20 +176,34 @@ class Snapshot(object):
         """Read all data from homeassistant object."""
         self.homeassistant_version = homeassistant.version
         self.homeassistant_devices = homeassistant.devices
+        self.homeassistant_watchdog = homeassistant.watchdog
+        self.homeassistant_boot = homeassistant.boot

         # custom image
         if homeassistant.is_custom_image:
             self.homeassistant_image = homeassistant.image

+        # api
+        self.homeassistant_port = homeassistant.api_port
+        self.homeassistant_ssl = homeassistant.api_ssl
+        self.homeassistant_password = homeassistant.api_password
+
     def restore_homeassistant(self, homeassistant):
         """Write all data to homeassistant object."""
         homeassistant.devices = self.homeassistant_devices
+        homeassistant.watchdog = self.homeassistant_watchdog
+        homeassistant.boot = self.homeassistant_boot

         # custom image
         if self.homeassistant_image:
             homeassistant.set_custom(
                 self.homeassistant_image, self.homeassistant_version)

+        # api
+        homeassistant.api_port = self.homeassistant_port
+        homeassistant.api_ssl = self.homeassistant_ssl
+        homeassistant.api_password = self.homeassistant_password
+
     async def load(self):
         """Read snapshot.json from tar file."""
         if not self.tar_file.is_file():
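A minimal round-trip sketch of the new API-settings persistence; `FakeHass` and the plain dict below stand in for the real `HomeAssistant` and `Snapshot._data` objects:

class FakeHass:
    """Stand-in object with only the persisted API attributes."""
    api_port = 8123
    api_ssl = False
    api_password = 'example'  # made-up value

data = {}

def snapshot_homeassistant(hass):
    data.update(port=hass.api_port, ssl=hass.api_ssl,
                password=hass.api_password)

def restore_homeassistant(hass):
    hass.api_port = data['port']
    hass.api_ssl = data['ssl']
    hass.api_password = data['password']

hass = FakeHass()
snapshot_homeassistant(hass)   # store settings into the snapshot dict
restore_homeassistant(hass)    # write them back after a restore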
@@ -197,7 +261,8 @@ class Snapshot(object):
         """Async context to close a snapshot."""
         # exists snapshot or exception on build
         if self.tar_file.is_file() or exception_type is not None:
-            return self._tmp.cleanup()
+            self._tmp.cleanup()
+            return

         # validate data
         try:
@@ -219,7 +284,6 @@ class Snapshot(object):
             _LOGGER.error("Can't write snapshot.json")

         self._tmp.cleanup()
-        self._tmp = None

     async def import_addon(self, addon):
         """Add a addon into snapshot."""
@@ -259,9 +323,11 @@ class Snapshot(object):
         origin_dir = Path(self.config.path_hassio, name)

         try:
+            _LOGGER.info("Snapshot folder %s", name)
             with tarfile.open(snapshot_tar, "w:gz",
                               compresslevel=1) as tar_file:
                 tar_file.add(origin_dir, arcname=".")
+            _LOGGER.info("Snapshot folder %s done", name)

             self._data[ATTR_FOLDERS].append(name)
         except tarfile.TarError as err:
@@ -288,8 +354,10 @@ class Snapshot(object):
             remove_folder(origin_dir)

         try:
+            _LOGGER.info("Restore folder %s", name)
             with tarfile.open(snapshot_tar, "r:gz") as tar_file:
                 tar_file.extractall(path=origin_dir)
+            _LOGGER.info("Restore folder %s done", name)
         except tarfile.TarError as err:
             _LOGGER.warning("Can't restore folder %s -> %s", name, err)
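The folder snapshot/restore calls in isolation, runnable as-is apart from the example paths:

import tarfile
from pathlib import Path

origin_dir = Path('/data/share')          # example source folder
snapshot_tar = Path('/tmp/share.tar.gz')  # example archive path

# pack: compresslevel=1 trades compression ratio for speed
with tarfile.open(snapshot_tar, "w:gz", compresslevel=1) as tar_file:
    tar_file.add(origin_dir, arcname=".")  # store paths relative to the root

# unpack back into place
with tarfile.open(snapshot_tar, "r:gz") as tar_file:
    tar_file.extractall(path=origin_dir)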
@@ -5,9 +5,10 @@ import voluptuous as vol
 from ..const import (
     ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
     ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_DEVICES,
-    ATTR_IMAGE, FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
+    ATTR_IMAGE, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT,
+    FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
     SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
-from ..validate import HASS_DEVICES
+from ..validate import HASS_DEVICES, NETWORK_PORT

 ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]
@@ -21,6 +22,11 @@ SCHEMA_SNAPSHOT = vol.Schema({
         vol.Required(ATTR_VERSION): vol.Coerce(str),
         vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
         vol.Optional(ATTR_IMAGE): vol.Coerce(str),
+        vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
+        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
+        vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
+        vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
+        vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
     }),
     vol.Optional(ATTR_FOLDERS, default=[]): [vol.In(ALL_FOLDERS)],
     vol.Optional(ATTR_ADDONS, default=[]): [vol.Schema({
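A tiny demonstration of how the new optional keys behave under voluptuous; the cut-down schema below is a stand-in for `SCHEMA_SNAPSHOT`, not a copy of it:

import voluptuous as vol

schema = vol.Schema({
    vol.Optional('boot', default=True): vol.Boolean(),
    vol.Optional('port', default=8123): vol.Coerce(int),
})

# defaults are filled in for missing keys, so old snapshots stay loadable
print(schema({}))            # {'boot': True, 'port': 8123}
print(schema({'port': 80}))  # {'boot': True, 'port': 80}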
@@ -13,7 +13,7 @@ def api_sessions_cleanup(config):
         now = datetime.now()
         for session, until_valid in config.security_sessions.items():
             if now >= until_valid:
-                config.security_sessions = (session, None)
+                config.drop_security_session(session)

     return _api_sessions_cleanup
@@ -27,8 +27,14 @@ def addons_update(loop, addons):
             if not addon.is_installed or not addon.auto_update:
                 continue

-            if addon.version_installed != addon.version:
+            if addon.version_installed == addon.last_version:
+                continue
+
+            if addon.test_udpate_schema():
                 tasks.append(addon.update())
+            else:
+                _LOGGER.warning(
+                    "Addon %s will be ignore, schema tests fails", addon.slug)

         if tasks:
             _LOGGER.info("Addon auto update process %d tasks", len(tasks))
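Note the inverted check: the task now skips add-ons already at last_version and only queues an update once test_udpate_schema() (sic) accepts the add-on's config under the new schema. Condensed, the gating reads roughly like this (hypothetical helper, not part of the commit):

```python
# Hypothetical stand-in for the add-on gating above
def should_update(addon):
    if not addon.is_installed or not addon.auto_update:
        return False
    if addon.version_installed == addon.last_version:
        return False                       # already up to date
    return addon.test_udpate_schema()      # config must validate first
```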
@@ -37,32 +43,73 @@ def addons_update(loop, addons):
     return _addons_update


-def hassio_update(config, supervisor, websession):
+def hassio_update(supervisor, updater):
     """Create scheduler task for update of supervisor hassio."""
     async def _hassio_update():
         """Check and run update of supervisor hassio."""
-        await config.fetch_update_infos(websession)
-        if config.last_hassio == supervisor.version:
+        await updater.fetch_data()
+        if updater.version_hassio == supervisor.version:
             return

         # don't perform a update on beta/dev channel
-        if config.upstream_beta:
+        if updater.beta_channel:
             _LOGGER.warning("Ignore Hass.IO update on beta upstream!")
             return

-        _LOGGER.info("Found new HassIO version %s.", config.last_hassio)
-        await supervisor.update(config.last_hassio)
+        _LOGGER.info("Found new HassIO version %s.", updater.version_hassio)
+        await supervisor.update(updater.version_hassio)

     return _hassio_update


-def homeassistant_watchdog(loop, homeassistant):
-    """Create scheduler task for montoring running state."""
-    async def _homeassistant_watchdog():
-        """Check running state and start if they is close."""
+def homeassistant_watchdog_docker(loop, homeassistant):
+    """Create scheduler task for montoring running state of docker."""
+    async def _homeassistant_watchdog_docker():
+        """Check running state of docker and start if they is close."""
+        # if Home-Assistant is active
+        if not await homeassistant.is_initialize() or \
+                not homeassistant.watchdog:
+            return
+
+        # if Home-Assistant is running
         if homeassistant.in_progress or await homeassistant.is_running():
             return

         loop.create_task(homeassistant.run())
+        _LOGGER.error("Watchdog found a problem with Home-Assistant docker!")

-    return _homeassistant_watchdog
+    return _homeassistant_watchdog_docker
+
+
+def homeassistant_watchdog_api(loop, homeassistant):
+    """Create scheduler task for montoring running state of API.
+
+    Try 2 times to call API before we restart Home-Assistant. Maybe we had a
+    delay in our system.
+    """
+    retry_scan = 0
+
+    async def _homeassistant_watchdog_api():
+        """Check running state of API and start if they is close."""
+        nonlocal retry_scan
+
+        # if Home-Assistant is active
+        if not await homeassistant.is_initialize() or \
+                not homeassistant.watchdog:
+            return
+
+        # if Home-Assistant API is up
+        if homeassistant.in_progress or await homeassistant.check_api_state():
+            return
+        retry_scan += 1
+
+        # Retry active
+        if retry_scan == 1:
+            _LOGGER.warning("Watchdog miss API response from Home-Assistant")
+            return
+
+        loop.create_task(homeassistant.restart())
+        _LOGGER.error("Watchdog found a problem with Home-Assistant API!")
+        retry_scan = 0
+
+    return _homeassistant_watchdog_api
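The API watchdog tolerates exactly one missed poll: the first miss only logs a warning, the second triggers a restart and resets the counter (the counter is not reset on a successful check). A compressed sketch of that two-strike logic with injected callables:

```python
def make_watchdog(check_api, restart):
    """Two-strike watchdog sketch (check_api/restart are async stand-ins)."""
    retry_scan = 0

    async def watchdog():
        nonlocal retry_scan
        if await check_api():
            return              # healthy; note the counter is NOT reset here
        retry_scan += 1
        if retry_scan == 1:
            return              # first miss: warn only, wait for next scan
        await restart()         # second miss: restart Home-Assistant
        retry_scan = 0          # counter resets only after a restart

    return watchdog
```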
133 hassio/tools.py
@@ -1,9 +1,10 @@
 """Tools file for HassIO."""
 import asyncio
 from contextlib import suppress
+from datetime import datetime, timedelta, timezone
 import json
 import logging
-import socket
+import re

 import aiohttp
 import async_timeout
@@ -11,51 +12,21 @@ import pytz
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from .const import URL_HASSIO_VERSION, URL_HASSIO_VERSION_BETA
-
 _LOGGER = logging.getLogger(__name__)

 FREEGEOIP_URL = "https://freegeoip.io/json/"

-
-async def fetch_last_versions(websession, beta=False):
-    """Fetch current versions from github.
-
-    Is a coroutine.
-    """
-    url = URL_HASSIO_VERSION_BETA if beta else URL_HASSIO_VERSION
-    try:
-        with async_timeout.timeout(10, loop=websession.loop):
-            async with websession.get(url) as request:
-                return await request.json(content_type=None)
-
-    except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
-        _LOGGER.warning("Can't fetch versions from %s! %s", url, err)
-
-    except json.JSONDecodeError as err:
-        _LOGGER.warning("Can't parse versions from %s! %s", url, err)
-
-
-def get_local_ip(loop):
-    """Retrieve local IP address.
-
-    Return a future.
-    """
-    def local_ip():
-        """Return local ip."""
-        try:
-            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-
-            # Use Google Public DNS server to determine own IP
-            sock.connect(('8.8.8.8', 80))
-
-            return sock.getsockname()[0]
-        except socket.error:
-            return socket.gethostbyname(socket.gethostname())
-        finally:
-            sock.close()
-
-    return loop.run_in_executor(None, local_ip)
+RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
+
+# Copyright (c) Django Software Foundation and individual contributors.
+# All rights reserved.
+# https://github.com/django/django/blob/master/LICENSE
+DATETIME_RE = re.compile(
+    r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
+    r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
+    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
+    r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
+)


 def write_json_file(jsonfile, data):
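RE_STRING matches ANSI CSI and OSC escape sequences, which lets container log output be reduced to plain text. A quick check, reusing the regex exactly as added above:

```python
import re

RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")

raw = b"\x1b[32mINFO\x1b[0m homeassistant started"
print(RE_STRING.sub("", raw.decode()))  # -> "INFO homeassistant started"
```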
@@ -76,19 +47,6 @@ def read_json_file(jsonfile):
         return json.loads(cfile.read())


-def validate_timezone(timezone):
-    """Validate voluptuous timezone."""
-    try:
-        pytz.timezone(timezone)
-    except pytz.exceptions.UnknownTimeZoneError:
-        raise vol.Invalid(
-            "Invalid time zone passed in. Valid options can be found here: "
-            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
-            from None
-
-    return timezone
-
-
 async def fetch_timezone(websession):
     """Read timezone from freegeoip."""
     data = {}
@@ -101,6 +59,47 @@ async def fetch_timezone(websession):
     return data.get('time_zone', 'UTC')


+def convert_to_ascii(raw):
+    """Convert binary to ascii and remove colors."""
+    return RE_STRING.sub("", raw.decode())
+
+
+# Copyright (c) Django Software Foundation and individual contributors.
+# All rights reserved.
+# https://github.com/django/django/blob/master/LICENSE
+def parse_datetime(dt_str):
+    """Parse a string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+    Raises ValueError if the input is well formatted but not a valid datetime.
+    Returns None if the input isn't well formatted.
+    """
+    match = DATETIME_RE.match(dt_str)
+    if not match:
+        return None
+    kws = match.groupdict()  # type: Dict[str, Any]
+    if kws['microsecond']:
+        kws['microsecond'] = kws['microsecond'].ljust(6, '0')
+    tzinfo_str = kws.pop('tzinfo')
+
+    tzinfo = None  # type: Optional[dt.tzinfo]
+    if tzinfo_str == 'Z':
+        tzinfo = pytz.utc
+    elif tzinfo_str is not None:
+        offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
+        offset_hours = int(tzinfo_str[1:3])
+        offset = timedelta(hours=offset_hours, minutes=offset_mins)
+        if tzinfo_str[0] == '-':
+            offset = -offset
+        tzinfo = timezone(offset)
+    else:
+        tzinfo = None
+    kws = {k: int(v) for k, v in kws.items() if v is not None}
+    kws['tzinfo'] = tzinfo
+    return datetime(**kws)
+
+
 class JsonConfig(object):
     """Hass core object for handle it."""

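parse_datetime is the Django helper taken verbatim: named regex groups plus a fixed-offset timezone rebuilt from the tzinfo string. Worked through by hand (assuming the function as defined above is in scope):

```python
# Assuming parse_datetime() from the hunk above is in scope:
dt = parse_datetime("2017-11-05 14:30:12+01:00")
# year=2017, month=11, day=5, hour=14, minute=30, second=12
# tzinfo_str "+01:00" -> hours=1, mins=0 -> timezone(timedelta(hours=1))
print(dt.isoformat())                # 2017-11-05T14:30:12+01:00
print(parse_datetime("not a date"))  # None (regex does not match)
```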
@@ -124,6 +123,8 @@ class JsonConfig(object):
         except vol.Invalid as ex:
             _LOGGER.error("Can't parse %s -> %s",
                           self._file, humanize_error(self._data, ex))
+            # reset data to default
+            self._data = self._schema({})

     def save(self):
         """Store data to config file."""
@@ -140,3 +141,27 @@ class JsonConfig(object):
             _LOGGER.error("Can't store config in %s", self._file)
             return False
         return True
+
+
+class AsyncThrottle(object):
+    """
+    Decorator that prevents a function from being called more than once every
+    time period.
+    """
+    def __init__(self, delta):
+        """Initialize async throttle."""
+        self.throttle_period = delta
+        self.time_of_last_call = datetime.min
+
+    def __call__(self, method):
+        """Throttle function"""
+        async def wrapper(*args, **kwargs):
+            """Throttle function wrapper"""
+            now = datetime.now()
+            time_since_last_call = now - self.time_of_last_call
+
+            if time_since_last_call > self.throttle_period:
+                self.time_of_last_call = now
+                return await method(*args, **kwargs)
+
+        return wrapper
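Calls that land inside the throttle window return None instead of awaiting the wrapped coroutine, which is why fetch_data below can be scheduled freely yet hit the version endpoint at most once per minute. A runnable usage sketch (class body repeated so the example stands alone):

```python
import asyncio
from datetime import datetime, timedelta

class AsyncThrottle(object):
    """Copy of the decorator above, repeated so this example runs alone."""
    def __init__(self, delta):
        self.throttle_period = delta
        self.time_of_last_call = datetime.min

    def __call__(self, method):
        async def wrapper(*args, **kwargs):
            now = datetime.now()
            if now - self.time_of_last_call > self.throttle_period:
                self.time_of_last_call = now
                return await method(*args, **kwargs)
        return wrapper

@AsyncThrottle(timedelta(seconds=60))
async def fetch():
    return "fetched"

async def main():
    print(await fetch())  # "fetched"
    print(await fetch())  # None: second call falls inside the 60s window

asyncio.run(main())
```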
86 hassio/updater.py Normal file
@@ -0,0 +1,86 @@
+"""Fetch last versions from webserver."""
+import asyncio
+from datetime import timedelta
+import json
+import logging
+
+import aiohttp
+import async_timeout
+
+from .const import (
+    URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
+    ATTR_BETA_CHANNEL)
+from .tools import AsyncThrottle, JsonConfig
+from .validate import SCHEMA_UPDATER_CONFIG
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class Updater(JsonConfig):
+    """Fetch last versions from version.json."""
+
+    def __init__(self, config, loop, websession):
+        """Initialize updater."""
+        super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
+        self.config = config
+        self.loop = loop
+        self.websession = websession
+
+    @property
+    def version_homeassistant(self):
+        """Return last version of homeassistant."""
+        return self._data.get(ATTR_HOMEASSISTANT)
+
+    @property
+    def version_hassio(self):
+        """Return last version of hassio."""
+        return self._data.get(ATTR_HASSIO)
+
+    @property
+    def upstream(self):
+        """Return Upstream branch for version."""
+        if self.beta_channel:
+            return 'dev'
+        return 'master'
+
+    @property
+    def beta_channel(self):
+        """Return True if we run in beta upstream."""
+        return self._data[ATTR_BETA_CHANNEL]
+
+    @beta_channel.setter
+    def beta_channel(self, value):
+        """Set beta upstream mode."""
+        self._data[ATTR_BETA_CHANNEL] = bool(value)
+        self.save()
+
+    @AsyncThrottle(timedelta(seconds=60))
+    async def fetch_data(self):
+        """Fetch current versions from github.
+
+        Is a coroutine.
+        """
+        url = URL_HASSIO_VERSION.format(self.upstream)
+        try:
+            _LOGGER.info("Fetch update data from %s", url)
+            with async_timeout.timeout(10, loop=self.loop):
+                async with self.websession.get(url) as request:
+                    data = await request.json(content_type=None)
+
+        except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
+            _LOGGER.warning("Can't fetch versions from %s -> %s", url, err)
+            return
+
+        except json.JSONDecodeError as err:
+            _LOGGER.warning("Can't parse versions from %s -> %s", url, err)
+            return
+
+        # data valid?
+        if not data:
+            _LOGGER.warning("Invalid data from %s", url)
+            return
+
+        # update versions
+        self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
+        self._data[ATTR_HASSIO] = data.get('hassio')
+        self.save()
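Because Updater persists fetched versions through JsonConfig, consumers only read properties after calling fetch_data(). A hedged sketch of the consumer side, mirroring the hassio_update task earlier in this diff (supervisor and updater are stand-ins):

```python
# Hypothetical wiring, mirroring how tasks.py consumes the Updater above
async def check_for_update(supervisor, updater):
    await updater.fetch_data()                 # throttled to once per 60s
    if updater.version_hassio == supervisor.version:
        return None                            # already current
    if updater.beta_channel:
        return None                            # never auto-update on 'dev'
    return updater.version_hassio              # version to update to
```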
hassio/validate.py
@@ -1,11 +1,32 @@
 """Validate functions."""
 import voluptuous as vol

-from .const import ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION
+import pytz
+
+from .const import (
+    ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_SESSIONS, ATTR_PASSWORD,
+    ATTR_TOTP, ATTR_SECURITY, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
+    ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
+    ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT, ATTR_SSL,
+    ATTR_PORT, ATTR_WATCHDOG)


 NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
 HASS_DEVICES = [vol.Match(r"^[^/]*$")]
+ALSA_CHANNEL = vol.Match(r"\d+,\d+")
+
+
+def validate_timezone(timezone):
+    """Validate voluptuous timezone."""
+    try:
+        pytz.timezone(timezone)
+    except pytz.exceptions.UnknownTimeZoneError:
+        raise vol.Invalid(
+            "Invalid time zone passed in. Valid options can be found here: "
+            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
+            from None
+
+    return timezone


 def convert_to_docker_ports(data):
@@ -35,8 +56,37 @@ DOCKER_PORTS = vol.Schema({
 })


+# pylint: disable=no-value-for-parameter
 SCHEMA_HASS_CONFIG = vol.Schema({
     vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
+    vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
     vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
     vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
-})
+    vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
+    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
+    vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
+    vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
+}, extra=vol.REMOVE_EXTRA)
+
+
+# pylint: disable=no-value-for-parameter
+SCHEMA_UPDATER_CONFIG = vol.Schema({
+    vol.Optional(ATTR_BETA_CHANNEL, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
+    vol.Optional(ATTR_HASSIO): vol.Coerce(str),
+}, extra=vol.REMOVE_EXTRA)
+
+
+# pylint: disable=no-value-for-parameter
+SCHEMA_HASSIO_CONFIG = vol.Schema({
+    vol.Optional(ATTR_TIMEZONE, default='UTC'): validate_timezone,
+    vol.Optional(ATTR_LAST_BOOT): vol.Coerce(str),
+    vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
+    vol.Optional(ATTR_SECURITY, default=False): vol.Boolean(),
+    vol.Optional(ATTR_TOTP): vol.Coerce(str),
+    vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
+    vol.Optional(ATTR_SESSIONS, default={}):
+        vol.Schema({vol.Coerce(str): vol.Coerce(str)}),
+    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
+    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
+}, extra=vol.REMOVE_EXTRA)
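All three schemas pass extra=vol.REMOVE_EXTRA, so stale keys in an on-disk config are dropped instead of raising vol.Invalid. A short demonstration with a stand-in schema:

```python
import voluptuous as vol

schema = vol.Schema({
    vol.Optional('beta_channel', default=False): vol.Boolean(),
}, extra=vol.REMOVE_EXTRA)

# The stale 'old_key' is stripped rather than rejected
print(schema({'beta_channel': True, 'old_key': 1}))
# -> {'beta_channel': True}
```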
Submodule home-assistant-polymer updated: d2a56655d0...9b9cba86c2
3 setup.py
@@ -46,6 +46,7 @@ setup(
         'gitpython',
         'pyotp',
         'pyqrcode',
-        'pytz'
+        'pytz',
+        'pyudev'
     ]
 )
11 version.json
@@ -1,7 +1,8 @@
 {
-    "hassio": "0.43",
-    "homeassistant": "0.48.1",
-    "resinos": "0.8",
-    "resinhup": "0.1",
-    "generic": "0.3"
+    "hassio": "0.73",
+    "homeassistant": "0.56.2",
+    "resinos": "1.1",
+    "resinhup": "0.3",
+    "generic": "0.3",
+    "cluster": "0.1"
 }