Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-11 18:19:21 +00:00
Compare commits
278 Commits
463c97f9e7 3983928c6c 15e626027f d46810752e 3d10b502a0 433c5cef3b 697caf553a
1e11359c71 5285431825 7743a572a9 3b974920d3 6bc9792248 da55f6fb10 ffa90a3407
0a13ea3743 0e2e588145 b8c50fee36 8cb0b7c498 699fcdafba b4d5aeb5d0 d067dd643e
65a2bf2d18 e826e8184f dacbde7d77 5b0587b672 f0320c0f6d e05c32df25 9c40c32e95
ac60de0360 587047f9d6 e815223047 b6fb5ab950 a0906937c4 07c47df369 85e9a949cc
3933fb0664 a885fbdb41 210793eb34 0235c7bce0 4419c0fc6c 2f3701693d 3bf446cbdb
0c67cc13a1 0b80d7b6f4 23c35d4c80 e939c29efa ea0655b4e5 4117ce2e86 dec04386bf
b50756785e b9538bdc67 a928281bbe 4533d17e27 546df6d001 f14eef62ae ee86770570
385a4e9f6f 142cdcffca eb6c753514 c3b62c80fb f77e176a6e 3f99dec858 81b0cf55b0
1d5d2dc731 04f5ee0a80 7a02777cfb 7257c44d27 cb15602814 0f2c333484 6f2cf2ef85
70a721a47d b32947af98 94b44ec7fe 5c8aa71c31 a6c424b7c8 38e40c342d 26d390b66e
baddafa552 f443d3052b 8fc27ff28e 3784d759f5 61037f3852 db8aaecdbe 15a4541595
50ae8e2335 279df17ba4 f8e6362283 0c44064926 73c437574c 69a2182c04 ce80e6cd32
054def09f7 eebe90bd14 6ea280ce60 e992b70f92 0f58bb35ba 56abfb6adc 8352d61f8d
51d585f299 d017a52922 78ec0d1314 c84151e9e8 e8e599cb8c 232b9ea239 1c49351e66
34d1f4725d 7cd81dcc95 1bdd3d88de d105552fa9 b5af35bd6c 7d46487491 38a599011e
e59e2fc8d7 b9ce405ada d7df423deb 99eea99e93 63d82ce03e 13a2c1ecd9 627ab4ee81
54f45539be 53297205c8 0f09fdfcce 24db0fdb86 7349234638 c691f2a559 110cd32dc3
26d8dc0ec6 fd41bda828 1e3868bb70 ece6c644cf 6a5bd5a014 664334f1ad e5e28747d4
c7956d95ae 5ce6abdbb6 fad0185c26 86faf32709 19f413796d 8f94b4d63f db263f84af
747810b729 d6768f15a1 6c75957578 3a8307acfe f20c7d42ee 9419fbff94 3ac6c03637
a95274f1b3 9d2fb87cec ce9c3565b6 b0ec58ed1b 893a5f8dd3 98064f6a90 5146f89354
fb46592d48 b4fb5ac681 4b7201dc59 3a5a4e4c27 70104a9280 efbc7b17a1 64c5e20fc4
13498afa97 f6375f1bd6 8fd1599173 63302b73b0 f591f67a2a cda3184a55 afc811e975
2e169dcb42 34e24e184f 2e4751ed7d 8c82c467d4 f3f6771534 0a75a4dcbc 1a4542fc4e
7e0525749e b33b26018d 66c93e7176 5674d32bad 7a84972770 638f0f5371 dca1b6f1d3
2b0ee109d6 e7430d87d7 9751c1de79 c497167b64 7fb2aca88b 0d544845b1 602eb472f9
f22fa46bdb 4171a28260 55365a631a 547415b30b cbf79f1fab 31cc1dce82 8a11e6c845
2df4f80aa5 68566ee9e1 fe04b7ec59 38f96d7ddd 2b2edd6e98 361969aca2 e61e7f41f2
75150fd149 bd1c8be1e1 f167197640 f084ecc007 65becbd0ae f38e28a4d9 2998cd94ff
79e2f3e8ab 13291f52f2 4baa80c3de be28a6b012 d94ada6216 b2d7743e06 40324beb72
c02f6913b3 d56af22d5e 1795103086 02e1689dd1 ab4d96331f cb881cba28 44b247f397
8bb43daf91 a7e65613d6 3c04c71401 1353d52bd1 7701457791 b7820bc6a6 df66102de0
4b308d0de1 4448ba886b f39006be01 e5204eef8a 1f07d47fd6 ba352abf0b 2bf440a744
3b26136636 8249f042c0 84bbaeee5f b7620b7adf 5a80be9fd4 a733886803 834fd29fab
fd1caf8aa6 975c9e8061 0b3c5885ec 711b63e2d0 c7b833b5eb fd472b3084 dcbb6a2160
56fa1550d2 e1f97860ee 6ab3fe18d9 7969f3dfd7 6f05b90e4e 3aa53d99d7 3525f5a02f
04514a9f5c 1c915ef4cd b03a2c5c5f 64988b285e 5c69dca7b3 dfda7dc748 cb7710c23f
f9b12a2eb2 6a7617faad a94e6c5303 a3209c4bde 09bba96940 6cab017042 d08343d040
8ab0ed5047 1382a7b36e 47491ca55b 261bda82db f751b0e6fc
9 .dockerignore Normal file
@@ -0,0 +1,9 @@
# General files
.git
.github

# Test related files
.tox

# Temporary files
**/__pycache__
13 .github/move.yml vendored Normal file
@@ -0,0 +1,13 @@
# Configuration for move-issues - https://github.com/dessant/move-issues

# Delete the command comment. Ignored when the comment also contains other content
deleteCommand: true
# Close the source issue after moving
closeSourceIssue: true
# Lock the source issue after moving
lockSourceIssue: false
# Set custom aliases for targets
# aliases:
#   r: repo
#   or: owner/repo
312 API.md
@@ -4,7 +4,7 @@

Interface for Home Assistant to control things from supervisor.

On error:
On error / Code 400:

```json
{
@@ -13,7 +13,7 @@ On error:
}
```

On success:
On success / Code 200:

```json
{
@@ -22,6 +22,8 @@ On success:
}
```

To access the API you need to set the `X-HASSIO-KEY` header; the key is exposed to add-ons/Home Assistant in the `HASSIO_TOKEN` environment variable.
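For illustration, a minimal Python sketch of an authenticated call (the `hassio` hostname and the `requests` package are assumptions for this example, not part of the API spec):

```python
# Minimal sketch: ping the supervisor from inside an add-on.
# Assumes the API is reachable at http://hassio/ and `requests` is installed.
import os

import requests

response = requests.get(
    "http://hassio/supervisor/ping",
    headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
    timeout=10,
)
print(response.json())  # success envelope as documented above
```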
### Hass.io

- GET `/supervisor/ping`

@@ -36,6 +38,7 @@ The addons from `addons` are only installed one.
    "arch": "armhf|aarch64|i386|amd64",
    "beta_channel": "true|false",
    "timezone": "TIMEZONE",
    "wait_boot": "int",
    "addons": [
        {
            "name": "xy bla",
@@ -44,6 +47,7 @@ The addons from `addons` are only installed one.
            "repository": "12345678|null",
            "version": "LAST_VERSION",
            "installed": "INSTALL_VERSION",
            "icon": "bool",
            "logo": "bool",
            "state": "started|stopped",
        }
@@ -70,6 +74,7 @@ Optional:
{
    "beta_channel": "true|false",
    "timezone": "TIMEZONE",
    "wait_boot": "int",
    "addons_repositories": [
        "REPO_URL"
    ]
@@ -84,44 +89,20 @@ Reload addons/version.

Output is the raw docker log.

### Security

- GET `/security/info`

- GET `/supervisor/stats`
```json
{
    "initialize": "bool",
    "totp": "bool"
    "cpu_percent": 0.0,
    "memory_usage": 283123,
    "memory_limit": 329392,
    "network_tx": 0,
    "network_rx": 0,
    "blk_read": 0,
    "blk_write": 0
}
```

- POST `/security/options`

```json
{
    "password": "xy"
}
```

- POST `/security/totp`

```json
{
    "password": "xy"
}
```

Return QR-Code

- POST `/security/session`
```json
{
    "password": "xy",
    "totp": "null|123456"
}
```

### Backup/Snapshot
### Snapshot

- GET `/snapshots`

@@ -131,7 +112,9 @@ Return QR-Code
        {
            "slug": "SLUG",
            "date": "ISO",
            "name": "Custom name"
            "name": "Custom name",
            "type": "full|partial",
            "protected": "bool"
        }
    ]
}
@@ -139,11 +122,28 @@ Return QR-Code

- POST `/snapshots/reload`

- POST `/snapshots/new/upload`

return:
```json
{
    "slug": ""
}
```

- POST `/snapshots/new/full`

```json
{
    "name": "Optional"
    "name": "Optional",
    "password": "Optional"
}
```

return:
```json
{
    "slug": ""
}
```
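As a sketch, creating a password-protected full snapshot through the endpoint above (same assumed host/token conventions as the earlier example):

```python
# Sketch: request a full snapshot and read back the returned slug.
import os

import requests

response = requests.post(
    "http://hassio/snapshots/new/full",
    headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
    json={"name": "nightly", "password": "s3cret"},
    timeout=600,  # snapshots can take a while
)
payload = response.json()
# the doc shows {"slug": ""}; depending on the envelope it may sit under "data"
slug = payload.get("data", payload)["slug"]
print(f"snapshot slug: {slug}")
```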
@@ -153,7 +153,15 @@ Return QR-Code
{
    "name": "Optional",
    "addons": ["ADDON_SLUG"],
    "folders": ["FOLDER_NAME"]
    "folders": ["FOLDER_NAME"],
    "password": "Optional"
}
```

return:
```json
{
    "slug": ""
}
```

@@ -168,15 +176,14 @@ Return QR-Code
    "name": "custom snapshot name / description",
    "date": "ISO",
    "size": "SIZE_IN_MB",
    "homeassistant": {
        "version": "INSTALLED_HASS_VERSION",
        "devices": []
    },
    "protected": "bool",
    "homeassistant": "version",
    "addons": [
        {
            "slug": "ADDON_SLUG",
            "name": "NAME",
            "version": "INSTALLED_VERSION"
            "version": "INSTALLED_VERSION",
            "size": "SIZE_IN_MB"
        }
    ],
    "repositories": ["URL"],
@@ -185,14 +192,25 @@ Return QR-Code
```

- POST `/snapshots/{slug}/remove`

- GET `/snapshots/{slug}/download`

- POST `/snapshots/{slug}/restore/full`

```json
{
    "password": "Optional"
}
```

- POST `/snapshots/{slug}/restore/partial`

```json
{
    "homeassistant": "bool",
    "addons": ["ADDON_SLUG"],
    "folders": ["FOLDER_NAME"]
    "folders": ["FOLDER_NAME"],
    "password": "Optional"
}
```

@@ -243,6 +261,7 @@ Optional:
    "serial": ["/dev/xy"],
    "input": ["Input device name"],
    "disk": ["/dev/sdax"],
    "gpio": ["gpiochip0", "gpiochip100"],
    "audio": {
        "CARD_ID": {
            "name": "xy",
@@ -255,6 +274,8 @@ Optional:
}
```

- POST `/host/reload`

### Network

- GET `/network/info`

@@ -281,9 +302,13 @@ Optional:
{
    "version": "INSTALL_VERSION",
    "last_version": "LAST_VERSION",
    "devices": [""],
    "image": "str",
    "custom": "bool -> if custom image"
    "custom": "bool -> if custom image",
    "boot": "bool",
    "port": 8123,
    "ssl": "bool",
    "watchdog": "bool",
    "startup_time": 600
}
```

@@ -302,19 +327,47 @@ Optional:

Output is the raw Docker log.

- POST `/homeassistant/restart`
- POST `/homeassistant/options`
- POST `/homeassistant/check`
- POST `/homeassistant/start`
- POST `/homeassistant/stop`

- POST `/homeassistant/options`

```json
{
    "devices": [],
    "image": "Optional|null",
    "last_version": "Optional for custom image|null"
    "last_version": "Optional for custom image|null",
    "port": "port for access hass",
    "ssl": "bool",
    "password": "",
    "watchdog": "bool",
    "startup_time": 600
}
```

Setting `image` or `last_version` to `null` resets that option.

- POST/GET `/homeassistant/api`

Proxy to the real Home Assistant instance.

- GET `/homeassistant/websocket`

Proxy to the real websocket instance.

- GET `/homeassistant/stats`
```json
{
    "cpu_percent": 0.0,
    "memory_usage": 283123,
    "memory_limit": 329392,
    "network_tx": 0,
    "network_rx": 0,
    "blk_read": 0,
    "blk_write": 0
}
```

### RESTful for API addons

- GET `/addons`

@@ -334,12 +387,9 @@ Get all available addons.
            "installed": "none|INSTALL_VERSION",
            "detached": "bool",
            "build": "bool",
            "privileged": ["NET_ADMIN", "SYS_ADMIN"],
            "devices": ["/dev/xy"],
            "url": "null|url",
            "logo": "bool",
            "audio": "bool",
            "hassio_api": "bool"
            "icon": "bool",
            "logo": "bool"
        }
    ],
    "repositories": [
@@ -361,6 +411,7 @@ Get all available addons.
{
    "name": "xy bla",
    "description": "description",
    "long_description": "null|markdown",
    "auto_update": "bool",
    "url": "null|url of addon",
    "detached": "bool",
@@ -373,19 +424,33 @@ Get all available addons.
    "options": "{}",
    "network": "{}|null",
    "host_network": "bool",
    "host_ipc": "bool",
    "host_dbus": "bool",
    "privileged": ["NET_ADMIN", "SYS_ADMIN"],
    "devices": ["/dev/xy"],
    "auto_uart": "bool",
    "icon": "bool",
    "logo": "bool",
    "changelog": "bool",
    "hassio_api": "bool",
    "homeassistant_api": "bool",
    "stdin": "bool",
    "webui": "null|http(s)://[HOST]:port/xy/zx",
    "gpio": "bool",
    "audio": "bool",
    "audio_input": "null|0,0",
    "audio_output": "null|0,0"
    "audio_output": "null|0,0",
    "services": "null|['mqtt']",
    "discovery": "null|['component/platform']"
}
```

- GET `/addons/{addon}/icon`

- GET `/addons/{addon}/logo`

- GET `/addons/{addon}/changelog`

- POST `/addons/{addon}/options`

```json
@@ -401,7 +466,7 @@ Get all available addons.
}
```

For reset custom network/audio settings, set it `null`.
Reset custom network/audio/options, set it `null`.

- POST `/addons/{addon}/start`

@@ -409,32 +474,135 @@ For reset custom network/audio settings, set it `null`.

- POST `/addons/{addon}/install`

Optional:

```json
{
    "version": "VERSION"
}
```

- POST `/addons/{addon}/uninstall`

- POST `/addons/{addon}/update`

Optional:

```json
{
    "version": "VERSION"
}
```

- GET `/addons/{addon}/logs`

Output is the raw Docker log.

- POST `/addons/{addon}/restart`

- POST `/addons/{addon}/rebuild`

Only supported for local-build addons.

- POST `/addons/{addon}/stdin`

Write data to add-on stdin.

- GET `/addons/{addon}/stats`
```json
{
    "cpu_percent": 0.0,
    "memory_usage": 283123,
    "memory_limit": 329392,
    "network_tx": 0,
    "network_rx": 0,
    "blk_read": 0,
    "blk_write": 0
}
```

### Service discovery

- GET `/services/discovery`
```json
{
    "discovery": [
        {
            "provider": "name",
            "uuid": "uuid",
            "component": "component",
            "platform": "null|platform",
            "config": {}
        }
    ]
}
```

- GET `/services/discovery/{UUID}`
```json
{
    "provider": "name",
    "uuid": "uuid",
    "component": "component",
    "platform": "null|platform",
    "config": {}
}
```

- POST `/services/discovery`
```json
{
    "component": "component",
    "platform": "null|platform",
    "config": {}
}
```

return:
```json
{
    "uuid": "uuid"
}
```

- DEL `/services/discovery/{UUID}`

- GET `/services`
```json
{
    "services": [
        {
            "slug": "name",
            "available": "bool",
            "provider": "null|name|list"
        }
    ]
}
```

- GET `/services/xy`
```json
{
    "available": "bool",
    "xy": {}
}
```

#### MQTT

This service performs auto discovery for Home-Assistant.

- GET `/services/mqtt`
```json
{
    "provider": "name",
    "host": "xy",
    "port": "8883",
    "ssl": "bool",
    "username": "optional",
    "password": "optional",
    "protocol": "3.1.1"
}
```

- POST `/services/mqtt`
```json
{
    "host": "xy",
    "port": "8883",
    "ssl": "bool|optional",
    "username": "optional",
    "password": "optional",
    "protocol": "3.1.1"
}
```

- DEL `/services/mqtt`
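For illustration, a broker add-on could announce itself with a sketch like this (all values here are hypothetical):

```python
# Sketch: register MQTT connection details with the supervisor.
import os

import requests

requests.post(
    "http://hassio/services/mqtt",
    headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
    json={
        "host": "172.30.32.1",  # hypothetical broker address
        "port": "8883",
        "ssl": True,
        "username": "broker-user",
        "password": "broker-pass",
        "protocol": "3.1.1",
    },
    timeout=10,
)
```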
## Host Control

Communicate over UNIX socket with a host daemon.
28 Dockerfile Normal file
@@ -0,0 +1,28 @@
ARG BUILD_FROM
FROM $BUILD_FROM

# Add env
ENV LANG C.UTF-8

# Setup base
RUN apk add --no-cache \
        python3 \
        git \
        socat \
        libstdc++ \
    && apk add --no-cache --virtual .build-dependencies \
        make \
        python3-dev \
        g++ \
    && pip3 install --no-cache-dir \
        uvloop==0.9.1 \
        cchardet==2.1.1 \
        pycryptodome==3.4.11 \
    && apk del .build-dependencies

# Install HassIO
COPY . /usr/src/hassio
RUN pip3 install --no-cache-dir /usr/src/hassio \
    && rm -rf /usr/src/hassio

CMD [ "python3", "-m", "hassio" ]
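A minimal sketch of building this image with the Docker SDK for Python; the tag and the `BUILD_FROM` base image below are placeholders, not the project's official build flow:

```python
# Sketch: build the image defined by the Dockerfile above.
import docker

client = docker.from_env()
image, _logs = client.images.build(
    path=".",  # directory containing the Dockerfile
    tag="local/hassio-supervisor:dev",
    buildargs={"BUILD_FROM": "homeassistant/amd64-base:latest"},
)
print(image.id)
```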
10 README.md
@@ -1,8 +1,12 @@
# Hass.io

### First private cloud solution for home automation.
## First private cloud solution for home automation

Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.
Hass.io is a Docker-based system for managing your Home Assistant installation
and related applications. The system is controlled via Home Assistant which
communicates with the Supervisor. The Supervisor provides an API to manage the
installation. This includes changing network settings or installing
and updating software.

@@ -11,4 +15,4 @@ Hass.io is a Docker based system for managing your Home Assistant installation a

## Installation

Installation instructions can be found at [https://home-assistant.io/hassio](https://home-assistant.io/hassio).
Installation instructions can be found at <https://home-assistant.io/hassio>.
@@ -10,36 +10,48 @@ import hassio.core as core
_LOGGER = logging.getLogger(__name__)


def attempt_use_uvloop():
    """Attempt to use uvloop."""
    try:
        import uvloop
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    except ImportError:
        pass


# pylint: disable=invalid-name
if __name__ == "__main__":
    bootstrap.initialize_logging()
    attempt_use_uvloop()
    loop = asyncio.get_event_loop()

    if not bootstrap.check_environment():
        exit(1)
        sys.exit(1)

    loop = asyncio.get_event_loop()
    # init executor pool
    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
    loop.set_default_executor(executor)

    _LOGGER.info("Initialize Hassio setup")
    config = bootstrap.initialize_system_data()
    hassio = core.HassIO(loop, config)
    coresys = bootstrap.initialize_coresys(loop)
    hassio = core.HassIO(coresys)

    bootstrap.migrate_system_env(config)
    bootstrap.migrate_system_env(coresys)

    _LOGGER.info("Run Hassio setup")
    _LOGGER.info("Setup HassIO")
    loop.run_until_complete(hassio.setup())

    _LOGGER.info("Start Hassio")
    loop.call_soon_threadsafe(loop.create_task, hassio.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)

    _LOGGER.info("Run Hassio loop")
    loop.run_forever()

    _LOGGER.info("Cleanup system")
    executor.shutdown(wait=False)
    loop.close()
    try:
        _LOGGER.info("Run HassIO")
        loop.run_forever()
    finally:
        _LOGGER.info("Stopping HassIO")
        loop.run_until_complete(hassio.stop())
        executor.shutdown(wait=False)
        loop.close()

    _LOGGER.info("Close Hassio")
    sys.exit(hassio.exit_code)
    sys.exit(0)
@@ -4,47 +4,59 @@ import logging

from .addon import Addon
from .repository import Repository
from .data import Data
from .data import AddonsData
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))


class AddonManager(object):
class AddonManager(CoreSysAttributes):
    """Manage addons inside HassIO."""

    def __init__(self, config, loop, dock):
    def __init__(self, coresys):
        """Initialize docker base wrapper."""
        self.loop = loop
        self.config = config
        self.dock = dock
        self.data = Data(config)
        self.addons = {}
        self.repositories = {}
        self.coresys = coresys
        self.data = AddonsData(coresys)
        self.addons_obj = {}
        self.repositories_obj = {}

    @property
    def list_addons(self):
        """Return a list of all addons."""
        return list(self.addons.values())
        return list(self.addons_obj.values())

    @property
    def list_installed(self):
        """Return a list of installed addons."""
        return [addon for addon in self.addons_obj.values()
                if addon.is_installed]

    @property
    def list_repositories(self):
        """Return list of addon repositories."""
        return list(self.repositories.values())
        return list(self.repositories_obj.values())

    def get(self, addon_slug):
        """Return a adddon from slug."""
        return self.addons.get(addon_slug)
        """Return a add-on from slug."""
        return self.addons_obj.get(addon_slug)

    async def prepare(self):
    def from_uuid(self, uuid):
        """Return a add-on from uuid."""
        for addon in self.list_addons:
            if addon.is_installed and uuid == addon.uuid:
                return addon
        return None

    async def load(self):
        """Startup addon management."""
        self.data.reload()

        # init hassio built-in repositories
        repositories = \
            set(self.config.addons_repositories) | BUILTIN_REPOSITORIES
            set(self._config.addons_repositories) | BUILTIN_REPOSITORIES

        # init custom repositories & load addons
        await self.load_repositories(repositories)

@@ -52,9 +64,9 @@ class AddonManager(object):
    async def reload(self):
        """Update addons from repo and reload list."""
        tasks = [repository.update() for repository in
                 self.repositories.values()]
                 self.repositories_obj.values()]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks, loop=self._loop)

        # read data from repositories
        self.data.reload()

@@ -65,29 +77,29 @@ class AddonManager(object):
    async def load_repositories(self, list_repositories):
        """Add a new custom repository."""
        new_rep = set(list_repositories)
        old_rep = set(self.repositories)
        old_rep = set(self.repositories_obj)

        # add new repository
        async def _add_repository(url):
            """Helper function to async add repository."""
            repository = Repository(self.config, self.loop, self.data, url)
            repository = Repository(self.coresys, url)
            if not await repository.load():
                _LOGGER.error("Can't load from repository %s", url)
                return
            self.repositories[url] = repository
            self.repositories_obj[url] = repository

            # don't add built-in repository to config
            if url not in BUILTIN_REPOSITORIES:
                self.config.add_addon_repository(url)
                self._config.add_addon_repository(url)

        tasks = [_add_repository(url) for url in new_rep - old_rep]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks, loop=self._loop)

        # del new repository
        for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
            self.repositories.pop(url).remove()
            self.config.drop_addon_repository(url)
            self.repositories_obj.pop(url).remove()
            self._config.drop_addon_repository(url)

        # update data
        self.data.reload()

@@ -98,8 +110,8 @@ class AddonManager(object):
        all_addons = set(self.data.system) | set(self.data.cache)

        # calc diff
        add_addons = all_addons - set(self.addons)
        del_addons = set(self.addons) - all_addons
        add_addons = all_addons - set(self.addons_obj)
        del_addons = set(self.addons_obj) - all_addons

        _LOGGER.info("Load addons: %d all - %d new - %d remove",
                     len(all_addons), len(add_addons), len(del_addons))

@@ -107,27 +119,27 @@ class AddonManager(object):
        # new addons
        tasks = []
        for addon_slug in add_addons:
            addon = Addon(
                self.config, self.loop, self.dock, self.data, addon_slug)
            addon = Addon(self.coresys, addon_slug)

            tasks.append(addon.load())
            self.addons[addon_slug] = addon
            self.addons_obj[addon_slug] = addon

        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks, loop=self._loop)

        # remove
        for addon_slug in del_addons:
            self.addons.pop(addon_slug)
            self.addons_obj.pop(addon_slug)

    async def auto_boot(self, stage):
        """Boot addons with mode auto."""
        tasks = []
        for addon in self.addons.values():
        for addon in self.addons_obj.values():
            if addon.is_installed and addon.boot == BOOT_AUTO and \
                    addon.startup == stage:
                tasks.append(addon.start())

        _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks, loop=self._loop)
            await asyncio.sleep(self._config.wait_boot, loop=self._loop)
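The repository bookkeeping in `load_repositories` above is plain set arithmetic; a standalone sketch of the same add/remove reconciliation (example URLs are hypothetical):

```python
# Standalone sketch of the set-diff pattern used by load_repositories:
# everything requested but not loaded gets added, everything loaded but
# no longer requested (and not built-in) gets removed.
BUILTIN = {"core", "local"}

requested = {"core", "local", "https://example.com/repo-a"}
loaded = {"core", "local", "https://example.com/repo-b"}

to_add = requested - loaded
to_remove = loaded - requested - BUILTIN

assert to_add == {"https://example.com/repo-a"}
assert to_remove == {"https://example.com/repo-b"}
```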
@@ -12,41 +12,43 @@ import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import (
    validate_options, SCHEMA_ADDON_SNAPSHOT, MAP_VOLUME)
    validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME, RE_SERVICE)
from .utils import check_installed
from ..const import (
    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
    ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
    ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP, ATTR_UUID,
    STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
    ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
    ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT)
from .util import check_installed
from ..dock.addon import DockerAddon
from ..tools import write_json_file, read_json_file
    ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES)
from ..coresys import CoreSysAttributes
from ..docker.addon import DockerAddon
from ..utils.json import write_json_file, read_json_file

_LOGGER = logging.getLogger(__name__)

RE_VOLUME = re.compile(MAP_VOLUME)
RE_WEBUI = re.compile(r"^(.*\[HOST\]:)\[PORT:(\d+)\](.*)$")
RE_WEBUI = re.compile(
    r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")
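The rewritten pattern captures either a static scheme or a `[PROTO:...]` placeholder plus the port token; a quick standalone check of what the groups yield (example templates are hypothetical):

```python
# Standalone sketch: what the new RE_WEBUI groups capture.
import re

RE_WEBUI = re.compile(
    r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")

static = RE_WEBUI.match("http://[HOST]:[PORT:8080]/admin")
assert static.group("s_prefix") == "http"
assert static.group("t_port") == "8080"
assert static.group("s_suffix") == "/admin"

dynamic = RE_WEBUI.match("[PROTO:ssl]://[HOST]:[PORT:8123]/")
assert dynamic.group("t_proto") == "ssl"  # option key deciding http vs https
assert dynamic.group("t_port") == "8123"
```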
class Addon(object):
|
||||
class Addon(CoreSysAttributes):
|
||||
"""Hold data for addon inside HassIO."""
|
||||
|
||||
def __init__(self, config, loop, dock, data, slug):
|
||||
def __init__(self, coresys, slug):
|
||||
"""Initialize data holder."""
|
||||
self.loop = loop
|
||||
self.config = config
|
||||
self.data = data
|
||||
self._id = slug
|
||||
self.coresys = coresys
|
||||
self.instance = DockerAddon(coresys, slug)
|
||||
|
||||
self.docker = DockerAddon(config, loop, dock, self)
|
||||
self._id = slug
|
||||
|
||||
async def load(self):
|
||||
"""Async initialize of object."""
|
||||
if self.is_installed:
|
||||
await self.docker.attach()
|
||||
await self.instance.attach()
|
||||
|
||||
@property
|
||||
def slug(self):
|
||||
@@ -56,90 +58,96 @@ class Addon(object):
|
||||
@property
|
||||
def _mesh(self):
|
||||
"""Return addon data from system or cache."""
|
||||
return self.data.system.get(self._id, self.data.cache.get(self._id))
|
||||
return self._data.system.get(self._id, self._data.cache.get(self._id))
|
||||
|
||||
@property
|
||||
def _data(self):
|
||||
"""Return addons data storage."""
|
||||
return self._addons.data
|
||||
|
||||
@property
|
||||
def is_installed(self):
|
||||
"""Return True if a addon is installed."""
|
||||
return self._id in self.data.system
|
||||
return self._id in self._data.system
|
||||
|
||||
@property
|
||||
def is_detached(self):
|
||||
"""Return True if addon is detached."""
|
||||
return self._id not in self.data.cache
|
||||
return self._id not in self._data.cache
|
||||
|
||||
@property
|
||||
def version_installed(self):
|
||||
"""Return installed version."""
|
||||
return self.data.user.get(self._id, {}).get(ATTR_VERSION)
|
||||
return self._data.user.get(self._id, {}).get(ATTR_VERSION)
|
||||
|
||||
def _set_install(self, version):
|
||||
"""Set addon as installed."""
|
||||
self.data.system[self._id] = deepcopy(self.data.cache[self._id])
|
||||
self.data.user[self._id] = {
|
||||
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
|
||||
self._data.user[self._id] = {
|
||||
ATTR_OPTIONS: {},
|
||||
ATTR_VERSION: version,
|
||||
}
|
||||
self.data.save()
|
||||
self._data.save_data()
|
||||
|
||||
def _set_uninstall(self):
|
||||
"""Set addon as uninstalled."""
|
||||
self.data.system.pop(self._id, None)
|
||||
self.data.user.pop(self._id, None)
|
||||
self.data.save()
|
||||
self._data.system.pop(self._id, None)
|
||||
self._data.user.pop(self._id, None)
|
||||
self._data.save_data()
|
||||
|
||||
def _set_update(self, version):
|
||||
"""Update version of addon."""
|
||||
self.data.system[self._id] = deepcopy(self.data.cache[self._id])
|
||||
self.data.user[self._id][ATTR_VERSION] = version
|
||||
self.data.save()
|
||||
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
|
||||
self._data.user[self._id][ATTR_VERSION] = version
|
||||
self._data.save_data()
|
||||
|
||||
def _restore_data(self, user, system):
|
||||
"""Restore data to addon."""
|
||||
self.data.user[self._id] = deepcopy(user)
|
||||
self.data.system[self._id] = deepcopy(system)
|
||||
self.data.save()
|
||||
self._data.user[self._id] = deepcopy(user)
|
||||
self._data.system[self._id] = deepcopy(system)
|
||||
self._data.save_data()
|
||||
|
||||
@property
|
||||
def options(self):
|
||||
"""Return options with local changes."""
|
||||
if self.is_installed:
|
||||
return {
|
||||
**self.data.system[self._id][ATTR_OPTIONS],
|
||||
**self.data.user[self._id][ATTR_OPTIONS],
|
||||
**self._data.system[self._id][ATTR_OPTIONS],
|
||||
**self._data.user[self._id][ATTR_OPTIONS]
|
||||
}
|
||||
return self.data.cache[self._id][ATTR_OPTIONS]
|
||||
return self._data.cache[self._id][ATTR_OPTIONS]
|
||||
|
||||
@options.setter
|
||||
def options(self, value):
|
||||
"""Store user addon options."""
|
||||
self.data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
|
||||
self.data.save()
|
||||
if value is None:
|
||||
self._data.user[self._id][ATTR_OPTIONS] = {}
|
||||
else:
|
||||
self._data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
|
||||
|
||||
@property
|
||||
def boot(self):
|
||||
"""Return boot config with prio local settings."""
|
||||
if ATTR_BOOT in self.data.user.get(self._id, {}):
|
||||
return self.data.user[self._id][ATTR_BOOT]
|
||||
if ATTR_BOOT in self._data.user.get(self._id, {}):
|
||||
return self._data.user[self._id][ATTR_BOOT]
|
||||
return self._mesh[ATTR_BOOT]
|
||||
|
||||
@boot.setter
|
||||
def boot(self, value):
|
||||
"""Store user boot options."""
|
||||
self.data.user[self._id][ATTR_BOOT] = value
|
||||
self.data.save()
|
||||
self._data.user[self._id][ATTR_BOOT] = value
|
||||
|
||||
@property
|
||||
def auto_update(self):
|
||||
"""Return if auto update is enable."""
|
||||
if ATTR_AUTO_UPDATE in self.data.user.get(self._id, {}):
|
||||
return self.data.user[self._id][ATTR_AUTO_UPDATE]
|
||||
if ATTR_AUTO_UPDATE in self._data.user.get(self._id, {}):
|
||||
return self._data.user[self._id][ATTR_AUTO_UPDATE]
|
||||
return None
|
||||
|
||||
@auto_update.setter
|
||||
def auto_update(self, value):
|
||||
"""Set auto update."""
|
||||
self.data.user[self._id][ATTR_AUTO_UPDATE] = value
|
||||
self.data.save()
|
||||
self._data.user[self._id][ATTR_AUTO_UPDATE] = value
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -151,11 +159,31 @@ class Addon(object):
|
||||
"""Return timeout of addon for docker stop."""
|
||||
return self._mesh[ATTR_TIMEOUT]
|
||||
|
||||
@property
|
||||
def uuid(self):
|
||||
"""Return a API token for this add-on."""
|
||||
if self.is_installed:
|
||||
return self._data.user[self._id][ATTR_UUID]
|
||||
return None
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
"""Return description of addon."""
|
||||
return self._mesh[ATTR_DESCRIPTON]
|
||||
|
||||
@property
|
||||
def long_description(self):
|
||||
"""Return README.md as long_description."""
|
||||
readme = Path(self.path_location, 'README.md')
|
||||
|
||||
# If readme not exists
|
||||
if not readme.exists():
|
||||
return None
|
||||
|
||||
# Return data
|
||||
with readme.open('r') as readme_file:
|
||||
return readme_file.read()
|
||||
|
||||
@property
|
||||
def repository(self):
|
||||
"""Return repository of addon."""
|
||||
@@ -164,8 +192,8 @@ class Addon(object):
|
||||
@property
|
||||
def last_version(self):
|
||||
"""Return version of addon."""
|
||||
if self._id in self.data.cache:
|
||||
return self.data.cache[self._id][ATTR_VERSION]
|
||||
if self._id in self._data.cache:
|
||||
return self._data.cache[self._id][ATTR_VERSION]
|
||||
return self.version_installed
|
||||
|
||||
@property
|
||||
@@ -173,63 +201,106 @@ class Addon(object):
|
||||
"""Return startup type of addon."""
|
||||
return self._mesh.get(ATTR_STARTUP)
|
||||
|
||||
@property
|
||||
def services(self):
|
||||
"""Return dict of services with rights."""
|
||||
raw_services = self._mesh.get(ATTR_SERVICES)
|
||||
if not raw_services:
|
||||
return None
|
||||
|
||||
formated_services = {}
|
||||
for data in raw_services:
|
||||
service = RE_SERVICE.match(data)
|
||||
formated_services[service.group('service')] = \
|
||||
service.group('rights') or 'ro'
|
||||
|
||||
return formated_services
|
||||
|
||||
@property
|
||||
def discovery(self):
|
||||
"""Return list of discoverable components/platforms."""
|
||||
return self._mesh.get(ATTR_DISCOVERY)
|
||||
|
||||
@property
|
||||
def ports(self):
|
||||
"""Return ports of addon."""
|
||||
if self.network_mode != 'bridge' or ATTR_PORTS not in self._mesh:
|
||||
return
|
||||
if self.host_network or ATTR_PORTS not in self._mesh:
|
||||
return None
|
||||
|
||||
if not self.is_installed or \
|
||||
ATTR_NETWORK not in self.data.user[self._id]:
|
||||
ATTR_NETWORK not in self._data.user[self._id]:
|
||||
return self._mesh[ATTR_PORTS]
|
||||
return self.data.user[self._id][ATTR_NETWORK]
|
||||
return self._data.user[self._id][ATTR_NETWORK]
|
||||
|
||||
@ports.setter
|
||||
def ports(self, value):
|
||||
"""Set custom ports of addon."""
|
||||
if value is None:
|
||||
self.data.user[self._id].pop(ATTR_NETWORK, None)
|
||||
self._data.user[self._id].pop(ATTR_NETWORK, None)
|
||||
else:
|
||||
new_ports = {}
|
||||
for container_port, host_port in value.items():
|
||||
if container_port in self._mesh.get(ATTR_PORTS, {}):
|
||||
new_ports[container_port] = host_port
|
||||
|
||||
self.data.user[self._id][ATTR_NETWORK] = new_ports
|
||||
|
||||
self.data.save()
|
||||
self._data.user[self._id][ATTR_NETWORK] = new_ports
|
||||
|
||||
@property
|
||||
def webui(self):
|
||||
"""Return URL to webui or None."""
|
||||
if ATTR_WEBUI not in self._mesh:
|
||||
return
|
||||
return None
|
||||
webui = RE_WEBUI.match(self._mesh[ATTR_WEBUI])
|
||||
|
||||
webui = self._mesh[ATTR_WEBUI]
|
||||
dock_port = RE_WEBUI.sub(r"\2", webui)
|
||||
# extract arguments
|
||||
t_port = webui.group('t_port')
|
||||
t_proto = webui.group('t_proto')
|
||||
s_prefix = webui.group('s_prefix') or ""
|
||||
s_suffix = webui.group('s_suffix') or ""
|
||||
|
||||
# search host port for this docker port
|
||||
if self.ports is None:
|
||||
real_port = dock_port
|
||||
port = t_port
|
||||
else:
|
||||
real_port = self.ports.get("{}/tcp".format(dock_port), dock_port)
|
||||
port = self.ports.get(f"{t_port}/tcp", t_port)
|
||||
|
||||
# for interface config or port lists
|
||||
if isinstance(real_port, (tuple, list)):
|
||||
real_port = real_port[-1]
|
||||
if isinstance(port, (tuple, list)):
|
||||
port = port[-1]
|
||||
|
||||
return RE_WEBUI.sub(r"\g<1>{}\g<3>".format(real_port), webui)
|
||||
# lookup the correct protocol from config
|
||||
if t_proto:
|
||||
proto = 'https' if self.options[t_proto] else 'http'
|
||||
else:
|
||||
proto = s_prefix
|
||||
|
||||
return f"{proto}://[HOST]:{port}{s_suffix}"
|
||||
|
||||
@property
|
||||
def network_mode(self):
|
||||
"""Return network mode of addon."""
|
||||
if self._mesh[ATTR_HOST_NETWORK]:
|
||||
return 'host'
|
||||
return 'bridge'
|
||||
def host_network(self):
|
||||
"""Return True if addon run on host network."""
|
||||
return self._mesh[ATTR_HOST_NETWORK]
|
||||
|
||||
@property
|
||||
def host_ipc(self):
|
||||
"""Return True if addon run on host IPC namespace."""
|
||||
return self._mesh[ATTR_HOST_IPC]
|
||||
|
||||
@property
|
||||
def host_dbus(self):
|
||||
"""Return True if addon run on host DBUS."""
|
||||
return self._mesh[ATTR_HOST_DBUS]
|
||||
|
||||
@property
|
||||
def devices(self):
|
||||
"""Return devices of addon."""
|
||||
return self._mesh.get(ATTR_DEVICES)
|
||||
|
||||
@property
|
||||
def auto_uart(self):
|
||||
"""Return True if we should map all uart device."""
|
||||
return self._mesh.get(ATTR_AUTO_UART)
|
||||
|
||||
@property
|
||||
def tmpfs(self):
|
||||
"""Return tmpfs of addon."""
|
||||
@@ -246,10 +317,30 @@ class Addon(object):
|
||||
return self._mesh.get(ATTR_PRIVILEGED)
|
||||
|
||||
@property
|
||||
def use_hassio_api(self):
|
||||
def legacy(self):
|
||||
"""Return if the add-on don't support hass labels."""
|
||||
return self._mesh.get(ATTR_LEGACY)
|
||||
|
||||
@property
|
||||
def access_hassio_api(self):
|
||||
"""Return True if the add-on access to hassio api."""
|
||||
return self._mesh[ATTR_HASSIO_API]
|
||||
|
||||
@property
|
||||
def access_homeassistant_api(self):
|
||||
"""Return True if the add-on access to Home-Assistant api proxy."""
|
||||
return self._mesh[ATTR_HOMEASSISTANT_API]
|
||||
|
||||
@property
|
||||
def with_stdin(self):
|
||||
"""Return True if the add-on access use stdin input."""
|
||||
return self._mesh[ATTR_STDIN]
|
||||
|
||||
@property
|
||||
def with_gpio(self):
|
||||
"""Return True if the add-on access to gpio interface."""
|
||||
return self._mesh[ATTR_GPIO]
|
||||
|
||||
@property
|
||||
def with_audio(self):
|
||||
"""Return True if the add-on access to audio."""
|
||||
@@ -259,52 +350,61 @@ class Addon(object):
|
||||
def audio_output(self):
|
||||
"""Return ALSA config for output or None."""
|
||||
if not self.with_audio:
|
||||
return
|
||||
return None
|
||||
|
||||
setting = self.config.audio_output
|
||||
if self.is_installed and ATTR_AUDIO_OUTPUT in self.data.user[self._id]:
|
||||
setting = self.data.user[self._id][ATTR_AUDIO_OUTPUT]
|
||||
setting = self._config.audio_output
|
||||
if self.is_installed and \
|
||||
ATTR_AUDIO_OUTPUT in self._data.user[self._id]:
|
||||
setting = self._data.user[self._id][ATTR_AUDIO_OUTPUT]
|
||||
return setting
|
||||
|
||||
@audio_output.setter
|
||||
def audio_output(self, value):
|
||||
"""Set/remove custom audio output settings."""
|
||||
if value is None:
|
||||
self.data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
|
||||
self._data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
|
||||
else:
|
||||
self.data.user[self._id][ATTR_AUDIO_OUTPUT] = value
|
||||
self.data.save()
|
||||
self._data.user[self._id][ATTR_AUDIO_OUTPUT] = value
|
||||
|
||||
@property
|
||||
def audio_input(self):
|
||||
"""Return ALSA config for input or None."""
|
||||
if not self.with_audio:
|
||||
return
|
||||
return None
|
||||
|
||||
setting = self.config.audio_input
|
||||
if self.is_installed and ATTR_AUDIO_INPUT in self.data.user[self._id]:
|
||||
setting = self.data.user[self._id][ATTR_AUDIO_INPUT]
|
||||
setting = self._config.audio_input
|
||||
if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]:
|
||||
setting = self._data.user[self._id][ATTR_AUDIO_INPUT]
|
||||
return setting
|
||||
|
||||
@audio_input.setter
|
||||
def audio_input(self, value):
|
||||
"""Set/remove custom audio input settings."""
|
||||
if value is None:
|
||||
self.data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
|
||||
self._data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
|
||||
else:
|
||||
self.data.user[self._id][ATTR_AUDIO_INPUT] = value
|
||||
self.data.save()
|
||||
self._data.user[self._id][ATTR_AUDIO_INPUT] = value
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
"""Return url of addon."""
|
||||
return self._mesh.get(ATTR_URL)
|
||||
|
||||
@property
|
||||
def with_icon(self):
|
||||
"""Return True if a icon exists."""
|
||||
return self.path_icon.exists()
|
||||
|
||||
@property
|
||||
def with_logo(self):
|
||||
"""Return True if a logo exists."""
|
||||
return self.path_logo.exists()
|
||||
|
||||
@property
|
||||
def with_changelog(self):
|
||||
"""Return True if a changelog exists."""
|
||||
return self.path_changelog.exists()
|
||||
|
||||
@property
|
||||
def supported_arch(self):
|
||||
"""Return list of supported arch."""
|
||||
@@ -317,11 +417,11 @@ class Addon(object):
|
||||
|
||||
# Repository with dockerhub images
|
||||
if ATTR_IMAGE in addon_data:
|
||||
return addon_data[ATTR_IMAGE].format(arch=self.config.arch)
|
||||
return addon_data[ATTR_IMAGE].format(arch=self._arch)
|
||||
|
||||
# local build
|
||||
return "{}/{}-addon-{}".format(
|
||||
addon_data[ATTR_REPOSITORY], self.config.arch,
|
||||
addon_data[ATTR_REPOSITORY], self._arch,
|
||||
addon_data[ATTR_SLUG])
|
||||
|
||||
@property
|
||||
@@ -342,12 +442,12 @@ class Addon(object):
|
||||
@property
|
||||
def path_data(self):
|
||||
"""Return addon data path inside supervisor."""
|
||||
return Path(self.config.path_addons_data, self._id)
|
||||
return Path(self._config.path_addons_data, self._id)
|
||||
|
||||
@property
|
||||
def path_extern_data(self):
|
||||
"""Return addon data path external for docker."""
|
||||
return PurePath(self.config.path_extern_addons_data, self._id)
|
||||
return PurePath(self._config.path_extern_addons_data, self._id)
|
||||
|
||||
@property
|
||||
def path_options(self):
|
||||
@@ -359,11 +459,25 @@ class Addon(object):
|
||||
"""Return path to this addon."""
|
||||
return Path(self._mesh[ATTR_LOCATON])
|
||||
|
||||
@property
|
||||
def path_icon(self):
|
||||
"""Return path to addon icon."""
|
||||
return Path(self.path_location, 'icon.png')
|
||||
|
||||
@property
|
||||
def path_logo(self):
|
||||
"""Return path to addon logo."""
|
||||
return Path(self.path_location, 'logo.png')
|
||||
|
||||
@property
|
||||
def path_changelog(self):
|
||||
"""Return path to addon changelog."""
|
||||
return Path(self.path_location, 'CHANGELOG.md')
|
||||
|
||||
def save_data(self):
|
||||
"""Save data of addon."""
|
||||
self._addons.data.save_data()
|
||||
|
||||
def write_options(self):
|
||||
"""Return True if addon options is written to data."""
|
||||
schema = self.schema
|
||||
@@ -371,10 +485,14 @@ class Addon(object):
|
||||
|
||||
try:
|
||||
schema(options)
|
||||
return write_json_file(self.path_options, options)
|
||||
write_json_file(self.path_options, options)
|
||||
except vol.Invalid as ex:
|
||||
_LOGGER.error("Addon %s have wrong options -> %s", self._id,
|
||||
_LOGGER.error("Addon %s have wrong options: %s", self._id,
|
||||
humanize_error(options, ex))
|
||||
except (OSError, json.JSONDecodeError) as err:
|
||||
_LOGGER.error("Addon %s can't write options: %s", self._id, err)
|
||||
else:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@@ -393,8 +511,8 @@ class Addon(object):
|
||||
return True
|
||||
|
||||
# load next schema
|
||||
new_raw_schema = self.data.cache[self._id][ATTR_SCHEMA]
|
||||
default_options = self.data.cache[self._id][ATTR_OPTIONS]
|
||||
new_raw_schema = self._data.cache[self._id][ATTR_SCHEMA]
|
||||
default_options = self._data.cache[self._id][ATTR_OPTIONS]
|
||||
|
||||
# if disabled
|
||||
if isinstance(new_raw_schema, bool):
|
||||
@@ -402,7 +520,7 @@ class Addon(object):
|
||||
|
||||
# merge options
|
||||
options = {
|
||||
**self.data.user[self._id][ATTR_OPTIONS],
|
||||
**self._data.user[self._id][ATTR_OPTIONS],
|
||||
**default_options,
|
||||
}
|
||||
|
||||
@@ -417,11 +535,11 @@ class Addon(object):
|
||||
return False
|
||||
return True
|
||||
|
||||
async def install(self, version=None):
|
||||
async def install(self):
|
||||
"""Install a addon."""
|
||||
if self.config.arch not in self.supported_arch:
|
||||
if self._arch not in self.supported_arch:
|
||||
_LOGGER.error(
|
||||
"Addon %s not supported on %s", self._id, self.config.arch)
|
||||
"Addon %s not supported on %s", self._id, self._arch)
|
||||
return False
|
||||
|
||||
if self.is_installed:
|
||||
@@ -433,17 +551,16 @@ class Addon(object):
|
||||
"Create Home-Assistant addon data folder %s", self.path_data)
|
||||
self.path_data.mkdir()
|
||||
|
||||
version = version or self.last_version
|
||||
if not await self.docker.install(version):
|
||||
if not await self.instance.install(self.last_version):
|
||||
return False
|
||||
|
||||
self._set_install(version)
|
||||
self._set_install(self.last_version)
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
async def uninstall(self):
|
||||
"""Remove a addon."""
|
||||
if not await self.docker.remove():
|
||||
if not await self.instance.remove():
|
||||
return False
|
||||
|
||||
if self.path_data.is_dir():
|
||||
@@ -459,17 +576,17 @@ class Addon(object):
|
||||
if not self.is_installed:
|
||||
return STATE_NONE
|
||||
|
||||
if await self.docker.is_running():
|
||||
if await self.instance.is_running():
|
||||
return STATE_STARTED
|
||||
return STATE_STOPPED
|
||||
|
||||
@check_installed
|
||||
def start(self):
|
||||
"""Set options and start addon.
|
||||
async def start(self):
|
||||
"""Set options and start addon."""
|
||||
if not self.write_options():
|
||||
return False
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.run()
|
||||
return await self.instance.run()
|
||||
|
||||
@check_installed
|
||||
def stop(self):
|
||||
@@ -477,35 +594,31 @@ class Addon(object):
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.stop()
|
||||
return self.instance.stop()
|
||||
|
||||
@check_installed
|
||||
async def update(self, version=None):
|
||||
async def update(self):
|
||||
"""Update addon."""
|
||||
version = version or self.last_version
|
||||
last_state = await self.state()
|
||||
|
||||
if version == self.version_installed:
|
||||
_LOGGER.warning(
|
||||
"Addon %s is already installed in %s", self._id, version)
|
||||
if self.last_version == self.version_installed:
|
||||
_LOGGER.warning("No update available for Addon %s", self._id)
|
||||
return False
|
||||
|
||||
if not await self.docker.update(version):
|
||||
if not await self.instance.update(self.last_version):
|
||||
return False
|
||||
self._set_update(version)
|
||||
self._set_update(self.last_version)
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await self.docker.run()
|
||||
await self.start()
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
def restart(self):
|
||||
"""Restart addon.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.restart()
|
||||
async def restart(self):
|
||||
"""Restart addon."""
|
||||
await self.stop()
|
||||
return await self.start()
|
||||
|
||||
@check_installed
|
||||
def logs(self):
|
||||
@@ -513,88 +626,135 @@ class Addon(object):
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.logs()
|
||||
return self.instance.logs()
|
||||
|
||||
@check_installed
|
||||
def stats(self):
|
||||
"""Return stats of container.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.stats()
|
||||
|
||||
@check_installed
|
||||
async def rebuild(self):
|
||||
"""Performe a rebuild of local build addon."""
|
||||
last_state = await self.state()
|
||||
|
||||
if not self.need_build:
|
||||
_LOGGER.error("Can't rebuild a none local build addon!")
|
||||
return False
|
||||
|
||||
# remove docker container but not addon config
|
||||
if not await self.instance.remove():
|
||||
return False
|
||||
|
||||
if not await self.instance.install(self.version_installed):
|
||||
return False
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await self.start()
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
async def write_stdin(self, data):
|
||||
"""Write data to add-on stdin.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
if not self.with_stdin:
|
||||
_LOGGER.error("Add-on don't support write to stdin!")
|
||||
return False
|
||||
|
||||
return await self.instance.write_stdin(data)
|
||||
|
||||
@check_installed
|
||||
async def snapshot(self, tar_file):
|
||||
"""Snapshot a state of a addon."""
|
||||
with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
|
||||
with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
|
||||
# store local image
|
||||
if self.need_build and not await \
|
||||
self.docker.export_image(Path(temp, "image.tar")):
|
||||
                    self.instance.export_image(Path(temp, "image.tar")):
                return False

            data = {
-               ATTR_USER: self.data.user.get(self._id, {}),
-               ATTR_SYSTEM: self.data.system.get(self._id, {}),
+               ATTR_USER: self._data.user.get(self._id, {}),
+               ATTR_SYSTEM: self._data.system.get(self._id, {}),
                ATTR_VERSION: self.version_installed,
                ATTR_STATE: await self.state(),
            }

            # store local configs/state
-           if not write_json_file(Path(temp, "addon.json"), data):
-               _LOGGER.error("Can't write addon.json for %s", self._id)
+           try:
+               write_json_file(Path(temp, "addon.json"), data)
+           except (OSError, json.JSONDecodeError) as err:
+               _LOGGER.error("Can't save meta for %s: %s", self._id, err)
                return False

            # write into tarfile
-           def _create_tar():
+           def _write_tarfile():
                """Write tar inside loop."""
-               with tarfile.open(tar_file, "w:gz",
-                                 compresslevel=1) as snapshot:
+               with tar_file as snapshot:
                    snapshot.add(temp, arcname=".")
                    snapshot.add(self.path_data, arcname="data")

            try:
-               await self.loop.run_in_executor(None, _create_tar)
-           except tarfile.TarError as err:
-               _LOGGER.error("Can't write tarfile %s -> %s", tar_file, err)
+               _LOGGER.info("Build snapshot for addon %s", self._id)
+               await self._loop.run_in_executor(None, _write_tarfile)
+           except (tarfile.TarError, OSError) as err:
+               _LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
                return False

        _LOGGER.info("Finish snapshot for addon %s", self._id)
        return True

    async def restore(self, tar_file):
        """Restore a state of a addon."""
-       with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
+       with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
            # extract snapshot
-           def _extract_tar():
+           def _extract_tarfile():
                """Extract tar snapshot."""
-               with tarfile.open(tar_file, "r:gz") as snapshot:
+               with tar_file as snapshot:
                    snapshot.extractall(path=Path(temp))

            try:
-               await self.loop.run_in_executor(None, _extract_tar)
+               await self._loop.run_in_executor(None, _extract_tarfile)
            except tarfile.TarError as err:
-               _LOGGER.error("Can't read tarfile %s -> %s", tar_file, err)
+               _LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
                return False

            # read snapshot data
            try:
                data = read_json_file(Path(temp, "addon.json"))
            except (OSError, json.JSONDecodeError) as err:
-               _LOGGER.error("Can't read addon.json -> %s", err)
+               _LOGGER.error("Can't read addon.json: %s", err)

            # validate
            try:
                data = SCHEMA_ADDON_SNAPSHOT(data)
            except vol.Invalid as err:
-               _LOGGER.error("Can't validate %s, snapshot data -> %s",
+               _LOGGER.error("Can't validate %s, snapshot data: %s",
                              self._id, humanize_error(data, err))
                return False

            # restore data / reload addon
            _LOGGER.info("Restore config for addon %s", self._id)
            self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM])

            # check version / restore image
            version = data[ATTR_VERSION]
-           if version != self.docker.version:
+           if not await self.instance.exists():
                _LOGGER.info("Restore image for addon %s", self._id)

                image_file = Path(temp, "image.tar")
                if image_file.is_file():
-                   await self.docker.import_image(image_file, version)
+                   await self.instance.import_image(image_file, version)
                else:
-                   if await self.docker.install(version):
-                       await self.docker.cleanup()
+                   if await self.instance.install(version):
+                       await self.instance.cleanup()
            else:
-               await self.docker.stop()
+               await self.instance.stop()

            # restore data
            def _restore_data():
@@ -604,13 +764,15 @@ class Addon(object):
                shutil.copytree(str(Path(temp, "data")), str(self.path_data))

            try:
-               await self.loop.run_in_executor(None, _restore_data)
+               _LOGGER.info("Restore data for addon %s", self._id)
+               await self._loop.run_in_executor(None, _restore_data)
            except shutil.Error as err:
-               _LOGGER.error("Can't restore origin data -> %s", err)
+               _LOGGER.error("Can't restore origin data: %s", err)
                return False

            # run addon
            if data[ATTR_STATE] == STATE_STARTED:
                return await self.start()

        _LOGGER.info("Finish restore for addon %s", self._id)
        return True
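Both `_write_tarfile` and `_extract_tarfile` above follow the same pattern: the blocking `tarfile` work runs on the event loop's default executor so the Supervisor stays responsive while large archives are written. A minimal standalone sketch of that pattern; the names and paths here are illustrative, not the Supervisor API:

import asyncio
import tarfile
from pathlib import Path

async def write_snapshot(loop, tar_path: Path, source: Path) -> bool:
    """Run a blocking gzip tar write without stalling the event loop."""

    def _write() -> None:
        # compresslevel=1 trades archive size for speed, as in the old code
        with tarfile.open(tar_path, "w:gz", compresslevel=1) as snapshot:
            snapshot.add(source, arcname=".")

    try:
        await loop.run_in_executor(None, _write)
    except (tarfile.TarError, OSError) as err:
        print(f"Can't write tarfile {tar_path}: {err}")
        return False
    return True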
hassio/addons/build.py (new file, 72 lines)
@@ -0,0 +1,72 @@
+"""HassIO addons build environment."""
+from pathlib import Path
+
+from .validate import SCHEMA_BUILD_CONFIG, BASE_IMAGE
+from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON
+from ..coresys import CoreSysAttributes
+from ..utils.json import JsonConfig
+
+
+class AddonBuild(JsonConfig, CoreSysAttributes):
+    """Handle build options for addons."""
+
+    def __init__(self, coresys, slug):
+        """Initialize addon builder."""
+        self.coresys = coresys
+        self._id = slug
+
+        super().__init__(
+            Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
+
+    def save_data(self):
+        """Ignore save function."""
+        pass
+
+    @property
+    def addon(self):
+        """Return addon of build data."""
+        return self._addons.get(self._id)
+
+    @property
+    def base_image(self):
+        """Base images for this addon."""
+        return self._data[ATTR_BUILD_FROM].get(
+            self._arch, BASE_IMAGE[self._arch])
+
+    @property
+    def squash(self):
+        """Return True or False if squash is active."""
+        return self._data[ATTR_SQUASH]
+
+    @property
+    def additional_args(self):
+        """Return additional docker build arguments."""
+        return self._data[ATTR_ARGS]
+
+    def get_docker_args(self, version):
+        """Create a dict with docker build arguments."""
+        args = {
+            'path': str(self.addon.path_location),
+            'tag': f"{self.addon.image}:{version}",
+            'pull': True,
+            'forcerm': True,
+            'squash': self.squash,
+            'labels': {
+                'io.hass.version': version,
+                'io.hass.arch': self._arch,
+                'io.hass.type': META_ADDON,
+                'io.hass.name': self.addon.name,
+                'io.hass.description': self.addon.description,
+            },
+            'buildargs': {
+                'BUILD_FROM': self.base_image,
+                'BUILD_VERSION': version,
+                'BUILD_ARCH': self._arch,
+                **self.additional_args,
+            }
+        }
+
+        if self.addon.url:
+            args['labels']['io.hass.url'] = self.addon.url
+
+        return args
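The dict that `get_docker_args()` produces lines up with the keyword arguments of docker-py's low-level `APIClient.build()`. A sketch of how such a dict could be consumed; every concrete value below is a placeholder, not something AddonBuild guarantees:

import docker

args = {
    'path': '/data/addons/local/my_addon',  # placeholder location
    'tag': 'local/my-addon:1.0.0',
    'pull': True,
    'forcerm': True,
    'squash': False,
    'labels': {'io.hass.version': '1.0.0', 'io.hass.arch': 'amd64'},
    'buildargs': {
        'BUILD_FROM': 'homeassistant/amd64-base:latest',
        'BUILD_VERSION': '1.0.0',
        'BUILD_ARCH': 'amd64',
    },
}

client = docker.APIClient()
for line in client.build(decode=True, **args):
    if 'stream' in line:
        print(line['stream'], end='')  # streamed build output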
@@ -2,7 +2,7 @@
     "local": {
         "name": "Local Add-Ons",
         "url": "https://home-assistant.io/hassio",
-        "maintainer": "By our self"
+        "maintainer": "you"
     },
     "core": {
         "name": "Built-in Add-Ons",
@@ -3,32 +3,29 @@ import copy
 import logging
 import json
 from pathlib import Path
-import re

 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from .util import extract_hash_from_path
+from .utils import extract_hash_from_path
 from .validate import (
-    SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG,
-    MAP_VOLUME)
+    SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG)
 from ..const import (
     FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
-from ..tools import JsonConfig, read_json_file
+from ..coresys import CoreSysAttributes
+from ..utils.json import JsonConfig, read_json_file

 _LOGGER = logging.getLogger(__name__)

-RE_VOLUME = re.compile(MAP_VOLUME)
-

-class Data(JsonConfig):
+class AddonsData(JsonConfig, CoreSysAttributes):
     """Hold data for addons inside HassIO."""

-    def __init__(self, config):
+    def __init__(self, coresys):
         """Initialize data holder."""
-        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDON_FILE)
-        self.config = config
+        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
+        self.coresys = coresys
         self._repositories = {}
         self._cache = {}

@@ -59,17 +56,17 @@ class Data(JsonConfig):

         # read core repository
         self._read_addons_folder(
-            self.config.path_addons_core, REPOSITORY_CORE)
+            self._config.path_addons_core, REPOSITORY_CORE)

         # read local repository
         self._read_addons_folder(
-            self.config.path_addons_local, REPOSITORY_LOCAL)
+            self._config.path_addons_local, REPOSITORY_LOCAL)

         # add built-in repositories information
         self._set_builtin_repositories()

         # read custom git repositories
-        for repository_element in self.config.path_addons_git.iterdir():
+        for repository_element in self._config.path_addons_git.iterdir():
             if repository_element.is_dir():
                 self._read_git_repository(repository_element)

@@ -122,7 +119,7 @@ class Data(JsonConfig):
             _LOGGER.warning("Can't read %s", addon)

         except vol.Invalid as ex:
-            _LOGGER.warning("Can't read %s -> %s", addon,
+            _LOGGER.warning("Can't read %s: %s", addon,
                             humanize_error(addon_config, ex))

     def _set_builtin_repositories(self):
@@ -131,7 +128,7 @@ class Data(JsonConfig):
             builtin_file = Path(__file__).parent.joinpath('built-in.json')
             builtin_data = read_json_file(builtin_file)
         except (OSError, json.JSONDecodeError) as err:
-            _LOGGER.warning("Can't read built-in.json -> %s", err)
+            _LOGGER.warning("Can't read built-in json: %s", err)
             return

         # core repository
@@ -162,4 +159,4 @@ class Data(JsonConfig):
             have_change = True

         if have_change:
-            self.save()
+            self.save_data()
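`AddonsData` gets its persistence from `JsonConfig` in `hassio/utils/json.py`. The general shape of that helper, sketched here only to make the `save_data()` calls concrete; the real class differs in details:

import json
from pathlib import Path

import voluptuous as vol

class JsonConfig:
    """Dict-like state persisted as schema-validated JSON (sketch)."""

    def __init__(self, json_file: Path, schema: vol.Schema):
        self._file = json_file
        self._schema = schema
        self._data = {}
        if self._file.is_file():
            # validate on load so corrupt files fail early
            self._data = self._schema(json.loads(self._file.read_text()))

    def save_data(self):
        """Validate and write the current state back to disk."""
        self._file.write_text(json.dumps(self._schema(self._data)))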
@@ -7,33 +7,33 @@ import shutil

 import git

-from .util import get_hash_from_repository
+from .utils import get_hash_from_repository
 from ..const import URL_HASSIO_ADDONS
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)


-class GitRepo(object):
+class GitRepo(CoreSysAttributes):
     """Manage addons git repo."""

-    def __init__(self, config, loop, path, url):
+    def __init__(self, coresys, path, url):
         """Initialize git base wrapper."""
-        self.config = config
-        self.loop = loop
+        self.coresys = coresys
         self.repo = None
         self.path = path
         self.url = url
-        self._lock = asyncio.Lock(loop=loop)
+        self.lock = asyncio.Lock(loop=coresys.loop)

     async def load(self):
         """Init git addon repo."""
         if not self.path.is_dir():
             return await self.clone()

-        async with self._lock:
+        async with self.lock:
             try:
                 _LOGGER.info("Load addon %s repository", self.path)
-                self.repo = await self.loop.run_in_executor(
+                self.repo = await self._loop.run_in_executor(
                     None, git.Repo, str(self.path))

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
@@ -45,10 +45,10 @@ class GitRepo(object):

     async def clone(self):
         """Clone git addon repo."""
-        async with self._lock:
+        async with self.lock:
             try:
                 _LOGGER.info("Clone addon %s repository", self.url)
-                self.repo = await self.loop.run_in_executor(
+                self.repo = await self._loop.run_in_executor(
                     None, ft.partial(
                         git.Repo.clone_from, self.url, str(self.path),
                         recursive=True))
@@ -62,18 +62,18 @@ class GitRepo(object):

     async def pull(self):
         """Pull git addon repo."""
-        if self._lock.locked():
+        if self.lock.locked():
             _LOGGER.warning("It is already a task in progress.")
             return False

-        async with self._lock:
+        async with self.lock:
             try:
                 _LOGGER.info("Pull addon %s repository", self.url)
-                await self.loop.run_in_executor(
+                await self._loop.run_in_executor(
                     None, self.repo.remotes.origin.pull)

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
-                    git.exc.GitCommandError) as err:
+                    git.GitCommandError) as err:
                 _LOGGER.error("Can't pull %s repo: %s.", self.url, err)
                 return False

@@ -83,20 +83,22 @@ class GitRepo(object):
 class GitRepoHassIO(GitRepo):
     """HassIO addons repository."""

-    def __init__(self, config, loop):
+    def __init__(self, coresys):
         """Initialize git hassio addon repository."""
         super().__init__(
-            config, loop, config.path_addons_core, URL_HASSIO_ADDONS)
+            coresys, coresys.config.path_addons_core, URL_HASSIO_ADDONS)


 class GitRepoCustom(GitRepo):
     """Custom addons repository."""

-    def __init__(self, config, loop, url):
+    def __init__(self, coresys, url):
         """Initialize git hassio addon repository."""
-        path = Path(config.path_addons_git, get_hash_from_repository(url))
+        path = Path(
+            coresys.config.path_addons_git,
+            get_hash_from_repository(url))

-        super().__init__(config, loop, path, url)
+        super().__init__(coresys, path, url)

     def remove(self):
         """Remove a custom addon."""
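`GitRepo` serializes every blocking GitPython call behind one `asyncio.Lock` and pushes it onto the default executor; `pull()` additionally refuses to queue behind a running clone. A condensed sketch of that concurrency pattern, with illustrative class and method names:

import asyncio
import functools as ft

import git  # GitPython

class GitMirror:
    """Serialize blocking git operations behind an asyncio.Lock (sketch)."""

    def __init__(self, loop, path, url):
        self.loop = loop
        self.path = path
        self.url = url
        self.repo = None
        self.lock = asyncio.Lock()  # the diff passes loop=, the Python 3.6-era signature

    async def clone(self):
        async with self.lock:
            self.repo = await self.loop.run_in_executor(
                None, ft.partial(git.Repo.clone_from, self.url, str(self.path)))

    async def pull(self):
        if self.lock.locked():
            return False  # a clone/pull is already in flight
        async with self.lock:
            await self.loop.run_in_executor(
                None, self.repo.remotes.origin.pull)
        return True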
@@ -1,18 +1,19 @@
 """Represent a HassIO repository."""
 from .git import GitRepoHassIO, GitRepoCustom
-from .util import get_hash_from_repository
+from .utils import get_hash_from_repository
 from ..const import (
     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)
+from ..coresys import CoreSysAttributes

 UNKNOWN = 'unknown'


-class Repository(object):
+class Repository(CoreSysAttributes):
     """Repository in HassIO."""

-    def __init__(self, config, loop, data, repository):
+    def __init__(self, coresys, repository):
         """Initialize repository object."""
-        self.data = data
+        self.coresys = coresys
         self.source = None
         self.git = None

@@ -20,16 +21,16 @@ class Repository(object):
             self._id = repository
         elif repository == REPOSITORY_CORE:
             self._id = repository
-            self.git = GitRepoHassIO(config, loop)
+            self.git = GitRepoHassIO(coresys)
         else:
             self._id = get_hash_from_repository(repository)
-            self.git = GitRepoCustom(config, loop, repository)
+            self.git = GitRepoCustom(coresys, repository)
             self.source = repository

     @property
     def _mesh(self):
         """Return data struct repository."""
-        return self.data.repositories.get(self._id, {})
+        return self._addons.data.repositories.get(self._id, {})

     @property
     def slug(self):
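The recurring `coresys` rewrite in these files replaces per-object `config`/`loop`/`data` constructor arguments with one shared container plus the `CoreSysAttributes` mixin, which resolves underscore-prefixed attributes such as `self._config` and `self._loop` against it. A sketch of how that delegation can work; the real mixin in `hassio/coresys.py` uses explicit properties:

class CoreSys:
    """Central owner of shared state (sketch)."""

    def __init__(self, loop, config, addons):
        self.loop = loop
        self.config = config
        self.addons = addons


class CoreSysAttributes:
    """Expose CoreSys parts as underscore-prefixed attributes (sketch)."""

    coresys = None

    def __getattr__(self, name):
        # self._loop -> self.coresys.loop, self._config -> self.coresys.config
        if name.startswith('_') and hasattr(self.coresys, name[1:]):
            return getattr(self.coresys, name[1:])
        raise AttributeError(name)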
@@ -1,4 +1,8 @@
 """Validate addons options schema."""
+import logging
+import re
+import uuid

 import voluptuous as vol

 from ..const import (
@@ -9,13 +13,19 @@ from ..const import (
     ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
     ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
     ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
-    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
-    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT,
-    ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API)
+    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK, ATTR_UUID,
+    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
+    ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
+    ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
+    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY)
 from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL

+_LOGGER = logging.getLogger(__name__)

-MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$"
+RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$")
+RE_SERVICE = re.compile(r"^(?P<service>mqtt)(?::(?P<rights>rw|:ro))?$")
+RE_DISCOVERY = re.compile(r"^(?P<component>\w*)(?:/(?P<platform>\w*))?$")

 V_STR = 'str'
 V_INT = 'int'
@@ -24,8 +34,18 @@ V_BOOL = 'bool'
 V_EMAIL = 'email'
 V_URL = 'url'
 V_PORT = 'port'
+V_MATCH = 'match'

-ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL, V_PORT])
+RE_SCHEMA_ELEMENT = re.compile(
+    r"^(?:"
+    r"|str|bool|email|url|port"
+    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
+    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
+    r"|match\((?P<match>.*)\)"
+    r")\??$"
+)
+
+SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
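`RE_SCHEMA_ELEMENT` replaces the old flat `ADDON_ELEMENT` whitelist with a mini-grammar: a base type, optional `(min,max)` bounds for `int`/`float`, an embedded pattern for `match(...)`, and a trailing `?` that marks the option as optional. A quick demonstration of what the named groups capture, using the definition above:

for element in ("int", "int(0,100)", "float(,1.5)", "match(^\\w+$)", "port?"):
    found = RE_SCHEMA_ELEMENT.match(element)
    print(element, found.groupdict() if found else None)
# "int(0,100)"   -> i_min='0', i_max='100'
# "float(,1.5)"  -> only f_max='1.5' is set
# "match(^\w+$)" -> match='^\w+$'
# "port?"        -> matches; the '?' suffix means the option may be omitted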
 ARCH_ALL = [
     ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
@@ -39,9 +59,18 @@ STARTUP_ALL = [
 PRIVILEGED_ALL = [
     "NET_ADMIN",
     "SYS_ADMIN",
-    "SYS_RAWIO"
+    "SYS_RAWIO",
+    "SYS_TIME",
+    "SYS_NICE"
 ]

+BASE_IMAGE = {
+    ARCH_ARMHF: "homeassistant/armhf-base:latest",
+    ARCH_AARCH64: "homeassistant/aarch64-base:latest",
+    ARCH_I386: "homeassistant/i386-base:latest",
+    ARCH_AMD64: "homeassistant/amd64-base:latest",
+}


 def _simple_startup(value):
     """Simple startup schema."""
@@ -66,26 +95,40 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_PORTS): DOCKER_PORTS,
     vol.Optional(ATTR_WEBUI):
-        vol.Match(r"^(?:https?):\/\/\[HOST\]:\[PORT:\d+\].*$"),
+        vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
     vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
     vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
+    vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
     vol.Optional(ATTR_TMPFS):
         vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
-    vol.Optional(ATTR_MAP, default=[]): [vol.Match(MAP_VOLUME)],
+    vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
     vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
     vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
     vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
+    vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
     vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
+    vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
+    vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
+    vol.Optional(ATTR_DISCOVERY): [vol.Match(RE_DISCOVERY)],
     vol.Required(ATTR_OPTIONS): dict,
     vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
-        vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
-            vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
-        ], vol.Schema({vol.Coerce(str): ADDON_ELEMENT}))
+        vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
+            vol.Any(
+                SCHEMA_ELEMENT,
+                {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
+            ),
+        ], vol.Schema({
+            vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
+        }))
     }), False),
-    vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
+    vol.Optional(ATTR_IMAGE): vol.Match(r"^[\w{}]+/[\-\w{}]+$"),
     vol.Optional(ATTR_TIMEOUT, default=10):
-        vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
-}, extra=vol.ALLOW_EXTRA)
+        vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
+}, extra=vol.REMOVE_EXTRA)
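Note the switch from `extra=vol.ALLOW_EXTRA` to `extra=vol.REMOVE_EXTRA` at the end of the schema: unknown keys are now silently dropped instead of passed through. The difference in one small voluptuous example:

import voluptuous as vol

keep = vol.Schema({vol.Required('name'): str}, extra=vol.ALLOW_EXTRA)
drop = vol.Schema({vol.Required('name'): str}, extra=vol.REMOVE_EXTRA)

print(keep({'name': 'demo', 'stray': 1}))  # {'name': 'demo', 'stray': 1}
print(drop({'name': 'demo', 'stray': 1}))  # {'name': 'demo'}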
 # pylint: disable=no-value-for-parameter
@@ -93,20 +136,34 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
     vol.Required(ATTR_NAME): vol.Coerce(str),
     vol.Optional(ATTR_URL): vol.Url(),
     vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
-}, extra=vol.ALLOW_EXTRA)
+}, extra=vol.REMOVE_EXTRA)


+# pylint: disable=no-value-for-parameter
+SCHEMA_BUILD_CONFIG = vol.Schema({
+    vol.Optional(ATTR_BUILD_FROM, default=BASE_IMAGE): vol.Schema({
+        vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
+    }),
+    vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
+    vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
+        vol.Coerce(str): vol.Coerce(str)
+    }),
+}, extra=vol.REMOVE_EXTRA)


 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_USER = vol.Schema({
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-    vol.Optional(ATTR_OPTIONS, default={}): dict,
+    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
+        vol.Match(r"^[0-9a-f]{32}$"),
+    vol.Optional(ATTR_OPTIONS, default=dict): dict,
     vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
     vol.Optional(ATTR_BOOT):
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
     vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
     vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
-})
+}, extra=vol.REMOVE_EXTRA)


 SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
@@ -115,11 +172,11 @@ SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
 })


-SCHEMA_ADDON_FILE = vol.Schema({
-    vol.Optional(ATTR_USER, default={}): {
+SCHEMA_ADDONS_FILE = vol.Schema({
+    vol.Optional(ATTR_USER, default=dict): {
         vol.Coerce(str): SCHEMA_ADDON_USER,
     },
-    vol.Optional(ATTR_SYSTEM, default={}): {
+    vol.Optional(ATTR_SYSTEM, default=dict): {
         vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
     }
 })
@@ -130,7 +187,7 @@ SCHEMA_ADDON_SNAPSHOT = vol.Schema({
     vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
     vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-})
+}, extra=vol.REMOVE_EXTRA)
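`SCHEMA_ADDON_USER` seeds `ATTR_UUID` with `lambda: uuid.uuid4().hex`: voluptuous treats a callable default as a factory, so each add-on gets a fresh identifier instead of one shared module-level value. The same reasoning applies to `default=dict` versus the old mutable `default={}`:

import uuid

import voluptuous as vol

schema = vol.Schema({
    vol.Optional('uuid', default=lambda: uuid.uuid4().hex):
        vol.Match(r"^[0-9a-f]{32}$"),
    vol.Optional('options', default=dict): dict,
})

print(schema({}))  # fresh uuid and a brand-new empty dict
print(schema({}))  # a different uuid: the factory runs per validation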
 def validate_options(raw_schema):
@@ -141,8 +198,10 @@ def validate_options(raw_schema):

         # read options
         for key, value in struct.items():
+            # Ignore unknown options / remove from list
             if key not in raw_schema:
-                raise vol.Invalid("Unknown options {}.".format(key))
+                _LOGGER.warning("Unknown options %s", key)
+                continue

             typ = raw_schema[key]
             try:
@@ -156,41 +215,50 @@ def validate_options(raw_schema):
                 # normal value
                 options[key] = _single_validate(typ, value, key)
             except (IndexError, KeyError):
-                raise vol.Invalid(
-                    "Type error for {}.".format(key)) from None
+                raise vol.Invalid(f"Type error for {key}") from None

+        _check_missing_options(raw_schema, options, 'root')
         return options

     return validate


 # pylint: disable=no-value-for-parameter
+# pylint: disable=inconsistent-return-statements
 def _single_validate(typ, value, key):
     """Validate a single element."""
-    try:
-        # if required argument
-        if value is None:
-            raise vol.Invalid("Missing required option '{}'.".format(key))
+    # if required argument
+    if value is None:
+        raise vol.Invalid(f"Missing required option '{key}'")

-        if typ == V_STR:
-            return str(value)
-        elif typ == V_INT:
-            return int(value)
-        elif typ == V_FLOAT:
-            return float(value)
-        elif typ == V_BOOL:
-            return vol.Boolean()(value)
-        elif typ == V_EMAIL:
-            return vol.Email()(value)
-        elif typ == V_URL:
-            return vol.Url()(value)
-        elif typ == V_PORT:
-            return NETWORK_PORT(value)
+    # parse extend data from type
+    match = RE_SCHEMA_ELEMENT.match(typ)

-        raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
-    except ValueError:
-        raise vol.Invalid(
-            "Type {} error for '{}' on {}.".format(typ, value, key)) from None
+    # prepare range
+    range_args = {}
+    for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
+        group_value = match.group(group_name)
+        if group_value:
+            range_args[group_name[2:]] = float(group_value)
+
+    if typ.startswith(V_STR):
+        return str(value)
+    elif typ.startswith(V_INT):
+        return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
+    elif typ.startswith(V_FLOAT):
+        return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
+    elif typ.startswith(V_BOOL):
+        return vol.Boolean()(value)
+    elif typ.startswith(V_EMAIL):
+        return vol.Email()(value)
+    elif typ.startswith(V_URL):
+        return vol.Url()(value)
+    elif typ.startswith(V_PORT):
+        return NETWORK_PORT(value)
+    elif typ.startswith(V_MATCH):
+        return vol.Match(match.group('match'))(str(value))
+
+    raise vol.Invalid(f"Fatal error for {key} type {typ}")
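The bounds captured by `RE_SCHEMA_ELEMENT` feed straight into `vol.Range`. Tracing "int(0,100)" through `_single_validate` by hand, using the regex defined above:

import voluptuous as vol

found = RE_SCHEMA_ELEMENT.match("int(0,100)")
range_args = {name[2:]: float(value)
              for name, value in found.groupdict().items()
              if value and name in ('i_min', 'i_max')}

print(vol.All(vol.Coerce(int), vol.Range(**range_args))("42"))  # -> 42
# the same validator applied to "200" raises vol.Invalid (above max=100.0)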
 def _nested_validate_list(typ, data_list, key):
@@ -198,17 +266,10 @@ def _nested_validate_list(typ, data_list, key):
     options = []

     for element in data_list:
-        # dict list
+        # Nested?
         if isinstance(typ, dict):
-            c_options = {}
-            for c_key, c_value in element.items():
-                if c_key not in typ:
-                    raise vol.Invalid(
-                        "Unknown nested options {}".format(c_key))
-
-                c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
+            c_options = _nested_validate_dict(typ, element, key)
             options.append(c_options)
-        # normal list
         else:
             options.append(_single_validate(typ, element, key))

@@ -220,9 +281,27 @@ def _nested_validate_dict(typ, data_dict, key):
     options = {}

     for c_key, c_value in data_dict.items():
+        # Ignore unknown options / remove from list
         if c_key not in typ:
-            raise vol.Invalid("Unknow nested dict options {}".format(c_key))
+            _LOGGER.warning("Unknown options %s", c_key)
+            continue

-        options[c_key] = _single_validate(typ[c_key], c_value, c_key)
+        # Nested?
+        if isinstance(typ[c_key], list):
+            options[c_key] = _nested_validate_list(typ[c_key][0],
+                                                   c_value, c_key)
+        else:
+            options[c_key] = _single_validate(typ[c_key], c_value, c_key)

+    _check_missing_options(typ, options, key)
     return options


+def _check_missing_options(origin, exists, root):
+    """Check if all options are exists."""
+    missing = set(origin) - set(exists)
+    for miss_opt in missing:
+        if isinstance(origin[miss_opt], str) and \
+                origin[miss_opt].endswith("?"):
+            continue
+        raise vol.Invalid(f"Missing option {miss_opt} in {root}")
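`_check_missing_options` is what gives the trailing `?` its meaning: required keys missing from the user options raise, while `str?`-style entries may be absent. With an illustrative schema dict:

origin = {'host': 'str', 'port': 'port', 'password': 'str?'}

_check_missing_options(origin, {'host': 'x', 'port': 80}, 'root')  # passes
# _check_missing_options(origin, {'port': 80}, 'root') raises
# vol.Invalid("Missing option host in root")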
@@ -5,32 +5,53 @@ from pathlib import Path
 from aiohttp import web

 from .addons import APIAddons
+from .discovery import APIDiscovery
 from .homeassistant import APIHomeAssistant
 from .host import APIHost
 from .network import APINetwork
+from .proxy import APIProxy
 from .supervisor import APISupervisor
-from .security import APISecurity
 from .snapshots import APISnapshots
+from .services import APIServices
+from .security import security_layer
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)


-class RestAPI(object):
+class RestAPI(CoreSysAttributes):
     """Handle rest api for hassio."""

-    def __init__(self, config, loop):
+    def __init__(self, coresys):
         """Initialize docker base wrapper."""
-        self.config = config
-        self.loop = loop
-        self.webapp = web.Application(loop=self.loop)
+        self.coresys = coresys
+        self.webapp = web.Application(
+            middlewares=[security_layer], loop=self._loop)

         # service stuff
         self._handler = None
         self.server = None

-    def register_host(self, host_control, hardware):
+        # middleware
+        self.webapp['coresys'] = coresys
+
+    async def load(self):
+        """Register REST API Calls."""
+        self._register_supervisor()
+        self._register_host()
+        self._register_homeassistant()
+        self._register_proxy()
+        self._register_panel()
+        self._register_addons()
+        self._register_snapshots()
+        self._register_network()
+        self._register_discovery()
+        self._register_services()
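The new constructor wires two things aiohttp needs for request-time policy: a `security_layer` middleware and the shared `coresys` stored on the application mapping. A minimal sketch of that wiring with a stand-in token check; the real middleware lives in `hassio/api/security.py`, and the header name and token attribute here are assumptions:

from aiohttp import web

@web.middleware
async def security_layer(request, handler):
    """Reject requests whose token header does not match (sketch)."""
    coresys = request.app['coresys']  # shared state, stored as above
    if request.headers.get('X-HASSIO-KEY') != getattr(coresys, 'api_token', None):
        return web.json_response({'result': 'error'}, status=401)
    return await handler(request)

app = web.Application(middlewares=[security_layer])
app['coresys'] = object()  # stand-in for the real CoreSys instance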
+    def _register_host(self):
         """Register hostcontrol function."""
-        api_host = APIHost(self.config, self.loop, host_control, hardware)
+        api_host = APIHost()
+        api_host.coresys = self.coresys

         self.webapp.router.add_get('/host/info', api_host.info)
         self.webapp.router.add_get('/host/hardware', api_host.hardware)
@@ -38,23 +59,24 @@ class RestAPI(object):
         self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
         self.webapp.router.add_post('/host/update', api_host.update)
         self.webapp.router.add_post('/host/options', api_host.options)
+        self.webapp.router.add_post('/host/reload', api_host.reload)

-    def register_network(self, host_control):
+    def _register_network(self):
         """Register network function."""
-        api_net = APINetwork(self.config, self.loop, host_control)
+        api_net = APINetwork()
+        api_net.coresys = self.coresys

         self.webapp.router.add_get('/network/info', api_net.info)
         self.webapp.router.add_post('/network/options', api_net.options)

-    def register_supervisor(self, supervisor, snapshots, addons, host_control,
-                            updater):
+    def _register_supervisor(self):
         """Register supervisor function."""
-        api_supervisor = APISupervisor(
-            self.config, self.loop, supervisor, snapshots, addons,
-            host_control, updater)
+        api_supervisor = APISupervisor()
+        api_supervisor.coresys = self.coresys

         self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
         self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
+        self.webapp.router.add_get('/supervisor/stats', api_supervisor.stats)
         self.webapp.router.add_post(
             '/supervisor/update', api_supervisor.update)
         self.webapp.router.add_post(
@@ -63,24 +85,46 @@ class RestAPI(object):
             '/supervisor/options', api_supervisor.options)
         self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)

-    def register_homeassistant(self, dock_homeassistant):
+    def _register_homeassistant(self):
         """Register homeassistant function."""
-        api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)
+        api_hass = APIHomeAssistant()
+        api_hass.coresys = self.coresys

         self.webapp.router.add_get('/homeassistant/info', api_hass.info)
         self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
+        self.webapp.router.add_get('/homeassistant/stats', api_hass.stats)
         self.webapp.router.add_post('/homeassistant/options', api_hass.options)
         self.webapp.router.add_post('/homeassistant/update', api_hass.update)
         self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
+        self.webapp.router.add_post('/homeassistant/stop', api_hass.stop)
+        self.webapp.router.add_post('/homeassistant/start', api_hass.start)
         self.webapp.router.add_post('/homeassistant/check', api_hass.check)

-    def register_addons(self, addons):
+    def _register_proxy(self):
+        """Register HomeAssistant API Proxy."""
+        api_proxy = APIProxy()
+        api_proxy.coresys = self.coresys
+
+        self.webapp.router.add_get(
+            '/homeassistant/api/websocket', api_proxy.websocket)
+        self.webapp.router.add_get(
+            '/homeassistant/websocket', api_proxy.websocket)
+        self.webapp.router.add_get(
+            '/homeassistant/api/stream', api_proxy.stream)
+        self.webapp.router.add_post(
+            '/homeassistant/api/{path:.+}', api_proxy.api)
+        self.webapp.router.add_get(
+            '/homeassistant/api/{path:.+}', api_proxy.api)
+        self.webapp.router.add_get(
+            '/homeassistant/api/', api_proxy.api)
+
+    def _register_addons(self):
         """Register homeassistant function."""
-        api_addons = APIAddons(self.config, self.loop, addons)
+        api_addons = APIAddons()
+        api_addons.coresys = self.coresys

         self.webapp.router.add_get('/addons', api_addons.list)
         self.webapp.router.add_post('/addons/reload', api_addons.reload)

         self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
         self.webapp.router.add_post(
             '/addons/{addon}/install', api_addons.install)
@@ -94,21 +138,20 @@ class RestAPI(object):
             '/addons/{addon}/update', api_addons.update)
         self.webapp.router.add_post(
             '/addons/{addon}/options', api_addons.options)
+        self.webapp.router.add_post(
+            '/addons/{addon}/rebuild', api_addons.rebuild)
         self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
+        self.webapp.router.add_get('/addons/{addon}/icon', api_addons.icon)
         self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
+        self.webapp.router.add_get(
+            '/addons/{addon}/changelog', api_addons.changelog)
+        self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)
+        self.webapp.router.add_get('/addons/{addon}/stats', api_addons.stats)

-    def register_security(self):
-        """Register security function."""
-        api_security = APISecurity(self.config, self.loop)
-
-        self.webapp.router.add_get('/security/info', api_security.info)
-        self.webapp.router.add_post('/security/options', api_security.options)
-        self.webapp.router.add_post('/security/totp', api_security.totp)
-        self.webapp.router.add_post('/security/session', api_security.session)
-
-    def register_snapshots(self, snapshots):
+    def _register_snapshots(self):
         """Register snapshots function."""
-        api_snapshots = APISnapshots(self.config, self.loop, snapshots)
+        api_snapshots = APISnapshots()
+        api_snapshots.coresys = self.coresys

         self.webapp.router.add_get('/snapshots', api_snapshots.list)
         self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)
@@ -117,6 +160,8 @@ class RestAPI(object):
             '/snapshots/new/full', api_snapshots.snapshot_full)
         self.webapp.router.add_post(
             '/snapshots/new/partial', api_snapshots.snapshot_partial)
+        self.webapp.router.add_post(
+            '/snapshots/new/upload', api_snapshots.upload)

         self.webapp.router.add_get(
             '/snapshots/{snapshot}/info', api_snapshots.info)
@@ -127,23 +172,66 @@ class RestAPI(object):
         self.webapp.router.add_post(
             '/snapshots/{snapshot}/restore/partial',
             api_snapshots.restore_partial)
+        self.webapp.router.add_get(
+            '/snapshots/{snapshot}/download',
+            api_snapshots.download)

-    def register_panel(self):
+    def _register_services(self):
+        api_services = APIServices()
+        api_services.coresys = self.coresys
+
+        self.webapp.router.add_get('/services', api_services.list)
+
+        self.webapp.router.add_get(
+            '/services/{service}', api_services.get_service)
+        self.webapp.router.add_post(
+            '/services/{service}', api_services.set_service)
+        self.webapp.router.add_delete(
+            '/services/{service}', api_services.del_service)
+
+    def _register_discovery(self):
+        api_discovery = APIDiscovery()
+        api_discovery.coresys = self.coresys
+
+        self.webapp.router.add_get(
+            '/services/discovery', api_discovery.list)
+        self.webapp.router.add_get(
+            '/services/discovery/{uuid}', api_discovery.get_discovery)
+        self.webapp.router.add_delete(
+            '/services/discovery/{uuid}', api_discovery.del_discovery)
+        self.webapp.router.add_post(
+            '/services/discovery', api_discovery.set_discovery)
+
+    def _register_panel(self):
         """Register panel for homeassistant."""
-        panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html')
+        def create_panel_response(build_type):
+            """Create a function to generate a response."""
+            path = Path(__file__).parent.joinpath(
+                f"panel/{build_type}.html")
+            return lambda request: web.FileResponse(path)

-        def get_panel(request):
-            """Return file response with panel."""
-            return web.FileResponse(panel)
+        # This route is for backwards compatibility with HA < 0.58
+        self.webapp.router.add_get(
+            '/panel', create_panel_response('hassio-main-es5'))

-        self.webapp.router.add_get('/panel', get_panel)
+        # This route is for backwards compatibility with HA 0.58 - 0.61
+        self.webapp.router.add_get(
+            '/panel_es5', create_panel_response('hassio-main-es5'))
+        self.webapp.router.add_get(
+            '/panel_latest', create_panel_response('hassio-main-latest'))
+
+        # This route is for HA > 0.61
+        self.webapp.router.add_get(
+            '/app-es5/index.html', create_panel_response('index'))
+        self.webapp.router.add_get(
+            '/app-es5/hassio-app.html', create_panel_response('hassio-app'))
     async def start(self):
         """Run rest api webserver."""
-        self._handler = self.webapp.make_handler(loop=self.loop)
+        self._handler = self.webapp.make_handler(loop=self._loop)

         try:
-            self.server = await self.loop.create_server(
+            self.server = await self._loop.create_server(
                 self._handler, "0.0.0.0", "80")
         except OSError as err:
             _LOGGER.fatal(
@@ -157,5 +245,5 @@ class RestAPI(object):
         await self.webapp.shutdown()

         if self._handler:
-            await self._handler.finish_connections(60)
+            await self._handler.shutdown(60)
         await self.webapp.cleanup()
@@ -5,7 +5,7 @@ import logging
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from .util import api_process, api_process_raw, api_validate
+from .utils import api_process, api_process_raw, api_validate
 from ..const import (
     ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
     ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
@@ -13,7 +13,13 @@ from ..const import (
     ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
     ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
     ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
-    BOOT_AUTO, BOOT_MANUAL, CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
+    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
+    ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
+    ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
+    ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
+    ATTR_DISCOVERY,
+    CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT)
+from ..coresys import CoreSysAttributes
 from ..validate import DOCKER_PORTS

 _LOGGER = logging.getLogger(__name__)
@@ -30,18 +36,12 @@ SCHEMA_OPTIONS = vol.Schema({
 })


-class APIAddons(object):
+class APIAddons(CoreSysAttributes):
     """Handle rest api for addons functions."""

-    def __init__(self, config, loop, addons):
-        """Initialize homeassistant rest api part."""
-        self.config = config
-        self.loop = loop
-        self.addons = addons
-
     def _extract_addon(self, request, check_installed=True):
         """Return addon and if not exists trow a exception."""
-        addon = self.addons.get(request.match_info.get('addon'))
+        addon = self._addons.get(request.match_info.get('addon'))
         if not addon:
             raise RuntimeError("Addon not exists")
@@ -55,14 +55,14 @@ class APIAddons(object):
         """Return a simplified device list."""
         dev_list = addon.devices
         if not dev_list:
-            return
+            return None
         return [row.split(':')[0] for row in dev_list]

     @api_process
     async def list(self, request):
         """Return all addons / repositories ."""
         data_addons = []
-        for addon in self.addons.list_addons:
+        for addon in self._addons.list_addons:
             data_addons.append({
                 ATTR_NAME: addon.name,
                 ATTR_SLUG: addon.slug,
@@ -73,16 +73,13 @@ class APIAddons(object):
                 ATTR_DETACHED: addon.is_detached,
                 ATTR_REPOSITORY: addon.repository,
                 ATTR_BUILD: addon.need_build,
-                ATTR_PRIVILEGED: addon.privileged,
-                ATTR_DEVICES: self._pretty_devices(addon),
                 ATTR_URL: addon.url,
+                ATTR_ICON: addon.with_icon,
                 ATTR_LOGO: addon.with_logo,
-                ATTR_HASSIO_API: addon.use_hassio_api,
-                ATTR_AUDIO: addon.with_audio,
             })

         data_repositories = []
-        for repository in self.addons.list_repositories:
+        for repository in self._addons.list_repositories:
             data_repositories.append({
                 ATTR_SLUG: repository.slug,
                 ATTR_NAME: repository.name,
@@ -99,7 +96,7 @@ class APIAddons(object):
     @api_process
     async def reload(self, request):
         """Reload all addons data."""
-        await asyncio.shield(self.addons.reload(), loop=self.loop)
+        await asyncio.shield(self._addons.reload(), loop=self._loop)
         return True

     @api_process
@@ -110,6 +107,7 @@ class APIAddons(object):
         return {
             ATTR_NAME: addon.name,
             ATTR_DESCRIPTON: addon.description,
+            ATTR_LONG_DESCRIPTION: addon.long_description,
             ATTR_VERSION: addon.version_installed,
             ATTR_AUTO_UPDATE: addon.auto_update,
             ATTR_REPOSITORY: addon.repository,
@@ -121,15 +119,24 @@ class APIAddons(object):
             ATTR_DETACHED: addon.is_detached,
             ATTR_BUILD: addon.need_build,
             ATTR_NETWORK: addon.ports,
-            ATTR_HOST_NETWORK: addon.network_mode == 'host',
+            ATTR_HOST_NETWORK: addon.host_network,
+            ATTR_HOST_IPC: addon.host_ipc,
+            ATTR_HOST_DBUS: addon.host_dbus,
             ATTR_PRIVILEGED: addon.privileged,
             ATTR_DEVICES: self._pretty_devices(addon),
+            ATTR_ICON: addon.with_icon,
             ATTR_LOGO: addon.with_logo,
+            ATTR_CHANGELOG: addon.with_changelog,
             ATTR_WEBUI: addon.webui,
-            ATTR_HASSIO_API: addon.use_hassio_api,
+            ATTR_STDIN: addon.with_stdin,
+            ATTR_HASSIO_API: addon.access_hassio_api,
+            ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
+            ATTR_GPIO: addon.with_gpio,
             ATTR_AUDIO: addon.with_audio,
             ATTR_AUDIO_INPUT: addon.audio_input,
             ATTR_AUDIO_OUTPUT: addon.audio_output,
+            ATTR_SERVICES: addon.services,
+            ATTR_DISCOVERY: addon.discovery,
         }

     @api_process
@@ -138,7 +145,7 @@ class APIAddons(object):
         addon = self._extract_addon(request)

         addon_schema = SCHEMA_OPTIONS.extend({
-            vol.Optional(ATTR_OPTIONS): addon.schema,
+            vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
         })

         body = await api_validate(addon_schema, request)
@@ -156,23 +163,39 @@ class APIAddons(object):
         if ATTR_AUDIO_OUTPUT in body:
             addon.audio_output = body[ATTR_AUDIO_OUTPUT]

+        addon.save_data()
         return True
     @api_process
-    async def install(self, request):
-        """Install addon."""
-        body = await api_validate(SCHEMA_VERSION, request)
-        addon = self._extract_addon(request, check_installed=False)
-        version = body.get(ATTR_VERSION, addon.last_version)
+    async def stats(self, request):
+        """Return resource information."""
+        addon = self._extract_addon(request)
+        stats = await addon.stats()

-        return await asyncio.shield(
-            addon.install(version=version), loop=self.loop)
+        if not stats:
+            raise RuntimeError("No stats available")
+
+        return {
+            ATTR_CPU_PERCENT: stats.cpu_percent,
+            ATTR_MEMORY_USAGE: stats.memory_usage,
+            ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_NETWORK_RX: stats.network_rx,
+            ATTR_NETWORK_TX: stats.network_tx,
+            ATTR_BLK_READ: stats.blk_read,
+            ATTR_BLK_WRITE: stats.blk_write,
+        }
+
+    @api_process
+    def install(self, request):
+        """Install addon."""
+        addon = self._extract_addon(request, check_installed=False)
+        return asyncio.shield(addon.install(), loop=self._loop)

     @api_process
     def uninstall(self, request):
         """Uninstall addon."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.uninstall(), loop=self.loop)
+        return asyncio.shield(addon.uninstall(), loop=self._loop)

     @api_process
     def start(self, request):
@@ -186,32 +209,38 @@ class APIAddons(object):
         except vol.Invalid as ex:
             raise RuntimeError(humanize_error(options, ex)) from None

-        return asyncio.shield(addon.start(), loop=self.loop)
+        return asyncio.shield(addon.start(), loop=self._loop)

     @api_process
     def stop(self, request):
         """Stop addon."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.stop(), loop=self.loop)
+        return asyncio.shield(addon.stop(), loop=self._loop)

     @api_process
-    async def update(self, request):
+    def update(self, request):
         """Update addon."""
-        body = await api_validate(SCHEMA_VERSION, request)
         addon = self._extract_addon(request)
-        version = body.get(ATTR_VERSION, addon.last_version)

-        if version == addon.version_installed:
-            raise RuntimeError("Version %s is already in use", version)
+        if addon.last_version == addon.version_installed:
+            raise RuntimeError("No update available!")

-        return await asyncio.shield(
-            addon.update(version=version), loop=self.loop)
+        return asyncio.shield(addon.update(), loop=self._loop)

     @api_process
     def restart(self, request):
         """Restart addon."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.restart(), loop=self.loop)
+        return asyncio.shield(addon.restart(), loop=self._loop)
+
+    @api_process
+    def rebuild(self, request):
+        """Rebuild local build addon."""
+        addon = self._extract_addon(request)
+        if not addon.need_build:
+            raise RuntimeError("Only local build addons are supported")
+
+        return asyncio.shield(addon.rebuild(), loop=self._loop)

     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
@@ -219,12 +248,42 @@ class APIAddons(object):
         addon = self._extract_addon(request)
         return addon.logs()

+    @api_process_raw(CONTENT_TYPE_PNG)
+    async def icon(self, request):
+        """Return icon from addon."""
+        addon = self._extract_addon(request, check_installed=False)
+        if not addon.with_icon:
+            raise RuntimeError("No icon found!")
+
+        with addon.path_icon.open('rb') as png:
+            return png.read()
+
     @api_process_raw(CONTENT_TYPE_PNG)
     async def logo(self, request):
         """Return logo from addon."""
         addon = self._extract_addon(request, check_installed=False)
         if not addon.with_logo:
-            raise RuntimeError("No image found!")
+            raise RuntimeError("No logo found!")

         with addon.path_logo.open('rb') as png:
             return png.read()

+    @api_process_raw(CONTENT_TYPE_TEXT)
+    async def changelog(self, request):
+        """Return changelog from addon."""
+        addon = self._extract_addon(request, check_installed=False)
+        if not addon.with_changelog:
+            raise RuntimeError("No changelog found!")
+
+        with addon.path_changelog.open('r') as changelog:
+            return changelog.read()
+
+    @api_process
+    async def stdin(self, request):
+        """Write to stdin of addon."""
+        addon = self._extract_addon(request)
+        if not addon.with_stdin:
+            raise RuntimeError("STDIN not supported by addon")
+
+        data = await request.read()
+        return await asyncio.shield(addon.write_stdin(data), loop=self._loop)
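Nearly every mutating handler above wraps its coroutine in `asyncio.shield(...)`. That keeps an install, update, or restart running to completion even if the HTTP request that started it is cancelled, for example because the client disconnected. (The `loop=` keyword reflects the Python 3.6-era asyncio API; later Python versions removed it.) In miniature:

import asyncio

async def long_update():
    await asyncio.sleep(5)  # stands in for pulling a new add-on image
    return True

async def handler():
    # cancelling handler() cancels this await, but the shielded task
    # itself keeps running in the background until it finishes
    return await asyncio.shield(long_update())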
hassio/api/discovery.py (new file, 72 lines)
@@ -0,0 +1,72 @@
+"""Init file for HassIO network rest api."""
+
+import voluptuous as vol
+
+from .utils import api_process, api_validate
+from ..const import (
+    ATTR_PROVIDER, ATTR_UUID, ATTR_COMPONENT, ATTR_PLATFORM, ATTR_CONFIG,
+    ATTR_DISCOVERY, REQUEST_FROM)
+from ..coresys import CoreSysAttributes
+
+
+SCHEMA_DISCOVERY = vol.Schema({
+    vol.Required(ATTR_COMPONENT): vol.Coerce(str),
+    vol.Optional(ATTR_PLATFORM): vol.Any(None, vol.Coerce(str)),
+    vol.Optional(ATTR_CONFIG): vol.Any(None, dict),
+})
+
+
+class APIDiscovery(CoreSysAttributes):
+    """Handle rest api for discovery functions."""
+
+    def _extract_message(self, request):
+        """Extract discovery message from URL."""
+        message = self._services.discovery.get(request.match_info.get('uuid'))
+        if not message:
+            raise RuntimeError("Discovery message not found")
+        return message
+
+    @api_process
+    async def list(self, request):
+        """Show register services."""
+        discovery = []
+        for message in self._services.discovery.list_messages:
+            discovery.append({
+                ATTR_PROVIDER: message.provider,
+                ATTR_UUID: message.uuid,
+                ATTR_COMPONENT: message.component,
+                ATTR_PLATFORM: message.platform,
+                ATTR_CONFIG: message.config,
+            })
+
+        return {ATTR_DISCOVERY: discovery}
+
+    @api_process
+    async def set_discovery(self, request):
+        """Write data into a discovery pipeline."""
+        body = await api_validate(SCHEMA_DISCOVERY, request)
+        message = self._services.discovery.send(
+            provider=request[REQUEST_FROM], **body)
+
+        return {ATTR_UUID: message.uuid}
+
+    @api_process
+    async def get_discovery(self, request):
+        """Read data into a discovery message."""
+        message = self._extract_message(request)
+
+        return {
+            ATTR_PROVIDER: message.provider,
+            ATTR_UUID: message.uuid,
+            ATTR_COMPONENT: message.component,
+            ATTR_PLATFORM: message.platform,
+            ATTR_CONFIG: message.config,
+        }
+
+    @api_process
+    async def del_discovery(self, request):
+        """Delete data into a discovery message."""
+        message = self._extract_message(request)
+
+        self._services.discovery.remove(message)
+        return True
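Paired with the routes registered earlier, an add-on could push a discovery message like this; the internal host name, auth header, and token are placeholders for whatever the Supervisor actually hands the add-on:

import json
import urllib.request

payload = json.dumps({
    'component': 'mqtt',
    'platform': None,
    'config': {'host': '172.30.32.1', 'port': 1883},
}).encode()

req = urllib.request.Request(
    'http://hassio/services/discovery', data=payload, method='POST',
    headers={'X-HASSIO-KEY': '<token>', 'Content-Type': 'application/json'})
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))  # expected shape: {..., 'data': {'uuid': ...}}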
@@ -4,20 +4,31 @@ import logging

 import voluptuous as vol

-from .util import api_process, api_process_raw, api_validate
+from .utils import api_process, api_process_raw, api_validate
 from ..const import (
-    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
-    CONTENT_TYPE_BINARY)
-from ..validate import HASS_DEVICES
+    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
+    ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
+    ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
+    ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, CONTENT_TYPE_BINARY)
+from ..coresys import CoreSysAttributes
+from ..validate import NETWORK_PORT, DOCKER_IMAGE

 _LOGGER = logging.getLogger(__name__)


 # pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema({
-    vol.Optional(ATTR_DEVICES): HASS_DEVICES,
-    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
-    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
+    vol.Optional(ATTR_BOOT): vol.Boolean(),
+    vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
+        vol.Any(None, vol.Coerce(str)),
+    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
         vol.Any(None, DOCKER_IMAGE),
+    vol.Optional(ATTR_PORT): NETWORK_PORT,
+    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
+    vol.Optional(ATTR_SSL): vol.Boolean(),
+    vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
+    vol.Optional(ATTR_WAIT_BOOT):
+        vol.All(vol.Coerce(int), vol.Range(min=60)),
 })
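`vol.Inclusive` ties `image` and `last_version` into one `custom_hass` group: either both are supplied or neither, which is why the options handler below can check for the two keys together:

import voluptuous as vol

schema = vol.Schema({
    vol.Inclusive('image', 'custom_hass'): vol.Any(None, str),
    vol.Inclusive('last_version', 'custom_hass'): vol.Any(None, str),
})

schema({})                                            # OK: group absent
schema({'image': 'me/hass', 'last_version': '0.62'})  # OK: group complete
# schema({'image': 'me/hass'}) raises vol.MultipleInvalid:
# inclusive keys must appear together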
 SCHEMA_VERSION = vol.Schema({
@@ -25,24 +36,22 @@ SCHEMA_VERSION = vol.Schema({
 })


-class APIHomeAssistant(object):
+class APIHomeAssistant(CoreSysAttributes):
     """Handle rest api for homeassistant functions."""

-    def __init__(self, config, loop, homeassistant):
-        """Initialize homeassistant rest api part."""
-        self.config = config
-        self.loop = loop
-        self.homeassistant = homeassistant
-
     @api_process
     async def info(self, request):
         """Return host information."""
         return {
-            ATTR_VERSION: self.homeassistant.version,
-            ATTR_LAST_VERSION: self.homeassistant.last_version,
-            ATTR_IMAGE: self.homeassistant.image,
-            ATTR_DEVICES: self.homeassistant.devices,
-            ATTR_CUSTOM: self.homeassistant.is_custom_image,
+            ATTR_VERSION: self._homeassistant.version,
+            ATTR_LAST_VERSION: self._homeassistant.last_version,
+            ATTR_IMAGE: self._homeassistant.image,
+            ATTR_CUSTOM: self._homeassistant.is_custom_image,
+            ATTR_BOOT: self._homeassistant.boot,
+            ATTR_PORT: self._homeassistant.api_port,
+            ATTR_SSL: self._homeassistant.api_ssl,
+            ATTR_WATCHDOG: self._homeassistant.watchdog,
+            ATTR_WAIT_BOOT: self._homeassistant.wait_boot,
         }

     @api_process
@@ -50,42 +59,85 @@ class APIHomeAssistant(object):
         """Set homeassistant options."""
         body = await api_validate(SCHEMA_OPTIONS, request)

-        if ATTR_DEVICES in body:
-            self.homeassistant.devices = body[ATTR_DEVICES]
+        if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
+            self._homeassistant.image = body[ATTR_IMAGE]
+            self._homeassistant.last_version = body[ATTR_LAST_VERSION]

-        if ATTR_IMAGE in body:
-            self.homeassistant.set_custom(
-                body[ATTR_IMAGE], body[ATTR_LAST_VERSION])
+        if ATTR_BOOT in body:
+            self._homeassistant.boot = body[ATTR_BOOT]
+
+        if ATTR_PORT in body:
+            self._homeassistant.api_port = body[ATTR_PORT]
+
+        if ATTR_PASSWORD in body:
+            self._homeassistant.api_password = body[ATTR_PASSWORD]
+
+        if ATTR_SSL in body:
+            self._homeassistant.api_ssl = body[ATTR_SSL]
+
+        if ATTR_WATCHDOG in body:
+            self._homeassistant.watchdog = body[ATTR_WATCHDOG]
+
+        if ATTR_WAIT_BOOT in body:
+            self._homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
+
+        self._homeassistant.save_data()
         return True

+    @api_process
+    async def stats(self, request):
+        """Return resource information."""
+        stats = await self._homeassistant.stats()
+        if not stats:
+            raise RuntimeError("No stats available")
+
+        return {
+            ATTR_CPU_PERCENT: stats.cpu_percent,
+            ATTR_MEMORY_USAGE: stats.memory_usage,
+            ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_NETWORK_RX: stats.network_rx,
+            ATTR_NETWORK_TX: stats.network_tx,
+            ATTR_BLK_READ: stats.blk_read,
+            ATTR_BLK_WRITE: stats.blk_write,
+        }
+
     @api_process
     async def update(self, request):
         """Update homeassistant."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.homeassistant.last_version)
+        version = body.get(ATTR_VERSION, self._homeassistant.last_version)

-        if version == self.homeassistant.version:
+        if version == self._homeassistant.version:
             raise RuntimeError("Version {} is already in use".format(version))

         return await asyncio.shield(
-            self.homeassistant.update(version), loop=self.loop)
+            self._homeassistant.update(version), loop=self._loop)

+    @api_process
+    def stop(self, request):
+        """Stop homeassistant."""
+        return asyncio.shield(self._homeassistant.stop(), loop=self._loop)
+
+    @api_process
+    def start(self, request):
+        """Start homeassistant."""
+        return asyncio.shield(self._homeassistant.start(), loop=self._loop)
+
     @api_process
     def restart(self, request):
         """Restart homeassistant."""
-        return asyncio.shield(self.homeassistant.restart(), loop=self.loop)
+        return asyncio.shield(self._homeassistant.restart(), loop=self._loop)

     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
         """Return homeassistant docker logs."""
-        return self.homeassistant.logs()
+        return self._homeassistant.logs()

     @api_process
     async def check(self, request):
         """Check config of homeassistant."""
-        code, message = await self.homeassistant.check_config()
-        if not code:
-            raise RuntimeError(message)
+        result = await self._homeassistant.check_config()
+        if not result.valid:
+            raise RuntimeError(result.log)

         return True
@@ -4,11 +4,12 @@ import logging

import voluptuous as vol

from .util import api_process_hostcontrol, api_process, api_validate
from .utils import api_process_hostcontrol, api_process, api_validate
from ..const import (
    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
    ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT)
    ATTR_AUDIO_OUTPUT, ATTR_GPIO)
from ..coresys import CoreSysAttributes
from ..validate import ALSA_CHANNEL

_LOGGER = logging.getLogger(__name__)
@@ -23,26 +24,19 @@ SCHEMA_OPTIONS = vol.Schema({
})


class APIHost(object):
class APIHost(CoreSysAttributes):
    """Handle rest api for host functions."""

    def __init__(self, config, loop, host_control, hardware):
        """Initialize host rest api part."""
        self.config = config
        self.loop = loop
        self.host_control = host_control
        self.local_hw = hardware

    @api_process
    async def info(self, request):
        """Return host information."""
        return {
            ATTR_TYPE: self.host_control.type,
            ATTR_VERSION: self.host_control.version,
            ATTR_LAST_VERSION: self.host_control.last_version,
            ATTR_FEATURES: self.host_control.features,
            ATTR_HOSTNAME: self.host_control.hostname,
            ATTR_OS: self.host_control.os_info,
            ATTR_TYPE: self._host_control.type,
            ATTR_VERSION: self._host_control.version,
            ATTR_LAST_VERSION: self._host_control.last_version,
            ATTR_FEATURES: self._host_control.features,
            ATTR_HOSTNAME: self._host_control.hostname,
            ATTR_OS: self._host_control.os_info,
        }

    @api_process
@@ -51,40 +45,48 @@ class APIHost(object):
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_AUDIO_OUTPUT in body:
            self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
            self._config.audio_output = body[ATTR_AUDIO_OUTPUT]
        if ATTR_AUDIO_INPUT in body:
            self.config.audio_input = body[ATTR_AUDIO_INPUT]
            self._config.audio_input = body[ATTR_AUDIO_INPUT]

        self._config.save_data()
        return True

    @api_process_hostcontrol
    def reboot(self, request):
        """Reboot host."""
        return self.host_control.reboot()
        return self._host_control.reboot()

    @api_process_hostcontrol
    def shutdown(self, request):
        """Poweroff host."""
        return self.host_control.shutdown()
        return self._host_control.shutdown()

    @api_process_hostcontrol
    async def reload(self, request):
        """Reload host data."""
        await self._host_control.load()
        return True

    @api_process_hostcontrol
    async def update(self, request):
        """Update host OS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.host_control.last_version)
        version = body.get(ATTR_VERSION, self._host_control.last_version)

        if version == self.host_control.version:
            raise RuntimeError("Version {} is already in use".format(version))
        if version == self._host_control.version:
            raise RuntimeError(f"Version {version} is already in use")

        return await asyncio.shield(
            self.host_control.update(version=version), loop=self.loop)
            self._host_control.update(version=version), loop=self._loop)

    @api_process
    async def hardware(self, request):
        """Return local hardware infos."""
        return {
            ATTR_SERIAL: self.local_hw.serial_devices,
            ATTR_INPUT: self.local_hw.input_devices,
            ATTR_DISK: self.local_hw.disk_devices,
            ATTR_AUDIO: self.local_hw.audio_devices,
            ATTR_SERIAL: list(self._hardware.serial_devices),
            ATTR_INPUT: list(self._hardware.input_devices),
            ATTR_DISK: list(self._hardware.disk_devices),
            ATTR_GPIO: list(self._hardware.gpio_devices),
            ATTR_AUDIO: self._hardware.audio_devices,
        }
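Note how the reworked hardware handler wraps the device properties in list() before returning them: the JSON response path can only serialize basic types, so a set or generator coming out of the hardware layer has to be converted first. A small illustration (the set below is only a stand-in for what a property like serial_devices might yield):

import json

serial_devices = {'/dev/ttyUSB0', '/dev/ttyACM0'}   # e.g. a set of device paths

try:
    json.dumps({'serial': serial_devices})
except TypeError as err:
    print("sets are not JSON serializable:", err)

# casting to list first makes the payload serializable
print(json.dumps({'serial': list(serial_devices)}))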
@@ -3,8 +3,9 @@ import logging

import voluptuous as vol

from .util import api_process, api_process_hostcontrol, api_validate
from .utils import api_process, api_process_hostcontrol, api_validate
from ..const import ATTR_HOSTNAME
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

@@ -14,20 +15,14 @@ SCHEMA_OPTIONS = vol.Schema({
})


class APINetwork(object):
class APINetwork(CoreSysAttributes):
    """Handle rest api for network functions."""

    def __init__(self, config, loop, host_control):
        """Initialize network rest api part."""
        self.config = config
        self.loop = loop
        self.host_control = host_control

    @api_process
    async def info(self, request):
        """Show network settings."""
        return {
            ATTR_HOSTNAME: self.host_control.hostname,
            ATTR_HOSTNAME: self._host_control.hostname,
        }

    @api_process_hostcontrol
@@ -37,7 +32,7 @@ class APINetwork(object):

        # hostname
        if ATTR_HOSTNAME in body:
            if self.host_control.hostname != body[ATTR_HOSTNAME]:
                await self.host_control.set_hostname(body[ATTR_HOSTNAME])
            if self._host_control.hostname != body[ATTR_HOSTNAME]:
                await self._host_control.set_hostname(body[ATTR_HOSTNAME])

        return True
82
hassio/api/panel/hassio-app.html
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/hassio-app.html.gz
Normal file
Binary file not shown.
72
hassio/api/panel/hassio-main-es5.html
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/hassio-main-es5.html.gz
Normal file
Binary file not shown.
72
hassio/api/panel/hassio-main-latest.html
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/hassio-main-latest.html.gz
Normal file
Binary file not shown.
37
hassio/api/panel/index.html
Normal file
@@ -0,0 +1,37 @@
<!doctype html>
<html>
<head>
  <meta charset="utf-8">
  <title>Hass.io</title>
  <meta name='viewport' content='width=device-width, user-scalable=no'>
  <style>
    body {
      height: 100vh;
      margin: 0;
      padding: 0;
    }
  </style>
</head>
<body>
  <hassio-app></hassio-app>
  <script>
    function addScript(src) {
      var e = document.createElement('script');
      e.src = src;
      document.head.appendChild(e);
    }
    if (!window.parent.HASS_DEV) {
      addScript('/frontend_es5/custom-elements-es5-adapter.js');
    }
    var webComponentsSupported = (
      'customElements' in window &&
      'import' in document.createElement('link') &&
      'content' in document.createElement('template'));
    if (!webComponentsSupported) {
      addScript('/static/webcomponents-lite.js');
    }
  </script>
  <link rel='import' href='./hassio-app.html'>
  <link rel='import' href='/static/mdi.html' async>
</body>
</html>
BIN
hassio/api/panel/index.html.gz
Normal file
Binary file not shown.
224
hassio/api/proxy.py
Normal file
@@ -0,0 +1,224 @@
"""Utils for HomeAssistant Proxy."""
import asyncio
import logging

import aiohttp
from aiohttp import web
from aiohttp.web_exceptions import HTTPBadGateway, HTTPInternalServerError
from aiohttp.hdrs import CONTENT_TYPE
import async_timeout

from ..const import HEADER_HA_ACCESS
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


class APIProxy(CoreSysAttributes):
    """API Proxy for Home-Assistant."""

    def _check_access(self, request):
        """Check the Hass.io token."""
        hassio_token = request.headers.get(HEADER_HA_ACCESS)
        addon = self._addons.from_uuid(hassio_token)

        if not addon:
            _LOGGER.warning("Unknown Home-Assistant API access!")
        else:
            _LOGGER.info("%s access from %s", request.path, addon.slug)

    async def _api_client(self, request, path, timeout=300):
        """Return a client request with proxy origin for Home-Assistant."""
        url = f"{self._homeassistant.api_url}/api/{path}"

        try:
            data = None
            headers = {}
            method = getattr(self._websession_ssl, request.method.lower())
            params = request.query or None

            # read data
            with async_timeout.timeout(30, loop=self._loop):
                data = await request.read()

            if data:
                headers.update({CONTENT_TYPE: request.content_type})

            # need api password?
            if self._homeassistant.api_password:
                headers = {HEADER_HA_ACCESS: self._homeassistant.api_password}

            # reset headers
            if not headers:
                headers = None

            client = await method(
                url, data=data, headers=headers, timeout=timeout,
                params=params
            )

            return client

        except aiohttp.ClientError as err:
            _LOGGER.error("Client error on API %s request %s.", path, err)

        except asyncio.TimeoutError:
            _LOGGER.error("Client timeout error on API request %s.", path)

        raise HTTPBadGateway()

    async def stream(self, request):
        """Proxy HomeAssistant EventStream Requests."""
        self._check_access(request)

        _LOGGER.info("Home-Assistant EventStream start")
        client = await self._api_client(request, 'stream', timeout=None)

        response = web.StreamResponse()
        response.content_type = request.headers.get(CONTENT_TYPE)
        try:
            await response.prepare(request)
            while True:
                data = await client.content.read(10)
                if not data:
                    await response.write_eof()
                    break
                response.write(data)

        except aiohttp.ClientError:
            await response.write_eof()

        except asyncio.CancelledError:
            pass

        finally:
            client.close()
            _LOGGER.info("Home-Assistant EventStream close")

        return response

    async def api(self, request):
        """Proxy HomeAssistant API Requests."""
        self._check_access(request)

        # Normal request
        path = request.match_info.get('path', '')
        client = await self._api_client(request, path)

        data = await client.read()
        return web.Response(
            body=data,
            status=client.status,
            content_type=client.content_type
        )

    async def _websocket_client(self):
        """Initialize a websocket api connection."""
        url = f"{self._homeassistant.api_url}/api/websocket"

        try:
            client = await self._websession_ssl.ws_connect(
                url, heartbeat=60, verify_ssl=False)

            # handle authentication
            for _ in range(2):
                data = await client.receive_json()
                if data.get('type') == 'auth_ok':
                    return client
                elif data.get('type') == 'auth_required':
                    await client.send_json({
                        'type': 'auth',
                        'api_password': self._homeassistant.api_password,
                    })

            _LOGGER.error("Authentication to Home-Assistant websocket")

        except (aiohttp.ClientError, RuntimeError) as err:
            _LOGGER.error("Client error on websocket API %s.", err)

        raise HTTPBadGateway()

    async def websocket(self, request):
        """Initialize a websocket api connection."""
_LOGGER.info("Home-Assistant Websocket API request initialze")

        # init server
        server = web.WebSocketResponse(heartbeat=60)
        await server.prepare(request)

        # handle authentication
        try:
            await server.send_json({
                'type': 'auth_required',
                'ha_version': self._homeassistant.version,
            })

            # Check API access
            response = await server.receive_json()
            hassio_token = response.get('api_password')
            addon = self._addons.from_uuid(hassio_token)

            if not addon:
                _LOGGER.warning("Unauthorized websocket access!")
            else:
                _LOGGER.info("Websocket access from %s", addon.slug)

            await server.send_json({
                'type': 'auth_ok',
                'ha_version': self._homeassistant.version,
            })
        except (RuntimeError, ValueError) as err:
            _LOGGER.error("Can't initialize handshake: %s", err)
            raise HTTPInternalServerError() from None

        # init connection to hass
        client = await self._websocket_client()

        _LOGGER.info("Home-Assistant Websocket API request running")
        try:
            client_read = None
            server_read = None
            while not server.closed and not client.closed:
                if not client_read:
                    client_read = asyncio.ensure_future(
                        client.receive_str(), loop=self._loop)
                if not server_read:
                    server_read = asyncio.ensure_future(
                        server.receive_str(), loop=self._loop)

                # wait until data need to be processed
                await asyncio.wait(
                    [client_read, server_read],
                    loop=self._loop, return_when=asyncio.FIRST_COMPLETED
                )

                # server
                if server_read.done() and not client.closed:
                    server_read.exception()
                    await client.send_str(server_read.result())
                    server_read = None

                # client
                if client_read.done() and not server.closed:
                    client_read.exception()
                    await server.send_str(client_read.result())
                    client_read = None

        except asyncio.CancelledError:
            pass

        except RuntimeError as err:
            _LOGGER.info("Home-Assistant Websocket API error: %s", err)

        finally:
            if client_read:
                client_read.cancel()
            if server_read:
                server_read.cancel()

            # close connections
            await client.close()
            await server.close()

        _LOGGER.info("Home-Assistant Websocket API connection is closed")
        return server
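The websocket() handler bridges two connections by keeping one pending read per side and waking on asyncio.FIRST_COMPLETED; a finished read is forwarded to the other peer and re-armed. A stripped-down, runnable sketch of that pump, with pairs of asyncio queues standing in for the two websockets (None plays the role of a closed connection):

import asyncio

async def pump(recv_a, send_a, recv_b, send_b):
    """Forward messages both ways until one side sends None."""
    read_a = read_b = None
    try:
        while True:
            if read_a is None:
                read_a = asyncio.ensure_future(recv_a.get())
            if read_b is None:
                read_b = asyncio.ensure_future(recv_b.get())

            # wake as soon as either side has data
            await asyncio.wait([read_a, read_b],
                               return_when=asyncio.FIRST_COMPLETED)

            if read_a.done():
                msg, read_a = read_a.result(), None
                if msg is None:
                    return
                await send_b.put(msg)          # client -> server
            if read_b.done():
                msg, read_b = read_b.result(), None
                if msg is None:
                    return
                await send_a.put(msg)          # server -> client
    finally:
        # like the finally block above: drop the still-pending read
        for fut in (read_a, read_b):
            if fut is not None:
                fut.cancel()

async def main():
    recv_a, send_a = asyncio.Queue(), asyncio.Queue()
    recv_b, send_b = asyncio.Queue(), asyncio.Queue()
    await recv_a.put("hello")   # message arriving from side A
    await recv_a.put(None)      # side A closes
    await pump(recv_a, send_a, recv_b, send_b)
    print(send_b.get_nowait())  # -> hello

asyncio.get_event_loop().run_until_complete(main())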
@@ -1,102 +1,50 @@
"""Init file for HassIO security rest api."""
from datetime import datetime, timedelta
import io
"""Handle security part of this API."""
import logging
import hashlib
import os
import re

from aiohttp import web
import voluptuous as vol
import pyotp
import pyqrcode
from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPUnauthorized

from .util import api_process, api_validate, hash_password
from ..const import ATTR_INITIALIZE, ATTR_PASSWORD, ATTR_TOTP, ATTR_SESSION
from ..const import HEADER_TOKEN, REQUEST_FROM

_LOGGER = logging.getLogger(__name__)

SCHEMA_PASSWORD = vol.Schema({
    vol.Required(ATTR_PASSWORD): vol.Coerce(str),
})

SCHEMA_SESSION = SCHEMA_PASSWORD.extend({
    vol.Optional(ATTR_TOTP, default=None): vol.Coerce(str),
})
NO_SECURITY_CHECK = set((
    re.compile(r"^/homeassistant/api/.*$"),
    re.compile(r"^/homeassistant/websocket$"),
    re.compile(r"^/supervisor/ping$"),
))


class APISecurity(object):
    """Handle rest api for security functions."""
@middleware
async def security_layer(request, handler):
    """Check security access of this layer."""
    coresys = request.app['coresys']
    hassio_token = request.headers.get(HEADER_TOKEN)

    def __init__(self, config, loop):
        """Initialize security rest api part."""
        self.config = config
        self.loop = loop
    # Ignore security check
    for rule in NO_SECURITY_CHECK:
        if rule.match(request.path):
            _LOGGER.debug("Passthrough %s", request.path)
            return await handler(request)

    def _check_password(self, body):
        """Check if password is valid and security is initialize."""
        if not self.config.security_initialize:
            raise RuntimeError("First set a password")
    # Need to be removed later
    if not hassio_token:
        _LOGGER.warning("Invalid token for access %s", request.path)
        request[REQUEST_FROM] = 'UNKNOWN'
        return await handler(request)

        password = hash_password(body[ATTR_PASSWORD])
        if password != self.config.security_password:
            raise RuntimeError("Wrong password")
    # Home-Assistant
    if hassio_token == coresys.homeassistant.uuid:
        _LOGGER.debug("%s access from Home-Assistant", request.path)
        request[REQUEST_FROM] = 'homeassistant'
        return await handler(request)

    @api_process
    async def info(self, request):
        """Return host information."""
        return {
            ATTR_INITIALIZE: self.config.security_initialize,
            ATTR_TOTP: self.config.security_totp is not None,
        }
    # Add-on
    addon = coresys.addons.from_uuid(hassio_token)
    if addon:
        _LOGGER.info("%s access from %s", request.path, addon.slug)
        request[REQUEST_FROM] = addon.slug
        return await handler(request)

    @api_process
    async def options(self, request):
        """Set options / password."""
        body = await api_validate(SCHEMA_PASSWORD, request)

        if self.config.security_initialize:
            raise RuntimeError("Password is already set!")

        self.config.security_password = hash_password(body[ATTR_PASSWORD])
        self.config.security_initialize = True
        return True

    @api_process
    async def totp(self, request):
        """Set and initialize TOTP."""
        body = await api_validate(SCHEMA_PASSWORD, request)
        self._check_password(body)

        # generate TOTP
        totp_init_key = pyotp.random_base32()
        totp = pyotp.TOTP(totp_init_key)

        # init qrcode
        buff = io.BytesIO()

        qrcode = pyqrcode.create(totp.provisioning_uri("Hass.IO"))
        qrcode.svg(buff)

        # finish
        self.config.security_totp = totp_init_key
        return web.Response(body=buff.getvalue(), content_type='image/svg+xml')

    @api_process
    async def session(self, request):
        """Set and initialize session."""
        body = await api_validate(SCHEMA_SESSION, request)
        self._check_password(body)

        # check TOTP
        if self.config.security_totp:
            totp = pyotp.TOTP(self.config.security_totp)
            if body[ATTR_TOTP] != totp.now():
                raise RuntimeError("Invalid TOTP token!")

        # create session
        valid_until = datetime.now() + timedelta(days=1)
        session = hashlib.sha256(os.urandom(54)).hexdigest()

        # store session
        self.config.add_security_session(session, valid_until)
        return {ATTR_SESSION: session}
    raise HTTPUnauthorized()
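The security layer is now an @middleware coroutine rather than a set of REST handlers, so it runs in front of every route the application registers. A minimal sketch of how such a middleware is wired into an aiohttp application; the token header name comes from the code above, while the literal 'secret' check and the route are purely illustrative (the real middleware resolves the token against Home-Assistant and the add-on registry):

from aiohttp import web
from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPUnauthorized

HEADER_TOKEN = 'X-HASSIO-KEY'

@middleware
async def security_layer(request, handler):
    # reject requests that don't carry the expected token header
    if request.headers.get(HEADER_TOKEN) != 'secret':
        raise HTTPUnauthorized()
    return await handler(request)

async def info(request):
    return web.json_response({'result': 'ok'})

app = web.Application(middlewares=[security_layer])
app.router.add_get('/info', info)

if __name__ == '__main__':
    web.run_app(app, port=8080)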
55
hassio/api/services.py
Normal file
@@ -0,0 +1,55 @@
"""Init file for HassIO network rest api."""

from .utils import api_process, api_validate
from ..const import (
    ATTR_AVAILABLE, ATTR_PROVIDER, ATTR_SLUG, ATTR_SERVICES, REQUEST_FROM)
from ..coresys import CoreSysAttributes


class APIServices(CoreSysAttributes):
    """Handle rest api for services functions."""

    def _extract_service(self, request):
"""Return service and if not exists trow a exception."""
        service = self._services.get(request.match_info.get('service'))
        if not service:
            raise RuntimeError("Service not exists")

        return service

    @api_process
    async def list(self, request):
        """Show register services."""
        services = []
        for service in self._services.list_services:
            services.append({
                ATTR_SLUG: service.slug,
                ATTR_AVAILABLE: service.enabled,
                ATTR_PROVIDER: service.provider,
            })

        return {ATTR_SERVICES: services}

    @api_process
    async def set_service(self, request):
        """Write data into a service."""
        service = self._extract_service(request)
        body = await api_validate(service.schema, request)

        return service.set_service_data(request[REQUEST_FROM], body)

    @api_process
    async def get_service(self, request):
        """Read data into a service."""
        service = self._extract_service(request)

        return {
            ATTR_AVAILABLE: service.enabled,
            service.slug: service.get_service_data(),
        }

    @api_process
    async def del_service(self, request):
        """Delete data into a service."""
        service = self._extract_service(request)
        return service.del_service_data(request[REQUEST_FROM])
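The services handlers trust request[REQUEST_FROM], which the security middleware stored after resolving the caller's token; the handler itself never re-validates the caller. A compressed sketch of that hand-off, with a plain dict standing in for the add-on token lookup (the uuid and slug values are illustrative):

from aiohttp import web
from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPUnauthorized

REQUEST_FROM = 'HASSIO_FROM'
ADDON_TOKENS = {'abc123': 'core_mosquitto'}   # illustrative uuid -> slug map

@middleware
async def tag_origin(request, handler):
    # resolve the token once and attach the caller identity to the request
    slug = ADDON_TOKENS.get(request.headers.get('X-HASSIO-KEY'))
    if slug is None:
        raise HTTPUnauthorized()
    request[REQUEST_FROM] = slug
    return await handler(request)

async def set_service(request):
    # the handler only reads the identity the middleware stored
    return web.json_response({'written_by': request[REQUEST_FROM]})

app = web.Application(middlewares=[tag_origin])
app.router.add_post('/services/mqtt', set_service)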
@@ -1,48 +1,56 @@
"""Init file for HassIO snapshot rest api."""
import asyncio
import logging
from pathlib import Path
from tempfile import TemporaryDirectory

from aiohttp import web
import voluptuous as vol

from .util import api_process, api_validate
from .utils import api_process, api_validate
from ..snapshots.validate import ALL_FOLDERS
from ..const import (
    ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
    ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
    ATTR_DEVICES, ATTR_SNAPSHOTS)
    ATTR_SNAPSHOTS, ATTR_PASSWORD, ATTR_PROTECTED, CONTENT_TYPE_TAR)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema({
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
    vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS):
        vol.All([vol.Coerce(str)], vol.Unique()),
    vol.Optional(ATTR_FOLDERS):
        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
})

SCHEMA_RESTORE_FULL = vol.Schema({
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})

SCHEMA_SNAPSHOT_FULL = vol.Schema({
    vol.Optional(ATTR_NAME): vol.Coerce(str),
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})

SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS):
        vol.All([vol.Coerce(str)], vol.Unique()),
    vol.Optional(ATTR_FOLDERS):
        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
})


class APISnapshots(object):
class APISnapshots(CoreSysAttributes):
    """Handle rest api for snapshot functions."""

    def __init__(self, config, loop, snapshots):
        """Initialize network rest api part."""
        self.config = config
        self.loop = loop
        self.snapshots = snapshots

    def _extract_snapshot(self, request):
"""Return addon and if not exists trow a exception."""
        snapshot = self.snapshots.get(request.match_info.get('snapshot'))
        snapshot = self._snapshots.get(request.match_info.get('snapshot'))
        if not snapshot:
            raise RuntimeError("Snapshot not exists")
        return snapshot
@@ -51,11 +59,13 @@ class APISnapshots(object):
    async def list(self, request):
        """Return snapshot list."""
        data_snapshots = []
        for snapshot in self.snapshots.list_snapshots:
        for snapshot in self._snapshots.list_snapshots:
            data_snapshots.append({
                ATTR_SLUG: snapshot.slug,
                ATTR_NAME: snapshot.name,
                ATTR_DATE: snapshot.date,
                ATTR_TYPE: snapshot.sys_type,
                ATTR_PROTECTED: snapshot.protected,
            })

        return {
@@ -65,7 +75,7 @@ class APISnapshots(object):
    @api_process
    async def reload(self, request):
        """Reload snapshot list."""
        await asyncio.shield(self.snapshots.reload(), loop=self.loop)
        await asyncio.shield(self._snapshots.reload(), loop=self._loop)
        return True

    @api_process
@@ -79,6 +89,7 @@ class APISnapshots(object):
                ATTR_SLUG: addon_data[ATTR_SLUG],
                ATTR_NAME: addon_data[ATTR_NAME],
                ATTR_VERSION: addon_data[ATTR_VERSION],
                ATTR_SIZE: addon_data[ATTR_SIZE],
            })

        return {
@@ -87,10 +98,8 @@ class APISnapshots(object):
            ATTR_NAME: snapshot.name,
            ATTR_DATE: snapshot.date,
            ATTR_SIZE: snapshot.size,
            ATTR_HOMEASSISTANT: {
                ATTR_VERSION: snapshot.homeassistant_version,
                ATTR_DEVICES: snapshot.homeassistant_devices,
            },
            ATTR_PROTECTED: snapshot.protected,
            ATTR_HOMEASSISTANT: snapshot.homeassistant_version,
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: snapshot.repositories,
            ATTR_FOLDERS: snapshot.folders,
@@ -100,36 +109,82 @@ class APISnapshots(object):
    async def snapshot_full(self, request):
        """Full-Snapshot a snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
        return await asyncio.shield(
            self.snapshots.do_snapshot_full(**body), loop=self.loop)
        snapshot = await asyncio.shield(
            self._snapshots.do_snapshot_full(**body), loop=self._loop)

        if snapshot:
            return {ATTR_SLUG: snapshot.slug}
        return False

    @api_process
    async def snapshot_partial(self, request):
        """Partial-Snapshot a snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        return await asyncio.shield(
            self.snapshots.do_snapshot_partial(**body), loop=self.loop)
        snapshot = await asyncio.shield(
            self._snapshots.do_snapshot_partial(**body), loop=self._loop)

        if snapshot:
            return {ATTR_SLUG: snapshot.slug}
        return False

    @api_process
    def restore_full(self, request):
    async def restore_full(self, request):
        """Full-Restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        return asyncio.shield(
            self.snapshots.do_restore_full(snapshot), loop=self.loop)
        body = await api_validate(SCHEMA_RESTORE_FULL, request)

        return await asyncio.shield(
            self._snapshots.do_restore_full(snapshot, **body),
            loop=self._loop
        )

    @api_process
    async def restore_partial(self, request):
        """Partial-Restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)

        return await asyncio.shield(
            self.snapshots.do_restore_partial(snapshot, **body),
            loop=self.loop
            self._snapshots.do_restore_partial(snapshot, **body),
            loop=self._loop
        )

    @api_process
    async def remove(self, request):
        """Remove a snapshot."""
        snapshot = self._extract_snapshot(request)
        return self.snapshots.remove(snapshot)
        return self._snapshots.remove(snapshot)

    async def download(self, request):
        """Download a snapshot file."""
        snapshot = self._extract_snapshot(request)

        _LOGGER.info("Download snapshot %s", snapshot.slug)
        response = web.FileResponse(snapshot.tarfile)
        response.content_type = CONTENT_TYPE_TAR
        return response

    @api_process
    async def upload(self, request):
        """Upload a snapshot file."""
        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp_dir:
            tar_file = Path(temp_dir, f"snapshot.tar")

            try:
                with tar_file.open('wb') as snapshot:
                    async for data in request.content.iter_any():
                        snapshot.write(data)

            except OSError as err:
                _LOGGER.error("Can't write new snapshot file: %s", err)
                return False

            except asyncio.CancelledError:
                return False

            snapshot = await asyncio.shield(
                self._snapshots.import_snapshot(tar_file), loop=self._loop)

        if snapshot:
            return {ATTR_SLUG: snapshot.slug}
        return False
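The new upload() handler streams the request body to disk chunk by chunk with request.content.iter_any() instead of reading it into memory, which matters for multi-hundred-megabyte snapshot tarballs. The core of that pattern as a self-contained aiohttp handler (route and response shape are illustrative):

from pathlib import Path
from tempfile import TemporaryDirectory

from aiohttp import web

async def upload(request):
    """Write an uploaded file to a temp path without buffering it in RAM."""
    with TemporaryDirectory() as temp_dir:
        tar_file = Path(temp_dir, 'snapshot.tar')
        with tar_file.open('wb') as outfile:
            # iter_any() yields chunks as they arrive on the socket
            async for chunk in request.content.iter_any():
                outfile.write(chunk)
        size = tar_file.stat().st_size
    return web.json_response({'received_bytes': size})

app = web.Application()
app.router.add_post('/upload', upload)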
@@ -4,21 +4,25 @@ import logging

import voluptuous as vol

from .util import api_process, api_process_raw, api_validate
from .utils import api_process, api_process_raw, api_validate
from ..const import (
    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
    HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
    ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
    ATTR_STATE, CONTENT_TYPE_BINARY)
from ..validate import validate_timezone
    ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
    ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
    ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
from ..coresys import CoreSysAttributes
from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES

_LOGGER = logging.getLogger(__name__)

SCHEMA_OPTIONS = vol.Schema({
    # pylint: disable=no-value-for-parameter
    vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
    vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
    vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
    vol.Optional(ATTR_TIMEZONE): validate_timezone,
    vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
})

SCHEMA_VERSION = vol.Schema({
@@ -26,20 +30,9 @@ SCHEMA_VERSION = vol.Schema({
})


class APISupervisor(object):
class APISupervisor(CoreSysAttributes):
    """Handle rest api for supervisor functions."""

    def __init__(self, config, loop, supervisor, snapshots, addons,
                 host_control, updater):
        """Initialize supervisor rest api part."""
        self.config = config
        self.loop = loop
        self.supervisor = supervisor
        self.addons = addons
        self.snapshots = snapshots
        self.host_control = host_control
        self.updater = updater

    @api_process
    async def ping(self, request):
        """Return ok for signal that the api is ready."""
@@ -49,7 +42,7 @@ class APISupervisor(object):
    async def info(self, request):
        """Return host information."""
        list_addons = []
        for addon in self.addons.list_addons:
        for addon in self._addons.list_addons:
            if addon.is_installed:
                list_addons.append({
                    ATTR_NAME: addon.name,
@@ -59,17 +52,19 @@ class APISupervisor(object):
                    ATTR_VERSION: addon.last_version,
                    ATTR_INSTALLED: addon.version_installed,
                    ATTR_REPOSITORY: addon.repository,
                    ATTR_ICON: addon.with_icon,
                    ATTR_LOGO: addon.with_logo,
                })

        return {
            ATTR_VERSION: HASSIO_VERSION,
            ATTR_LAST_VERSION: self.updater.version_hassio,
            ATTR_BETA_CHANNEL: self.updater.beta_channel,
            ATTR_ARCH: self.config.arch,
            ATTR_TIMEZONE: self.config.timezone,
            ATTR_LAST_VERSION: self._updater.version_hassio,
            ATTR_BETA_CHANNEL: self._updater.beta_channel,
            ATTR_ARCH: self._arch,
            ATTR_WAIT_BOOT: self._config.wait_boot,
            ATTR_TIMEZONE: self._config.timezone,
            ATTR_ADDONS: list_addons,
            ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
            ATTR_ADDONS_REPOSITORIES: self._config.addons_repositories,
        }

    @api_process
@@ -78,40 +73,59 @@ class APISupervisor(object):
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_BETA_CHANNEL in body:
            self.updater.beta_channel = body[ATTR_BETA_CHANNEL]
            self._updater.beta_channel = body[ATTR_BETA_CHANNEL]

        if ATTR_TIMEZONE in body:
            self.config.timezone = body[ATTR_TIMEZONE]
            self._config.timezone = body[ATTR_TIMEZONE]

        if ATTR_WAIT_BOOT in body:
            self._config.wait_boot = body[ATTR_WAIT_BOOT]

        if ATTR_ADDONS_REPOSITORIES in body:
            new = set(body[ATTR_ADDONS_REPOSITORIES])
            await asyncio.shield(self.addons.load_repositories(new))
            await asyncio.shield(self._addons.load_repositories(new))

        self._updater.save_data()
        self._config.save_data()
        return True

    @api_process
    async def stats(self, request):
        """Return resource information."""
        stats = await self._supervisor.stats()
        if not stats:
            raise RuntimeError("No stats available")

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request):
        """Update supervisor OS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.updater.version_hassio)
        version = body.get(ATTR_VERSION, self._updater.version_hassio)

        if version == self.supervisor.version:
        if version == self._supervisor.version:
            raise RuntimeError("Version {} is already in use".format(version))

        return await asyncio.shield(
            self.supervisor.update(version), loop=self.loop)
            self._supervisor.update(version), loop=self._loop)

    @api_process
    async def reload(self, request):
        """Reload addons, config ect."""
        tasks = [
            self.addons.reload(),
            self.snapshots.reload(),
            self.updater.fetch_data(),
            self.host_control.load()
            self._updater.reload(),
        ]
        results, _ = await asyncio.shield(
            asyncio.wait(tasks, loop=self.loop), loop=self.loop)
            asyncio.wait(tasks, loop=self._loop), loop=self._loop)

        for result in results:
            if result.exception() is not None:
@@ -122,4 +136,4 @@ class APISupervisor(object):
    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return supervisor docker logs."""
        return self.supervisor.logs()
        return self._supervisor.logs()
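reload() fans out several refresh coroutines with asyncio.wait and then inspects result.exception() on each finished task, so one failing reload does not abort the others. A small runnable sketch of that error-tolerant gather:

import asyncio

async def ok():
    return 'done'

async def boom():
    raise RuntimeError('reload failed')

async def reload_all():
    tasks = [asyncio.ensure_future(ok()), asyncio.ensure_future(boom())]
    # wait for everything; exceptions stay stored inside the tasks
    results, _ = await asyncio.wait(tasks)
    for result in results:
        if result.exception() is not None:
            print('one task failed:', result.exception())

asyncio.get_event_loop().run_until_complete(reload_all())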
@@ -17,10 +17,12 @@ _LOGGER = logging.getLogger(__name__)

def json_loads(data):
    """Extract json from string with support for '' and None."""
    if not data:
        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
        return {}
        raise RuntimeError("Invalid json")


def api_process(method):
@@ -47,7 +49,8 @@ def api_process_hostcontrol(method):
    """Wrap HostControl calls to rest api."""
    async def wrap_hostcontrol(api, *args, **kwargs):
        """Return host information."""
        if not api.host_control.active:
        # pylint: disable=protected-access
        if not api._host_control.active:
            raise HTTPServiceUnavailable()

        try:
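json_loads used to swallow bad payloads and return {}, which made a malformed request body indistinguishable from an empty one; raising RuntimeError instead pushes the error up to the api_process wrapper. The behavioral difference, shown with the function as it now stands:

import json

def json_loads(data):
    """Parse a request body; '' and None mean an empty payload."""
    if not data:
        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
        raise RuntimeError("Invalid json")

print(json_loads(''))            # {} - empty body is fine
print(json_loads('{"a": 1}'))    # {'a': 1}
try:
    json_loads('{broken')
except RuntimeError as err:
    print(err)                   # Invalid json - no longer silently {}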
@@ -2,19 +2,48 @@
import logging
import os
import signal
import shutil
from pathlib import Path

from colorlog import ColoredFormatter

from .addons import AddonManager
from .api import RestAPI
from .const import SOCKET_DOCKER
from .config import CoreConfig
from .coresys import CoreSys
from .supervisor import Supervisor
from .homeassistant import HomeAssistant
from .snapshots import SnapshotManager
from .tasks import Tasks
from .updater import Updater
from .services import ServiceManager

_LOGGER = logging.getLogger(__name__)


def initialize_system_data():
def initialize_coresys(loop):
    """Initialize HassIO coresys/objects."""
    coresys = CoreSys(loop)

    # Initialize core objects
    coresys.updater = Updater(coresys)
    coresys.api = RestAPI(coresys)
    coresys.supervisor = Supervisor(coresys)
    coresys.homeassistant = HomeAssistant(coresys)
    coresys.addons = AddonManager(coresys)
    coresys.snapshots = SnapshotManager(coresys)
    coresys.tasks = Tasks(coresys)
    coresys.services = ServiceManager(coresys)

    # bootstrap config
    initialize_system_data(coresys)

    return coresys


def initialize_system_data(coresys):
    """Setup default config and create folders."""
    config = CoreConfig()
    config = coresys.config

    # homeassistant config folder
    if not config.path_config.is_dir():
@@ -61,8 +90,9 @@ def initialize_system_data():
    return config


def migrate_system_env(config):
def migrate_system_env(coresys):
    """Cleanup some stuff after update."""
    config = coresys.config

    # hass.io 0.37 -> 0.38
    old_build = Path(config.path_hassio, "addons/build")
@@ -100,6 +130,7 @@ def initialize_logging():

def check_environment():
"""Check if all environment are exists."""
    # check environment variables
    for key in ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME',
                'HOMEASSISTANT_REPOSITORY'):
        try:
@@ -108,29 +139,35 @@ def check_environment():
        _LOGGER.fatal("Can't find %s in env!", key)
        return False

    # check docker socket
    if not SOCKET_DOCKER.is_socket():
        _LOGGER.fatal("Can't find docker socket!")
        return False

    # check socat exec
    if not shutil.which('socat'):
_LOGGER.fatal("Can0t find socat program!")
        return False

    return True


def reg_signal(loop, hassio):
def reg_signal(loop):
    """Register SIGTERM, SIGKILL to stop system."""
    try:
        loop.add_signal_handler(
            signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
            signal.SIGTERM, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGTERM")

    try:
        loop.add_signal_handler(
            signal.SIGHUP, lambda: loop.create_task(hassio.stop()))
            signal.SIGHUP, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGHUP")

    try:
        loop.add_signal_handler(
            signal.SIGINT, lambda: loop.create_task(hassio.stop()))
            signal.SIGINT, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGINT")
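reg_signal now just stops the loop from the handler (loop.call_soon(loop.stop)) instead of scheduling a coroutine on the half-shut-down HassIO object. A runnable sketch of the same wiring that sends itself a SIGTERM to show the effect (POSIX only; add_signal_handler is not available on Windows event loops):

import asyncio
import os
import signal

def reg_signal(loop):
    """Stop the loop cleanly on SIGTERM/SIGINT."""
    for sig in (signal.SIGTERM, signal.SIGINT):
        try:
            loop.add_signal_handler(sig, lambda: loop.call_soon(loop.stop))
        except (ValueError, RuntimeError):
            print('Could not bind to', sig)

loop = asyncio.get_event_loop()
reg_signal(loop)
# deliver SIGTERM to ourselves shortly after startup
loop.call_later(0.1, os.kill, os.getpid(), signal.SIGTERM)
loop.run_forever()        # returns once the handler stops the loop
print('loop stopped by signal')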
106
hassio/config.py
@@ -5,16 +5,14 @@ import os
from pathlib import Path, PurePath

from .const import (
    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
    ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_API_ENDPOINT,
    ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT)
from .tools import JsonConfig
    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
    ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
from .utils.dt import parse_datetime
from .utils.json import JsonConfig
from .validate import SCHEMA_HASSIO_CONFIG

_LOGGER = logging.getLogger(__name__)

DATETIME_FORMAT = "%Y%m%d %H:%M:%S"

HOMEASSISTANT_CONFIG = PurePath("homeassistant")

HASSIO_SSL = PurePath("ssl")
@@ -28,6 +26,8 @@ BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share")
TMP_DATA = PurePath("tmp")

DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()


class CoreConfig(JsonConfig):
    """Hold all core config data."""
@@ -35,17 +35,6 @@ class CoreConfig(JsonConfig):
    def __init__(self):
        """Initialize config object."""
        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
        self.arch = None

    @property
    def api_endpoint(self):
        """Return IP address of api endpoint."""
        return self._data[ATTR_API_ENDPOINT]

    @api_endpoint.setter
    def api_endpoint(self, value):
        """Store IP address of api endpoint."""
        self._data[ATTR_API_ENDPOINT] = value

    @property
    def timezone(self):
@@ -56,7 +45,31 @@ class CoreConfig(JsonConfig):
    def timezone(self, value):
        """Set system timezone."""
        self._data[ATTR_TIMEZONE] = value
        self.save()

    @property
    def wait_boot(self):
        """Return wait time for auto boot stages."""
        return self._data[ATTR_WAIT_BOOT]

    @wait_boot.setter
    def wait_boot(self, value):
        """Set wait boot time."""
        self._data[ATTR_WAIT_BOOT] = value

    @property
    def last_boot(self):
        """Return last boot datetime."""
        boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)

        boot_time = parse_datetime(boot_str)
        if not boot_time:
            return datetime.utcfromtimestamp(1)
        return boot_time

    @last_boot.setter
    def last_boot(self, value):
        """Set last boot datetime."""
        self._data[ATTR_LAST_BOOT] = value.isoformat()

    @property
    def path_hassio(self):
@@ -154,7 +167,6 @@ class CoreConfig(JsonConfig):
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
        self.save()

    def drop_addon_repository(self, repo):
        """Remove a custom repository from list."""
@@ -162,60 +174,6 @@ class CoreConfig(JsonConfig):
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
        self.save()

    @property
    def security_initialize(self):
        """Return is security was initialize."""
        return self._data[ATTR_SECURITY]

    @security_initialize.setter
    def security_initialize(self, value):
        """Set is security initialize."""
        self._data[ATTR_SECURITY] = value
        self.save()

    @property
    def security_totp(self):
        """Return the TOTP key."""
        return self._data.get(ATTR_TOTP)

    @security_totp.setter
    def security_totp(self, value):
        """Set the TOTP key."""
        self._data[ATTR_TOTP] = value
        self.save()

    @property
    def security_password(self):
        """Return the password key."""
        return self._data.get(ATTR_PASSWORD)

    @security_password.setter
    def security_password(self, value):
        """Set the password key."""
        self._data[ATTR_PASSWORD] = value
        self.save()

    @property
    def security_sessions(self):
        """Return api sessions."""
        return {
            session: datetime.strptime(until, DATETIME_FORMAT) for
            session, until in self._data[ATTR_SESSIONS].items()
        }

    def add_security_session(self, session, valid):
        """Set the a new session."""
        self._data[ATTR_SESSIONS].update(
            {session: valid.strftime(DATETIME_FORMAT)}
        )
        self.save()

    def drop_security_session(self, session):
        """Delete the a session."""
        self._data[ATTR_SESSIONS].pop(session, None)
        self.save()

    @property
    def audio_output(self):
@@ -226,7 +184,6 @@ class CoreConfig(JsonConfig):
    def audio_output(self, value):
        """Set ALSA audio output card,dev."""
        self._data[ATTR_AUDIO_OUTPUT] = value
        self.save()

    @property
    def audio_input(self):
@@ -237,4 +194,3 @@ class CoreConfig(JsonConfig):
    def audio_input(self, value):
        """Set ALSA audio input card,dev."""
        self._data[ATTR_AUDIO_INPUT] = value
        self.save()
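last_boot is persisted as an ISO-8601 string (value.isoformat() on write, parse_datetime on read) with the Unix epoch as the default. parse_datetime is Hass.io's own helper; for this sketch datetime.strptime gives the same round-trip:

from datetime import datetime

ISO_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'

def store(value):
    """Serialize like the last_boot setter does."""
    return value.isoformat()

def load(raw):
    """Deserialize like the last_boot getter, with an epoch fall-back."""
    try:
        return datetime.strptime(raw, ISO_FORMAT)
    except (ValueError, TypeError):
        return datetime.utcfromtimestamp(1)

boot = datetime(2018, 1, 17, 12, 30, 0, 123456)
assert load(store(boot)) == boot      # lossless round-trip
print(load('not-a-date'))             # 1970-01-01 00:00:01 fall-back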
@@ -1,7 +1,8 @@
"""Const file for HassIO."""
from pathlib import Path
from ipaddress import ip_network

HASSIO_VERSION = '0.57'
HASSIO_VERSION = '0.94'

URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                      'hassio/{}/version.json')
@@ -10,24 +11,19 @@ URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'

HASSIO_DATA = Path("/data")

RUN_UPDATE_INFO_TASKS = 28800
RUN_UPDATE_SUPERVISOR_TASKS = 29100
RUN_UPDATE_ADDONS_TASKS = 57600
RUN_RELOAD_ADDONS_TASKS = 28800
RUN_RELOAD_SNAPSHOTS_TASKS = 72000
RUN_WATCHDOG_HOMEASSISTANT = 15
RUN_CLEANUP_API_SESSIONS = 900

RESTART_EXIT_CODE = 100

FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")

SOCKET_DOCKER = Path("/var/run/docker.sock")
SOCKET_HC = Path("/var/run/hassio-hc.sock")

DOCKER_NETWORK = 'hassio'
DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
DOCKER_NETWORK_RANGE = ip_network('172.30.33.0/24')

LABEL_VERSION = 'io.hass.version'
LABEL_ARCH = 'io.hass.arch'
LABEL_TYPE = 'io.hass.type'
@@ -45,17 +41,34 @@ RESULT_OK = 'ok'

CONTENT_TYPE_BINARY = 'application/octet-stream'
CONTENT_TYPE_PNG = 'image/png'
CONTENT_TYPE_JSON = 'application/json'
CONTENT_TYPE_TEXT = 'text/plain'
CONTENT_TYPE_TAR = 'application/tar'
HEADER_HA_ACCESS = 'x-ha-access'
HEADER_TOKEN = 'X-HASSIO-KEY'

ENV_TOKEN = 'HASSIO_TOKEN'
ENV_TIME = 'TZ'

REQUEST_FROM = 'HASSIO_FROM'

ATTR_WAIT_BOOT = 'wait_boot'
ATTR_WATCHDOG = 'watchdog'
ATTR_CHANGELOG = 'changelog'
ATTR_DATE = 'date'
ATTR_ARCH = 'arch'
ATTR_LONG_DESCRIPTION = 'long_description'
ATTR_HOSTNAME = 'hostname'
ATTR_TIMEZONE = 'timezone'
ATTR_ARGS = 'args'
ATTR_OS = 'os'
ATTR_TYPE = 'type'
ATTR_SOURCE = 'source'
ATTR_FEATURES = 'features'
ATTR_ADDONS = 'addons'
ATTR_VERSION = 'version'
ATTR_AUTO_UART = 'auto_uart'
ATTR_LAST_BOOT = 'last_boot'
ATTR_LAST_VERSION = 'last_version'
ATTR_BETA_CHANNEL = 'beta_channel'
ATTR_NAME = 'name'
@@ -64,6 +77,8 @@ ATTR_DESCRIPTON = 'description'
ATTR_STARTUP = 'startup'
ATTR_BOOT = 'boot'
ATTR_PORTS = 'ports'
ATTR_PORT = 'port'
ATTR_SSL = 'ssl'
ATTR_MAP = 'map'
ATTR_WEBUI = 'webui'
ATTR_OPTIONS = 'options'
@@ -72,7 +87,9 @@ ATTR_DETACHED = 'detached'
ATTR_STATE = 'state'
ATTR_SCHEMA = 'schema'
ATTR_IMAGE = 'image'
ATTR_ICON = 'icon'
ATTR_LOGO = 'logo'
ATTR_STDIN = 'stdin'
ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
ATTR_REPOSITORY = 'repository'
ATTR_REPOSITORIES = 'repositories'
@@ -88,6 +105,8 @@ ATTR_BUILD = 'build'
ATTR_DEVICES = 'devices'
ATTR_ENVIRONMENT = 'environment'
ATTR_HOST_NETWORK = 'host_network'
ATTR_HOST_IPC = 'host_ipc'
ATTR_HOST_DBUS = 'host_dbus'
ATTR_NETWORK = 'network'
ATTR_TMPFS = 'tmpfs'
ATTR_PRIVILEGED = 'privileged'
@@ -97,6 +116,8 @@ ATTR_SNAPSHOTS = 'snapshots'
ATTR_HOMEASSISTANT = 'homeassistant'
ATTR_HASSIO = 'hassio'
ATTR_HASSIO_API = 'hassio_api'
ATTR_HOMEASSISTANT_API = 'homeassistant_api'
ATTR_UUID = 'uuid'
ATTR_FOLDERS = 'folders'
ATTR_SIZE = 'size'
ATTR_TYPE = 'type'
@@ -111,8 +132,34 @@ ATTR_OUTPUT = 'output'
ATTR_DISK = 'disk'
ATTR_SERIAL = 'serial'
ATTR_SECURITY = 'security'
ATTR_API_ENDPOINT = 'api_endpoint'
ATTR_BUILD_FROM = 'build_from'
ATTR_SQUASH = 'squash'
ATTR_GPIO = 'gpio'
ATTR_LEGACY = 'legacy'
ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'
ATTR_CPU_PERCENT = 'cpu_percent'
ATTR_NETWORK_RX = 'network_rx'
ATTR_NETWORK_TX = 'network_tx'
ATTR_MEMORY_LIMIT = 'memory_limit'
ATTR_MEMORY_USAGE = 'memory_usage'
ATTR_BLK_READ = 'blk_read'
ATTR_BLK_WRITE = 'blk_write'
ATTR_PROVIDER = 'provider'
ATTR_AVAILABLE = 'available'
ATTR_HOST = 'host'
ATTR_USERNAME = 'username'
ATTR_PROTOCOL = 'protocol'
ATTR_DISCOVERY = 'discovery'
ATTR_PLATFORM = 'platform'
ATTR_COMPONENT = 'component'
ATTR_CONFIG = 'config'
ATTR_DISCOVERY_ID = 'discovery_id'
ATTR_SERVICES = 'services'
ATTR_DISCOVERY = 'discovery'
ATTR_PROTECTED = 'protected'
ATTR_CRYPTO = 'crypto'

SERVICE_MQTT = 'mqtt'

STARTUP_INITIALIZE = 'initialize'
STARTUP_SYSTEM = 'system'
@@ -148,3 +195,5 @@ FOLDER_SSL = 'ssl'

SNAPSHOT_FULL = 'full'
SNAPSHOT_PARTIAL = 'partial'

CRYPTO_AES128 = 'aes128'
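DOCKER_NETWORK_MASK and DOCKER_NETWORK_RANGE are ipaddress.ip_network objects, so membership tests and address enumeration come for free when the supervisor assigns container IPs. For instance:

from ipaddress import ip_address, ip_network

DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
DOCKER_NETWORK_RANGE = ip_network('172.30.33.0/24')

# the add-on range is a subnet of the overall hassio network
print(DOCKER_NETWORK_RANGE.subnet_of(DOCKER_NETWORK_MASK))  # True (Py 3.7+)

# membership tests read naturally
print(ip_address('172.30.33.5') in DOCKER_NETWORK_RANGE)    # True
print(ip_address('172.30.32.1') in DOCKER_NETWORK_RANGE)    # False

# enumerate usable addresses when picking an IP for a container
first_hosts = list(DOCKER_NETWORK_RANGE.hosts())[:3]
print(first_hosts)   # [172.30.33.1, 172.30.33.2, 172.30.33.3]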
184
hassio/core.py
184
hassio/core.py
@@ -2,178 +2,116 @@
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import aiohttp
|
||||
import docker
|
||||
|
||||
from .addons import AddonManager
|
||||
from .api import RestAPI
|
||||
from .host_control import HostControl
|
||||
from .coresys import CoreSysAttributes
|
||||
from .const import (
|
||||
SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
|
||||
RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
|
||||
RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
|
||||
STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
|
||||
RUN_UPDATE_ADDONS_TASKS)
|
||||
from .hardware import Hardware
|
||||
from .homeassistant import HomeAssistant
|
||||
from .scheduler import Scheduler
|
||||
from .dock.supervisor import DockerSupervisor
|
||||
from .snapshots import SnapshotsManager
|
||||
from .updater import Updater
|
||||
from .tasks import (
|
||||
hassio_update, homeassistant_watchdog, api_sessions_cleanup, addons_update)
|
||||
from .tools import get_local_ip, fetch_timezone
|
||||
STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE)
|
||||
from .utils.dt import fetch_timezone
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HassIO(object):
|
||||
class HassIO(CoreSysAttributes):
|
||||
"""Main object of hassio."""
|
||||
|
||||
def __init__(self, loop, config):
|
||||
def __init__(self, coresys):
|
||||
"""Initialize hassio object."""
|
||||
self.exit_code = 0
|
||||
self.loop = loop
|
||||
self.config = config
|
||||
self.websession = aiohttp.ClientSession(loop=loop)
|
||||
self.updater = Updater(config, loop, self.websession)
|
||||
self.scheduler = Scheduler(loop)
|
||||
self.api = RestAPI(config, loop)
|
||||
self.hardware = Hardware()
|
||||
self.dock = docker.DockerClient(
|
||||
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
|
||||
|
||||
# init basic docker container
|
||||
self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)
|
||||
|
||||
# init homeassistant
|
||||
self.homeassistant = HomeAssistant(
|
||||
config, loop, self.dock, self.updater)
|
||||
|
||||
# init HostControl
|
||||
self.host_control = HostControl(loop)
|
||||
|
||||
# init addon system
|
||||
self.addons = AddonManager(config, loop, self.dock)
|
||||
|
||||
# init snapshot system
|
||||
self.snapshots = SnapshotsManager(
|
||||
config, loop, self.scheduler, self.addons, self.homeassistant)
|
||||
self.coresys = coresys
|
||||
|
||||
async def setup(self):
|
||||
"""Setup HassIO orchestration."""
|
||||
# supervisor
|
||||
if not await self.supervisor.attach():
|
||||
_LOGGER.fatal("Can't attach to supervisor docker container!")
|
||||
await self.supervisor.cleanup()
|
||||
|
||||
# set running arch
|
||||
self.config.arch = self.supervisor.arch
|
||||
|
||||
# set api endpoint
|
||||
self.config.api_endpoint = await get_local_ip(self.loop)
|
||||
|
||||
# update timezone
|
||||
if self.config.timezone == 'UTC':
|
||||
self.config.timezone = await fetch_timezone(self.websession)
|
||||
if self._config.timezone == 'UTC':
|
||||
self._config.timezone = await fetch_timezone(self._websession)
|
||||
|
||||
# supervisor
|
||||
await self._supervisor.load()
|
||||
|
||||
# hostcontrol
|
||||
await self.host_control.load()
|
||||
|
||||
# schedule update info tasks
|
||||
self.scheduler.register_task(
|
||||
self.host_control.load, RUN_UPDATE_INFO_TASKS)
|
||||
|
||||
# rest api views
|
||||
self.api.register_host(self.host_control, self.hardware)
|
||||
self.api.register_network(self.host_control)
|
||||
self.api.register_supervisor(
|
||||
self.supervisor, self.snapshots, self.addons, self.host_control,
|
||||
self.updater)
|
||||
self.api.register_homeassistant(self.homeassistant)
|
||||
self.api.register_addons(self.addons)
|
||||
self.api.register_security()
|
||||
self.api.register_snapshots(self.snapshots)
|
||||
self.api.register_panel()
|
||||
|
||||
# schedule api session cleanup
|
||||
self.scheduler.register_task(
|
||||
api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
|
||||
now=True)
|
||||
await self._host_control.load()
|
||||
|
||||
# Load homeassistant
|
||||
await self.homeassistant.prepare()
|
||||
await self._homeassistant.load()
|
||||
|
||||
# Load addons
|
||||
await self.addons.prepare()
|
||||
await self._addons.load()
|
||||
|
||||
# schedule addon update task
|
||||
self.scheduler.register_task(
|
||||
self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
|
||||
self.scheduler.register_task(
|
||||
addons_update(self.loop, self.addons), RUN_UPDATE_ADDONS_TASKS)
|
||||
# rest api views
|
||||
await self._api.load()
|
||||
|
||||
# schedule self update task
|
||||
self.scheduler.register_task(
-            hassio_update(self.supervisor, self.updater),
-            RUN_UPDATE_SUPERVISOR_TASKS)
         # load last available data
+        await self._updater.load()

         # schedule snapshot update tasks
-        self.scheduler.register_task(
-            self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
         # load last available data
+        await self._snapshots.load()

+        # load services
+        await self._services.load()

+        # start dns forwarding
+        self._loop.create_task(self._dns.start())

         # start addon mark as initialize
-        await self.addons.auto_boot(STARTUP_INITIALIZE)
+        await self._addons.auto_boot(STARTUP_INITIALIZE)

     async def start(self):
         """Start HassIO orchestration."""
         # on release channel, try update itself
         # on beta channel, only read new versions
-        await asyncio.wait(
-            [hassio_update(self.supervisor, self.updater)()],
-            loop=self.loop
-        )
+        if not self._updater.beta_channel and self._supervisor.need_update:
+            if await self._supervisor.update():
+                return
+        else:
+            _LOGGER.info("Ignore Hass.io auto updates on beta mode")

         # start api
-        await self.api.start()
-        _LOGGER.info("Start hassio api on %s", self.config.api_endpoint)
+        await self._api.start()
+        _LOGGER.info("Start API on %s", self._docker.network.supervisor)

         try:
             # HomeAssistant is already running / supervisor have only reboot
-            if await self.homeassistant.is_running():
-                _LOGGER.info("HassIO reboot detected")
+            if self._hardware.last_boot == self._config.last_boot:
+                _LOGGER.info("Hass.io reboot detected")
                 return

+            # reset register services / discovery
+            self._services.reset()

             # start addon mark as system
-            await self.addons.auto_boot(STARTUP_SYSTEM)
+            await self._addons.auto_boot(STARTUP_SYSTEM)

             # start addon mark as services
-            await self.addons.auto_boot(STARTUP_SERVICES)
+            await self._addons.auto_boot(STARTUP_SERVICES)

             # run HomeAssistant
-            await self.homeassistant.run()
+            if self._homeassistant.boot:
+                await self._homeassistant.start()

             # start addon mark as application
-            await self.addons.auto_boot(STARTUP_APPLICATION)
+            await self._addons.auto_boot(STARTUP_APPLICATION)

+            # store new last boot
+            self._config.last_boot = self._hardware.last_boot
+            self._config.save_data()

         finally:
-            # schedule homeassistant watchdog
-            self.scheduler.register_task(
-                homeassistant_watchdog(self.loop, self.homeassistant),
-                RUN_WATCHDOG_HOMEASSISTANT)
+            # Add core tasks into scheduler
+            await self._tasks.load()

             # If landingpage / run upgrade in background
-            if self.homeassistant.version == 'landingpage':
-                self.loop.create_task(self.homeassistant.install())
+            if self._homeassistant.version == 'landingpage':
+                self._loop.create_task(self._homeassistant.install())

-    async def stop(self, exit_code=0):
+        _LOGGER.info("Hass.io is up and running")
+
+    async def stop(self):
         """Stop a running orchestration."""
         # don't process scheduler anymore
-        self.scheduler.suspend = True
+        self._scheduler.suspend = True

         # process stop tasks
-        self.websession.close()
-        await self.api.stop()
+        self._websession.close()
+        self._websession_ssl.close()

-        self.exit_code = exit_code
-        self.loop.stop()
+        # process async stop tasks
+        await asyncio.wait(
+            [self._api.stop(), self._dns.stop()], loop=self._loop)
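The new stop() no longer flips an exit code and halts the loop; it fans independent shutdown coroutines through asyncio.wait. A minimal standalone sketch of that shutdown pattern (the coroutine names are stand-ins, not from the diff):

import asyncio

async def stop_api():
    print("api stopped")

async def stop_dns():
    print("dns stopped")

async def stop_all():
    # Run both shutdown steps concurrently, mirroring the stop() above.
    await asyncio.wait([asyncio.create_task(stop_api()),
                        asyncio.create_task(stop_dns())])

asyncio.run(stop_all())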
203  hassio/coresys.py  Normal file
@@ -0,0 +1,203 @@
"""Handle core shared data."""

import aiohttp

from .config import CoreConfig
from .docker import DockerAPI
from .misc.dns import DNSForward
from .misc.hardware import Hardware
from .misc.host_control import HostControl
from .misc.scheduler import Scheduler


class CoreSys(object):
    """Class that handles all shared data."""

    def __init__(self, loop):
        """Initialize coresys."""
        # Static attributes
        self.exit_code = 0

        # External objects
        self._loop = loop
        self._websession = aiohttp.ClientSession(loop=loop)
        self._websession_ssl = aiohttp.ClientSession(
            connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)

        # Global objects
        self._config = CoreConfig()
        self._hardware = Hardware()
        self._docker = DockerAPI()
        self._scheduler = Scheduler(loop=loop)
        self._dns = DNSForward(loop=loop)
        self._host_control = HostControl(loop=loop)

        # Internal objects pointers
        self._homeassistant = None
        self._supervisor = None
        self._addons = None
        self._api = None
        self._updater = None
        self._snapshots = None
        self._tasks = None
        self._services = None

    @property
    def arch(self):
        """Return running arch of the hass.io system."""
        if self._supervisor:
            return self._supervisor.arch
        return None

    @property
    def loop(self):
        """Return loop object."""
        return self._loop

    @property
    def websession(self):
        """Return websession object."""
        return self._websession

    @property
    def websession_ssl(self):
        """Return websession object with disabled SSL verification."""
        return self._websession_ssl

    @property
    def config(self):
        """Return CoreConfig object."""
        return self._config

    @property
    def hardware(self):
        """Return Hardware object."""
        return self._hardware

    @property
    def docker(self):
        """Return DockerAPI object."""
        return self._docker

    @property
    def scheduler(self):
        """Return Scheduler object."""
        return self._scheduler

    @property
    def dns(self):
        """Return DNSForward object."""
        return self._dns

    @property
    def host_control(self):
        """Return HostControl object."""
        return self._host_control

    @property
    def homeassistant(self):
        """Return HomeAssistant object."""
        return self._homeassistant

    @homeassistant.setter
    def homeassistant(self, value):
        """Set a HomeAssistant object."""
        if self._homeassistant:
            raise RuntimeError("HomeAssistant already set!")
        self._homeassistant = value

    @property
    def supervisor(self):
        """Return Supervisor object."""
        return self._supervisor

    @supervisor.setter
    def supervisor(self, value):
        """Set a Supervisor object."""
        if self._supervisor:
            raise RuntimeError("Supervisor already set!")
        self._supervisor = value

    @property
    def api(self):
        """Return API object."""
        return self._api

    @api.setter
    def api(self, value):
        """Set an API object."""
        if self._api:
            raise RuntimeError("API already set!")
        self._api = value

    @property
    def updater(self):
        """Return Updater object."""
        return self._updater

    @updater.setter
    def updater(self, value):
        """Set an Updater object."""
        if self._updater:
            raise RuntimeError("Updater already set!")
        self._updater = value

    @property
    def addons(self):
        """Return AddonManager object."""
        return self._addons

    @addons.setter
    def addons(self, value):
        """Set an AddonManager object."""
        if self._addons:
            raise RuntimeError("AddonManager already set!")
        self._addons = value

    @property
    def snapshots(self):
        """Return SnapshotManager object."""
        return self._snapshots

    @snapshots.setter
    def snapshots(self, value):
        """Set a SnapshotManager object."""
        if self._snapshots:
            raise RuntimeError("SnapshotManager already set!")
        self._snapshots = value

    @property
    def tasks(self):
        """Return Tasks object."""
        return self._tasks

    @tasks.setter
    def tasks(self, value):
        """Set a Tasks object."""
        if self._tasks:
            raise RuntimeError("Tasks already set!")
        self._tasks = value

    @property
    def services(self):
        """Return ServiceManager object."""
        return self._services

    @services.setter
    def services(self, value):
        """Set a ServiceManager object."""
        if self._services:
            raise RuntimeError("Services already set!")
        self._services = value


class CoreSysAttributes(object):
    """Inherit basic CoreSysAttributes."""

    coresys = None

    def __getattr__(self, name):
        """Mapping to coresys."""
        if hasattr(self.coresys, name[1:]):
            return getattr(self.coresys, name[1:])
        raise AttributeError(f"Can't find {name} on {self.__class__}")
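CoreSysAttributes gives every subsystem read access to the shared objects by rewriting self._name lookups into coresys.name; __getattr__ only fires for attributes the instance does not define itself. A minimal sketch of that resolution, assuming the CoreSysAttributes class above is in scope (DummyCoreSys is a made-up stand-in):

class DummyCoreSys:
    """Stand-in exposing one shared object."""
    config = "<CoreConfig>"


class NeedsConfig(CoreSysAttributes):
    def __init__(self, coresys):
        self.coresys = coresys

    def show(self):
        # _config is never set on the instance, so __getattr__ strips
        # the underscore and resolves `config` on the coresys object.
        return self._config


assert NeedsConfig(DummyCoreSys()).show() == "<CoreConfig>"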
hassio/dock/addon.py (deleted file)
@@ -1,280 +0,0 @@
"""Init file for HassIO addon docker object."""
import logging
from pathlib import Path
import shutil

import docker
import requests

from . import DockerBase
from .util import dockerfile_template, docker_process
from ..const import (
    META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)

_LOGGER = logging.getLogger(__name__)

AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"


class DockerAddon(DockerBase):
    """Docker hassio wrapper for HomeAssistant."""

    def __init__(self, config, loop, dock, addon):
        """Initialize docker homeassistant wrapper."""
        super().__init__(
            config, loop, dock, image=addon.image, timeout=addon.timeout)
        self.addon = addon

    @property
    def name(self):
        """Return name of docker container."""
        return "addon_{}".format(self.addon.slug)

    @property
    def hostname(self):
        """Return slug/id of addon."""
        return self.addon.slug.replace('_', '-')

    @property
    def environment(self):
        """Return environment for docker add-on."""
        addon_env = self.addon.environment or {}
        if self.addon.with_audio:
            addon_env.update({
                'ALSA_OUTPUT': self.addon.audio_output,
                'ALSA_INPUT': self.addon.audio_input,
            })

        return {
            **addon_env,
            'TZ': self.config.timezone,
        }

    @property
    def devices(self):
        """Return needed devices."""
        devices = self.addon.devices or []

        # use audio devices
        if self.addon.with_audio and AUDIO_DEVICE not in devices:
            devices.append(AUDIO_DEVICE)

        # Return None if no devices are present
        if devices:
            return devices
        return None

    @property
    def tmpfs(self):
        """Return tmpfs for docker add-on."""
        options = self.addon.tmpfs
        if options:
            return {"/tmpfs": "{}".format(options)}
        return None

    @property
    def mapping(self):
        """Return hosts mapping."""
        if not self.addon.use_hassio_api:
            return None

        return {
            'hassio': self.config.api_endpoint,
        }

    @property
    def volumes(self):
        """Generate volumes for mappings."""
        volumes = {
            str(self.addon.path_extern_data): {
                'bind': '/data', 'mode': 'rw'
            }}

        addon_mapping = self.addon.map_volumes

        if MAP_CONFIG in addon_mapping:
            volumes.update({
                str(self.config.path_extern_config): {
                    'bind': '/config', 'mode': addon_mapping[MAP_CONFIG]
                }})

        if MAP_SSL in addon_mapping:
            volumes.update({
                str(self.config.path_extern_ssl): {
                    'bind': '/ssl', 'mode': addon_mapping[MAP_SSL]
                }})

        if MAP_ADDONS in addon_mapping:
            volumes.update({
                str(self.config.path_extern_addons_local): {
                    'bind': '/addons', 'mode': addon_mapping[MAP_ADDONS]
                }})

        if MAP_BACKUP in addon_mapping:
            volumes.update({
                str(self.config.path_extern_backup): {
                    'bind': '/backup', 'mode': addon_mapping[MAP_BACKUP]
                }})

        if MAP_SHARE in addon_mapping:
            volumes.update({
                str(self.config.path_extern_share): {
                    'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
                }})

        return volumes

    def _run(self):
        """Run docker image.

        Need run inside executor.
        """
        if self._is_running():
            return True

        # cleanup
        self._stop()

        # write config
        if not self.addon.write_options():
            return False

        try:
            self.dock.containers.run(
                self.image,
                name=self.name,
                hostname=self.hostname,
                detach=True,
                network_mode=self.addon.network_mode,
                ports=self.addon.ports,
                extra_hosts=self.mapping,
                devices=self.devices,
                cap_add=self.addon.privileged,
                environment=self.environment,
                volumes=self.volumes,
                tmpfs=self.tmpfs
            )

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False

        _LOGGER.info(
            "Start docker addon %s with version %s", self.image, self.version)
        return True

    def _install(self, tag):
        """Pull docker image or build it.

        Need run inside executor.
        """
        if self.addon.need_build:
            return self._build(tag)

        return super()._install(tag)

    def _build(self, tag):
        """Build a docker container.

        Need run inside executor.
        """
        build_dir = Path(self.config.path_tmp, self.addon.slug)
        try:
            # prepare temporary addon build folder
            try:
                source = self.addon.path_location
                shutil.copytree(str(source), str(build_dir))
            except shutil.Error as err:
                _LOGGER.error("Can't copy %s to temporary build folder -> %s",
                              source, err)
                return False

            # prepare Dockerfile
            try:
                dockerfile_template(
                    Path(build_dir, 'Dockerfile'), self.config.arch,
                    tag, META_ADDON)
            except OSError as err:
                _LOGGER.error("Can't prepare dockerfile -> %s", err)

            # run docker build
            try:
                build_tag = "{}:{}".format(self.image, tag)

                _LOGGER.info("Start build %s on %s", build_tag, build_dir)
                image = self.dock.images.build(
                    path=str(build_dir), tag=build_tag, pull=True)

                image.tag(self.image, tag='latest')
                self.process_metadata(image.attrs, force=True)

            except (docker.errors.DockerException, TypeError) as err:
                _LOGGER.error("Can't build %s -> %s", build_tag, err)
                return False

            _LOGGER.info("Build %s done", build_tag)
            return True

        finally:
            shutil.rmtree(str(build_dir), ignore_errors=True)

    @docker_process
    def export_image(self, path):
        """Export current images into a tar file."""
        return self.loop.run_in_executor(None, self._export_image, path)

    def _export_image(self, tar_file):
        """Export current images into a tar file.

        Need run inside executor.
        """
        try:
            image = self.dock.api.get_image(self.image)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't fetch image %s -> %s", self.image, err)
            return False

        try:
            with tar_file.open("wb") as write_tar:
                for chunk in image.stream():
                    write_tar.write(chunk)
        except (OSError, requests.exceptions.ReadTimeout) as err:
            _LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
            return False

        _LOGGER.info("Export image %s to %s", self.image, tar_file)
        return True

    @docker_process
    def import_image(self, path, tag):
        """Import a tar file as image."""
        return self.loop.run_in_executor(None, self._import_image, path, tag)

    def _import_image(self, tar_file, tag):
        """Import a tar file as image.

        Need run inside executor.
        """
        try:
            with tar_file.open("rb") as read_tar:
                self.dock.api.load_image(read_tar)

            image = self.dock.images.get(self.image)
            image.tag(self.image, tag=tag)
        except (docker.errors.DockerException, OSError) as err:
            _LOGGER.error("Can't import image %s -> %s", self.image, err)
            return False

        _LOGGER.info("Import image %s and tag %s", tar_file, tag)
        self.process_metadata(image.attrs, force=True)
        self._cleanup()
        return True

    def _restart(self):
        """Restart docker container.

        Addons prepare some things on start that are normally not repeatable.
        Need run inside executor.
        """
        self._stop()
        return self._run()
hassio/dock/homeassistant.py (deleted file)
@@ -1,117 +0,0 @@
"""Init file for HassIO docker object."""
from contextlib import suppress
import logging

import docker

from . import DockerBase

_LOGGER = logging.getLogger(__name__)

HASS_DOCKER_NAME = 'homeassistant'


class DockerHomeAssistant(DockerBase):
    """Docker hassio wrapper for HomeAssistant."""

    def __init__(self, config, loop, dock, data):
        """Initialize docker homeassistant wrapper."""
        super().__init__(config, loop, dock, image=data.image)
        self.data = data

    @property
    def name(self):
        """Return name of docker container."""
        return HASS_DOCKER_NAME

    @property
    def devices(self):
        """Create list of special devices to map into docker."""
        if not self.data.devices:
            return

        devices = []
        for device in self.data.devices:
            devices.append("/dev/{0}:/dev/{0}:rwm".format(device))

        return devices

    def _run(self):
        """Run docker image.

        Need run inside executor.
        """
        if self._is_running():
            return

        # cleanup
        self._stop()

        try:
            self.dock.containers.run(
                self.image,
                name=self.name,
                hostname=self.name,
                detach=True,
                privileged=True,
                devices=self.devices,
                network_mode='host',
                environment={
                    'HASSIO': self.config.api_endpoint,
                    'TZ': self.config.timezone,
                },
                volumes={
                    str(self.config.path_extern_config):
                        {'bind': '/config', 'mode': 'rw'},
                    str(self.config.path_extern_ssl):
                        {'bind': '/ssl', 'mode': 'ro'},
                    str(self.config.path_extern_share):
                        {'bind': '/share', 'mode': 'rw'},
                }
            )

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False

        _LOGGER.info(
            "Start homeassistant %s with version %s", self.image, self.version)
        return True

    def _execute_command(self, command):
        """Create a temporary container and run command.

        Need run inside executor.
        """
        _LOGGER.info("Run command '%s' on %s", command, self.image)
        try:
            container = self.dock.containers.run(
                self.image,
                command=command,
                detach=True,
                stdout=True,
                stderr=True,
                environment={
                    'TZ': self.config.timezone,
                },
                volumes={
                    str(self.config.path_extern_config):
                        {'bind': '/config', 'mode': 'ro'},
                    str(self.config.path_extern_ssl):
                        {'bind': '/ssl', 'mode': 'ro'},
                }
            )

            # wait until command is done
            exit_code = container.wait()
            output = container.logs()

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't execute command -> %s", err)
            return (None, b"")

        # cleanup container
        with suppress(docker.errors.DockerException):
            container.remove(force=True)

        return (exit_code, output)
hassio/dock/supervisor.py (deleted file)
@@ -1,54 +0,0 @@
"""Init file for HassIO docker object."""
import logging
import os

from . import DockerBase
from .util import docker_process
from ..const import RESTART_EXIT_CODE

_LOGGER = logging.getLogger(__name__)


class DockerSupervisor(DockerBase):
    """Docker hassio wrapper for HomeAssistant."""

    def __init__(self, config, loop, dock, stop_callback, image=None):
        """Initialize docker base wrapper."""
        super().__init__(config, loop, dock, image=image)
        self.stop_callback = stop_callback

    @property
    def name(self):
        """Return name of docker container."""
        return os.environ['SUPERVISOR_NAME']

    @docker_process
    async def update(self, tag):
        """Update a supervisor docker image."""
        _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)

        if await self.loop.run_in_executor(None, self._install, tag):
            self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
            return True

        return False

    async def run(self):
        """Run docker image."""
        raise RuntimeError("Not supported on supervisor docker container!")

    async def install(self, tag):
        """Pull docker image."""
        raise RuntimeError("Not supported on supervisor docker container!")

    async def stop(self):
        """Stop/remove docker container."""
        raise RuntimeError("Not supported on supervisor docker container!")

    async def remove(self):
        """Remove docker image."""
        raise RuntimeError("Not supported on supervisor docker container!")

    async def restart(self):
        """Restart docker container."""
        raise RuntimeError("Not supported on supervisor docker container!")
hassio/dock/util.py (deleted file)
@@ -1,60 +0,0 @@
"""HassIO docker utilities."""
import logging
import re

from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64

_LOGGER = logging.getLogger(__name__)

HASSIO_BASE_IMAGE = {
    ARCH_ARMHF: "homeassistant/armhf-base:latest",
    ARCH_AARCH64: "homeassistant/aarch64-base:latest",
    ARCH_I386: "homeassistant/i386-base:latest",
    ARCH_AMD64: "homeassistant/amd64-base:latest",
}

TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")


def dockerfile_template(dockerfile, arch, version, meta_type):
    """Prepare a Hass.IO dockerfile."""
    buff = []
    hassio_image = HASSIO_BASE_IMAGE[arch]
    custom_image = re.compile(r"^#{}:FROM".format(arch))

    # read docker
    with dockerfile.open('r') as dock_input:
        for line in dock_input:
            line = TMPL_IMAGE.sub(hassio_image, line)
            line = custom_image.sub("FROM", line)
            buff.append(line)

    # add metadata
    buff.append(create_metadata(version, arch, meta_type))

    # write docker
    with dockerfile.open('w') as dock_output:
        dock_output.writelines(buff)


def create_metadata(version, arch, meta_type):
    """Generate docker label layer for hassio."""
    return ('LABEL io.hass.version="{}" '
            'io.hass.arch="{}" '
            'io.hass.type="{}"').format(version, arch, meta_type)


# pylint: disable=protected-access
def docker_process(method):
    """Wrap function to run only once at a time."""
    async def wrap_api(api, *args, **kwargs):
        """Return api wrapper."""
        if api._lock.locked():
            _LOGGER.error(
                "Can't execute %s while a task is in progress", method.__name__)
            return False

        async with api._lock:
            return await method(api, *args, **kwargs)

    return wrap_api
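To make the template mechanics concrete: dockerfile_template() runs two substitutions over every Dockerfile line, one for the generic base-image placeholder and one that uncomments an arch-specific FROM override. A small illustration of both (the custom base image name is hypothetical):

import re

TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")
custom_image = re.compile(r"^#{}:FROM".format("armhf"))

# Generic add-ons reference the placeholder...
print(TMPL_IMAGE.sub("homeassistant/armhf-base:latest",
                     "FROM %%BASE_IMAGE%%"))
# -> FROM homeassistant/armhf-base:latest

# ...while an arch-specific override is a commented-out FROM line
# that is uncommented only for the matching arch.
print(custom_image.sub("FROM", "#armhf:FROM example/custom-armhf-base"))
# -> FROM example/custom-armhf-base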
114  hassio/docker/__init__.py  Normal file
@@ -0,0 +1,114 @@
"""Init file for HassIO docker object."""
from contextlib import suppress
from collections import namedtuple
import logging

import docker

from .network import DockerNetwork
from ..const import SOCKET_DOCKER

_LOGGER = logging.getLogger(__name__)

CommandReturn = namedtuple('CommandReturn', ['exit_code', 'output'])


class DockerAPI(object):
    """Docker hassio wrapper.

    This class is not AsyncIO safe!
    """

    def __init__(self):
        """Initialize docker base wrapper."""
        self.docker = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)),
            version='auto', timeout=300)
        self.network = DockerNetwork(self.docker)

    @property
    def images(self):
        """Return api images."""
        return self.docker.images

    @property
    def containers(self):
        """Return api containers."""
        return self.docker.containers

    @property
    def api(self):
        """Return low-level api client."""
        return self.docker.api

    def run(self, image, **kwargs):
        """Create a docker container and run it.

        Need run inside executor.
        """
        name = kwargs.get('name', image)
        network_mode = kwargs.get('network_mode')
        hostname = kwargs.get('hostname')

        # setup network
        kwargs['dns_search'] = ["."]
        if network_mode:
            kwargs['dns'] = [str(self.network.supervisor)]
            kwargs['dns_opt'] = ["ndots:0"]
        else:
            kwargs['network'] = None

        # create container
        try:
            container = self.docker.containers.create(image, **kwargs)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't create container from %s: %s", name, err)
            return False

        # attach network
        if not network_mode:
            alias = [hostname] if hostname else None
            if self.network.attach_container(container, alias=alias):
                self.network.detach_default_bridge(container)
            else:
                _LOGGER.warning("Can't attach %s to hassio-net!", name)

        # run container
        try:
            container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s: %s", name, err)
            return False

        return True

    def run_command(self, image, command=None, **kwargs):
        """Create a temporary container and run command.

        Need run inside executor.
        """
        stdout = kwargs.get('stdout', True)
        stderr = kwargs.get('stderr', True)

        _LOGGER.info("Run command '%s' on %s", command, image)
        try:
            container = self.docker.containers.run(
                image,
                command=command,
                network=self.network.name,
                **kwargs
            )

            # wait until command is done
            result = container.wait()
            output = container.logs(stdout=stdout, stderr=stderr)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't execute command: %s", err)
            return CommandReturn(None, b"")

        # cleanup container
        with suppress(docker.errors.DockerException):
            container.remove(force=True)

        return CommandReturn(result.get('StatusCode'), output)
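A hedged usage sketch of the wrapper above (the image name is illustrative, and per the docstrings both methods are meant to run inside an executor):

docker_api = DockerAPI()

# One-shot command in a throwaway container; the result is the
# CommandReturn namedtuple defined above.
result = docker_api.run_command("alpine:latest", command="echo hello")
if result.exit_code == 0:
    print(result.output.decode())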
371  hassio/docker/addon.py  Normal file
@@ -0,0 +1,371 @@
"""Init file for HassIO addon docker object."""
import logging
import os

import docker
import requests

from .interface import DockerInterface
from .utils import docker_process
from ..addons.build import AddonBuild
from ..const import (
    MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE, ENV_TOKEN,
    ENV_TIME)

_LOGGER = logging.getLogger(__name__)

AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"


class DockerAddon(DockerInterface):
    """Docker hassio wrapper for HomeAssistant."""

    def __init__(self, coresys, slug):
        """Initialize docker homeassistant wrapper."""
        super().__init__(coresys)
        self._id = slug

    @property
    def addon(self):
        """Return addon of docker image."""
        return self._addons.get(self._id)

    @property
    def image(self):
        """Return name of docker image."""
        return self.addon.image

    @property
    def timeout(self):
        """Return timeout for docker actions."""
        return self.addon.timeout

    @property
    def version(self):
        """Return version of docker image."""
        if not self.addon.legacy:
            return super().version
        return self.addon.version_installed

    @property
    def arch(self):
        """Return arch of docker image."""
        if not self.addon.legacy:
            return super().arch
        return self._arch

    @property
    def name(self):
        """Return name of docker container."""
        return "addon_{}".format(self.addon.slug)

    @property
    def ipc(self):
        """Return the IPC namespace."""
        if self.addon.host_ipc:
            return 'host'
        return None

    @property
    def hostname(self):
        """Return slug/id of addon."""
        return self.addon.slug.replace('_', '-')

    @property
    def environment(self):
        """Return environment for docker add-on."""
        addon_env = self.addon.environment or {}

        # Need audio settings
        if self.addon.with_audio:
            addon_env.update({
                'ALSA_OUTPUT': self.addon.audio_output,
                'ALSA_INPUT': self.addon.audio_input,
            })

        return {
            **addon_env,
            ENV_TIME: self._config.timezone,
            ENV_TOKEN: self.addon.uuid,
        }

    @property
    def devices(self):
        """Return needed devices."""
        devices = self.addon.devices or []

        # Use audio devices
        if self.addon.with_audio and AUDIO_DEVICE not in devices:
            devices.append(AUDIO_DEVICE)

        # Auto mapping UART devices
        if self.addon.auto_uart:
            for device in self._hardware.serial_devices:
                devices.append(f"{device}:{device}:rwm")

        # Return None if no devices are present
        return devices or None

    @property
    def ports(self):
        """Filter None from addon ports."""
        if not self.addon.ports:
            return None

        return {
            container_port: host_port
            for container_port, host_port in self.addon.ports.items()
            if host_port
        }

    @property
    def security_opt(self):
        """Controlling security opt."""
        privileged = self.addon.privileged or []

        # Disable AppArmor since it makes trouble with SYS_ADMIN
        if 'SYS_ADMIN' in privileged:
            return [
                "apparmor:unconfined",
            ]
        return None

    @property
    def tmpfs(self):
        """Return tmpfs for docker add-on."""
        options = self.addon.tmpfs
        if options:
            return {"/tmpfs": f"{options}"}
        return None

    @property
    def network_mapping(self):
        """Return hosts mapping."""
        return {
            'homeassistant': self._docker.network.gateway,
            'hassio': self._docker.network.supervisor,
        }

    @property
    def network_mode(self):
        """Return network mode for addon."""
        if self.addon.host_network:
            return 'host'
        return None

    @property
    def volumes(self):
        """Generate volumes for mappings."""
        volumes = {
            str(self.addon.path_extern_data): {
                'bind': "/data", 'mode': 'rw'
            }}

        addon_mapping = self.addon.map_volumes

        # setup config mappings
        if MAP_CONFIG in addon_mapping:
            volumes.update({
                str(self._config.path_extern_config): {
                    'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
                }})

        if MAP_SSL in addon_mapping:
            volumes.update({
                str(self._config.path_extern_ssl): {
                    'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
                }})

        if MAP_ADDONS in addon_mapping:
            volumes.update({
                str(self._config.path_extern_addons_local): {
                    'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
                }})

        if MAP_BACKUP in addon_mapping:
            volumes.update({
                str(self._config.path_extern_backup): {
                    'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
                }})

        if MAP_SHARE in addon_mapping:
            volumes.update({
                str(self._config.path_extern_share): {
                    'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
                }})

        # init other hardware mappings
        if self.addon.with_gpio:
            volumes.update({
                "/sys/class/gpio": {
                    'bind': "/sys/class/gpio", 'mode': 'rw'
                },
                "/sys/devices/platform/soc": {
                    'bind': "/sys/devices/platform/soc", 'mode': 'rw'
                },
            })

        # host dbus system
        if self.addon.host_dbus:
            volumes.update({
                "/var/run/dbus": {
                    'bind': "/var/run/dbus", 'mode': 'rw'
                }})

        return volumes

    def _run(self):
        """Run docker image.

        Need run inside executor.
        """
        if self._is_running():
            return True

        # cleanup
        self._stop()

        ret = self._docker.run(
            self.image,
            name=self.name,
            hostname=self.hostname,
            detach=True,
            init=True,
            ipc_mode=self.ipc,
            stdin_open=self.addon.with_stdin,
            network_mode=self.network_mode,
            ports=self.ports,
            extra_hosts=self.network_mapping,
            devices=self.devices,
            cap_add=self.addon.privileged,
            security_opt=self.security_opt,
            environment=self.environment,
            volumes=self.volumes,
            tmpfs=self.tmpfs
        )

        if ret:
            _LOGGER.info("Start docker addon %s with version %s",
                         self.image, self.version)

        return ret

    def _install(self, tag):
        """Pull docker image or build it.

        Need run inside executor.
        """
        if self.addon.need_build:
            return self._build(tag)

        return super()._install(tag)

    def _build(self, tag):
        """Build a docker container.

        Need run inside executor.
        """
        build_env = AddonBuild(self.coresys, self._id)

        _LOGGER.info("Start build %s:%s", self.image, tag)
        try:
            image, log = self._docker.images.build(
                **build_env.get_docker_args(tag))

            _LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
            image.tag(self.image, tag='latest')

            # Update meta data
            self._meta = image.attrs

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't build %s:%s: %s", self.image, tag, err)
            return False

        _LOGGER.info("Build %s:%s done", self.image, tag)
        return True

    @docker_process
    def export_image(self, path):
        """Export current images into a tar file."""
        return self._loop.run_in_executor(None, self._export_image, path)

    def _export_image(self, tar_file):
        """Export current images into a tar file.

        Need run inside executor.
        """
        try:
            image = self._docker.api.get_image(self.image)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't fetch image %s: %s", self.image, err)
            return False

        _LOGGER.info("Export image %s to %s", self.image, tar_file)
        try:
            with tar_file.open("wb") as write_tar:
                for chunk in image:
                    write_tar.write(chunk)
        except (OSError, requests.exceptions.ReadTimeout) as err:
            _LOGGER.error("Can't write tar file %s: %s", tar_file, err)
            return False

        _LOGGER.info("Export image %s done", self.image)
        return True

    @docker_process
    def import_image(self, path, tag):
        """Import a tar file as image."""
        return self._loop.run_in_executor(None, self._import_image, path, tag)

    def _import_image(self, tar_file, tag):
        """Import a tar file as image.

        Need run inside executor.
        """
        try:
            with tar_file.open("rb") as read_tar:
                self._docker.api.load_image(read_tar, quiet=True)

            image = self._docker.images.get(self.image)
            image.tag(self.image, tag=tag)
        except (docker.errors.DockerException, OSError) as err:
            _LOGGER.error("Can't import image %s: %s", self.image, err)
            return False

        _LOGGER.info("Import image %s and tag %s", tar_file, tag)
        self._meta = image.attrs
        self._cleanup()
        return True

    @docker_process
    def write_stdin(self, data):
        """Write to add-on stdin."""
        return self._loop.run_in_executor(None, self._write_stdin, data)

    def _write_stdin(self, data):
        """Write to add-on stdin.

        Need run inside executor.
        """
        if not self._is_running():
            return False

        try:
            # load needed docker objects
            container = self._docker.containers.get(self.name)
            socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
            return False

        try:
            # write to stdin
            data += b"\n"
            os.write(socket.fileno(), data)
            socket.close()
        except OSError as err:
            _LOGGER.error("Can't write to %s stdin: %s", self.name, err)
            return False

        return True
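The ports property above drops mappings whose host side is unset, so docker-py only publishes explicitly configured ports. The same comprehension in isolation (the port values are made up):

addon_ports = {'8123/tcp': 8123, '51827/tcp': None}
published = {container: host
             for container, host in addon_ports.items() if host}
assert published == {'8123/tcp': 8123}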
112  hassio/docker/homeassistant.py  Normal file
@@ -0,0 +1,112 @@
"""Init file for HassIO docker object."""
import logging

import docker

from .interface import DockerInterface
from ..const import ENV_TOKEN, ENV_TIME

_LOGGER = logging.getLogger(__name__)

HASS_DOCKER_NAME = 'homeassistant'


class DockerHomeAssistant(DockerInterface):
    """Docker hassio wrapper for HomeAssistant."""

    @property
    def image(self):
        """Return name of docker image."""
        return self._homeassistant.image

    @property
    def name(self):
        """Return name of docker container."""
        return HASS_DOCKER_NAME

    @property
    def devices(self):
        """Create list of special devices to map into docker."""
        devices = []
        for device in self._hardware.serial_devices:
            devices.append(f"{device}:{device}:rwm")
        return devices or None

    def _run(self):
        """Run docker image.

        Need run inside executor.
        """
        if self._is_running():
            return False

        # cleanup
        self._stop()

        ret = self._docker.run(
            self.image,
            name=self.name,
            hostname=self.name,
            detach=True,
            privileged=True,
            init=True,
            devices=self.devices,
            network_mode='host',
            environment={
                'HASSIO': self._docker.network.supervisor,
                ENV_TIME: self._config.timezone,
                ENV_TOKEN: self._homeassistant.uuid,
            },
            volumes={
                str(self._config.path_extern_config):
                    {'bind': '/config', 'mode': 'rw'},
                str(self._config.path_extern_ssl):
                    {'bind': '/ssl', 'mode': 'ro'},
                str(self._config.path_extern_share):
                    {'bind': '/share', 'mode': 'rw'},
            }
        )

        if ret:
            _LOGGER.info("Start homeassistant %s with version %s",
                         self.image, self.version)

        return ret

    def _execute_command(self, command):
        """Create a temporary container and run command.

        Need run inside executor.
        """
        return self._docker.run_command(
            self.image,
            command,
            detach=True,
            stdout=True,
            stderr=True,
            environment={
                ENV_TIME: self._config.timezone,
            },
            volumes={
                str(self._config.path_extern_config):
                    {'bind': '/config', 'mode': 'ro'},
                str(self._config.path_extern_ssl):
                    {'bind': '/ssl', 'mode': 'ro'},
            }
        )

    def is_initialize(self):
        """Return True if docker container exists."""
        return self._loop.run_in_executor(None, self._is_initialize)

    def _is_initialize(self):
        """Return True if docker container exists.

        Need run inside executor.
        """
        try:
            self._docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        return True
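Because _execute_command() delegates to DockerAPI.run_command(), callers receive a CommandReturn they can unpack. A sketch of a configuration check built on it (docker_hass is a stand-in instance and the command string is illustrative; like the wrapped call, this must run inside an executor):

exit_code, log = docker_hass._execute_command(
    "python3 -m homeassistant -c /config --script check_config")
if exit_code == 0:
    print("configuration valid")
else:
    print(log.decode())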
hassio/docker/interface.py (renamed from hassio/dock/__init__.py)
@@ -1,62 +1,67 @@
-"""Init file for HassIO docker object."""
+"""Interface class for HassIO docker object."""
 import asyncio
 from contextlib import suppress
 import logging

 import docker

-from .util import docker_process
+from .utils import docker_process
+from .stats import DockerStats
 from ..const import LABEL_VERSION, LABEL_ARCH
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)


-class DockerBase(object):
-    """Docker hassio wrapper."""
+class DockerInterface(CoreSysAttributes):
+    """Docker hassio interface."""

-    def __init__(self, config, loop, dock, image=None, timeout=30):
+    def __init__(self, coresys):
         """Initialize docker base wrapper."""
-        self.config = config
-        self.loop = loop
-        self.dock = dock
-        self.image = image
-        self.timeout = timeout
-        self.version = None
-        self.arch = None
-        self._lock = asyncio.Lock(loop=loop)
+        self.coresys = coresys
+        self._meta = None
+        self.lock = asyncio.Lock(loop=self._loop)
+
+    @property
+    def timeout(self):
+        """Return timeout for docker actions."""
+        return 30
+
+    @property
+    def name(self):
+        """Return name of docker container."""
+        return None
+
+    @property
+    def image(self):
+        """Return name of docker image."""
+        if not self._meta:
+            return None
+        return self._meta['Config']['Image']
+
+    @property
+    def version(self):
+        """Return version of docker image."""
+        if self._meta and LABEL_VERSION in self._meta['Config']['Labels']:
+            return self._meta['Config']['Labels'][LABEL_VERSION]
+        return None
+
+    @property
+    def arch(self):
+        """Return arch of docker image."""
+        if self._meta and LABEL_ARCH in self._meta['Config']['Labels']:
+            return self._meta['Config']['Labels'][LABEL_ARCH]
+        return None

     @property
     def in_progress(self):
         """Return True if a task is in progress."""
-        return self._lock.locked()
-
-    def process_metadata(self, metadata, force=False):
-        """Read metadata and set it to object."""
-        # read image
-        if not self.image:
-            self.image = metadata['Config']['Image']
-
-        # read version
-        need_version = force or not self.version
-        if need_version and LABEL_VERSION in metadata['Config']['Labels']:
-            self.version = metadata['Config']['Labels'][LABEL_VERSION]
-        elif need_version:
-            _LOGGER.warning("Can't read version from %s", self.name)
-
-        # read arch
-        need_arch = force or not self.arch
-        if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
-            self.arch = metadata['Config']['Labels'][LABEL_ARCH]
+        return self.lock.locked()

     @docker_process
     def install(self, tag):
         """Pull docker image."""
-        return self.loop.run_in_executor(None, self._install, tag)
+        return self._loop.run_in_executor(None, self._install, tag)

     def _install(self, tag):
         """Pull docker image.
@@ -65,10 +70,10 @@ class DockerBase(object):
         """
         try:
             _LOGGER.info("Pull image %s tag %s.", self.image, tag)
-            image = self.dock.images.pull("{}:{}".format(self.image, tag))
+            image = self._docker.images.pull(f"{self.image}:{tag}")

             image.tag(self.image, tag='latest')
-            self.process_metadata(image.attrs, force=True)
+            self._meta = image.attrs
         except docker.errors.APIError as err:
             _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
             return False
@@ -78,7 +83,7 @@

     def exists(self):
         """Return True if docker image exists in local repo."""
-        return self.loop.run_in_executor(None, self._exists)
+        return self._loop.run_in_executor(None, self._exists)

     def _exists(self):
         """Return True if docker image exists in local repo.
@@ -86,8 +91,9 @@
         Need run inside executor.
         """
         try:
-            self.dock.images.get(self.image)
-        except docker.errors.DockerException:
+            image = self._docker.images.get(self.image)
+            assert f"{self.image}:{self.version}" in image.tags
+        except (docker.errors.DockerException, AssertionError):
             return False

         return True
@@ -97,7 +103,7 @@

         Return a Future.
         """
-        return self.loop.run_in_executor(None, self._is_running)
+        return self._loop.run_in_executor(None, self._is_running)

     def _is_running(self):
         """Return True if docker is Running.
@@ -105,8 +111,8 @@
         Need run inside executor.
         """
         try:
-            container = self.dock.containers.get(self.name)
-            image = self.dock.images.get(self.image)
+            container = self._docker.containers.get(self.name)
+            image = self._docker.images.get(self.image)
         except docker.errors.DockerException:
             return False

@@ -123,7 +129,7 @@
     @docker_process
     def attach(self):
         """Attach to running docker container."""
-        return self.loop.run_in_executor(None, self._attach)
+        return self._loop.run_in_executor(None, self._attach)

     def _attach(self):
         """Attach to running docker container.
@@ -132,13 +138,12 @@
         """
         try:
             if self.image:
-                obj_data = self.dock.images.get(self.image).attrs
+                self._meta = self._docker.images.get(self.image).attrs
             else:
-                obj_data = self.dock.containers.get(self.name).attrs
+                self._meta = self._docker.containers.get(self.name).attrs
         except docker.errors.DockerException:
             return False

-        self.process_metadata(obj_data)
         _LOGGER.info(
             "Attach to image %s with version %s", self.image, self.version)

@@ -147,7 +152,7 @@
     @docker_process
     def run(self):
         """Run docker image."""
-        return self.loop.run_in_executor(None, self._run)
+        return self._loop.run_in_executor(None, self._run)

     def _run(self):
         """Run docker image.
@@ -159,7 +164,7 @@
     @docker_process
     def stop(self):
         """Stop/remove docker container."""
-        return self.loop.run_in_executor(None, self._stop)
+        return self._loop.run_in_executor(None, self._stop)

     def _stop(self):
         """Stop/remove and remove docker container.
@@ -167,7 +172,7 @@
         Need run inside executor.
         """
         try:
-            container = self.dock.containers.get(self.name)
+            container = self._docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False

@@ -185,7 +190,7 @@
     @docker_process
     def remove(self):
         """Remove docker images."""
-        return self.loop.run_in_executor(None, self._remove)
+        return self._loop.run_in_executor(None, self._remove)

     def _remove(self):
         """remove docker images.
@@ -200,27 +205,24 @@

         try:
             with suppress(docker.errors.ImageNotFound):
-                self.dock.images.remove(
-                    image="{}:latest".format(self.image), force=True)
+                self._docker.images.remove(
+                    image=f"{self.image}:latest", force=True)

             with suppress(docker.errors.ImageNotFound):
-                self.dock.images.remove(
-                    image="{}:{}".format(self.image, self.version), force=True)
+                self._docker.images.remove(
+                    image=f"{self.image}:{self.version}", force=True)

         except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't remove image %s -> %s", self.image, err)
+            _LOGGER.warning("Can't remove image %s: %s", self.image, err)
             return False

         # clean metadata
-        self.version = None
-        self.arch = None
-
+        self._meta = None
         return True

     @docker_process
     def update(self, tag):
         """Update a docker image."""
-        return self.loop.run_in_executor(None, self._update, tag)
+        return self._loop.run_in_executor(None, self._update, tag)

     def _update(self, tag):
         """Update a docker image.
@@ -240,10 +242,12 @@

         return True

     @docker_process
     def logs(self):
-        """Return docker logs of container."""
-        return self.loop.run_in_executor(None, self._logs)
+        """Return docker logs of container.
+
+        Return a Future.
+        """
+        return self._loop.run_in_executor(None, self._logs)

     def _logs(self):
         """Return docker logs of container.
@@ -251,19 +255,19 @@
         Need run inside executor.
        """
         try:
-            container = self.dock.containers.get(self.name)
+            container = self._docker.containers.get(self.name)
         except docker.errors.DockerException:
             return b""

         try:
             return container.logs(tail=100, stdout=True, stderr=True)
         except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't grap logs from %s -> %s", self.image, err)
+            _LOGGER.warning("Can't grab logs from %s: %s", self.image, err)

     @docker_process
     def restart(self):
         """Restart docker container."""
-        return self.loop.run_in_executor(None, self._restart)
+        return self._loop.run_in_executor(None, self._restart)

     def _restart(self):
         """Restart docker container.
@@ -271,7 +275,7 @@
         Need run inside executor.
         """
         try:
-            container = self.dock.containers.get(self.name)
+            container = self._docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False

@@ -280,7 +284,7 @@
         try:
             container.restart(timeout=self.timeout)
         except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't restart %s -> %s", self.image, err)
+            _LOGGER.warning("Can't restart %s: %s", self.image, err)
             return False

         return True
@@ -288,7 +292,7 @@
     @docker_process
     def cleanup(self):
         """Check if old version exists and cleanup."""
-        return self.loop.run_in_executor(None, self._cleanup)
+        return self._loop.run_in_executor(None, self._cleanup)

     def _cleanup(self):
         """Check if old version exists and cleanup.
@@ -296,25 +300,25 @@
         Need run inside executor.
         """
         try:
-            latest = self.dock.images.get(self.image)
+            latest = self._docker.images.get(self.image)
         except docker.errors.DockerException:
             _LOGGER.warning("Can't find %s for cleanup", self.image)
             return False

-        for image in self.dock.images.list(name=self.image):
+        for image in self._docker.images.list(name=self.image):
             if latest.id == image.id:
                 continue

             with suppress(docker.errors.DockerException):
                 _LOGGER.info("Cleanup docker images: %s", image.tags)
-                self.dock.images.remove(image.id, force=True)
+                self._docker.images.remove(image.id, force=True)

         return True

     @docker_process
     def execute_command(self, command):
         """Create a temporary container and run command."""
-        return self.loop.run_in_executor(None, self._execute_command, command)
+        return self._loop.run_in_executor(None, self._execute_command, command)

     def _execute_command(self, command):
         """Create a temporary container and run command.
@@ -322,3 +326,24 @@
         Need run inside executor.
         """
         raise NotImplementedError()
+
+    def stats(self):
+        """Read and return stats from container."""
+        return self._loop.run_in_executor(None, self._stats)
+
+    def _stats(self):
+        """Read stats from container.
+
+        Need run inside executor.
+        """
+        try:
+            container = self._docker.containers.get(self.name)
+        except docker.errors.DockerException:
+            return None
+
+        try:
+            stats = container.stats(stream=False)
+            return DockerStats(stats)
+        except docker.errors.DockerException as err:
+            _LOGGER.error("Can't read stats from %s: %s", self.name, err)
+            return None
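After this refactor the image metadata is cached once in self._meta, and version/arch are plain label lookups; the keys correspond to the LABEL layer that create_metadata() in the removed dock/util.py wrote. Roughly, assuming those label names:

meta = {
    'Config': {
        'Image': 'homeassistant/amd64-base',
        'Labels': {
            'io.hass.version': '1.0.0',  # LABEL_VERSION
            'io.hass.arch': 'amd64',     # LABEL_ARCH
        },
    }
}
# DockerInterface.version reads meta['Config']['Labels']['io.hass.version']
# DockerInterface.arch reads meta['Config']['Labels']['io.hass.arch']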
93  hassio/docker/network.py  Normal file
@@ -0,0 +1,93 @@
"""Internal network manager for HassIO."""
import logging

import docker

from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE

_LOGGER = logging.getLogger(__name__)


class DockerNetwork(object):
    """Internal HassIO Network.

    This class is not AsyncIO safe!
    """

    def __init__(self, dock):
        """Initialize internal hassio network."""
        self.docker = dock
        self.network = self._get_network()

    @property
    def name(self):
        """Return name of network."""
        return DOCKER_NETWORK

    @property
    def containers(self):
        """Return list of connected containers from network."""
        return self.network.containers

    @property
    def gateway(self):
        """Return gateway of the network."""
        return DOCKER_NETWORK_MASK[1]

    @property
    def supervisor(self):
        """Return supervisor address of the network."""
        return DOCKER_NETWORK_MASK[2]

    def _get_network(self):
        """Get HassIO network."""
        try:
            return self.docker.networks.get(DOCKER_NETWORK)
        except docker.errors.NotFound:
            _LOGGER.info("Can't find HassIO network, create new network")

        ipam_pool = docker.types.IPAMPool(
            subnet=str(DOCKER_NETWORK_MASK),
            gateway=str(self.gateway),
            iprange=str(DOCKER_NETWORK_RANGE)
        )

        ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

        return self.docker.networks.create(
            DOCKER_NETWORK, driver='bridge', ipam=ipam_config,
            enable_ipv6=False, options={
                "com.docker.network.bridge.name": DOCKER_NETWORK,
            })

    def attach_container(self, container, alias=None, ipv4=None):
        """Attach container to hassio network.

        Need run inside executor.
        """
        ipv4 = str(ipv4) if ipv4 else None

        try:
            self.network.connect(container, aliases=alias, ipv4_address=ipv4)
        except docker.errors.APIError as err:
            _LOGGER.error("Can't link container to hassio-net: %s", err)
            return False

        self.network.reload()
        return True

    def detach_default_bridge(self, container):
        """Detach default docker bridge.

        Need run inside executor.
        """
        try:
            default_network = self.docker.networks.get('bridge')
            default_network.disconnect(container)

        except docker.errors.NotFound:
            return

        except docker.errors.APIError as err:
            _LOGGER.warning(
                "Can't disconnect container from default: %s", err)
90  hassio/docker/stats.py  Normal file
@@ -0,0 +1,90 @@
"""Calc & represent docker stats data."""
from contextlib import suppress


class DockerStats(object):
    """Hold stats data from container inside."""

    def __init__(self, stats):
        """Initialize docker stats."""
        self._cpu = 0.0
        self._network_rx = 0
        self._network_tx = 0
        self._blk_read = 0
        self._blk_write = 0

        try:
            self._memory_usage = stats['memory_stats']['usage']
            self._memory_limit = stats['memory_stats']['limit']
        except KeyError:
            self._memory_usage = 0
            self._memory_limit = 0

        with suppress(KeyError):
            self._calc_cpu_percent(stats)

        with suppress(KeyError):
            self._calc_network(stats['networks'])

        with suppress(KeyError):
            self._calc_block_io(stats['blkio_stats'])

    def _calc_cpu_percent(self, stats):
        """Calculate CPU percent."""
        cpu_delta = stats['cpu_stats']['cpu_usage']['total_usage'] - \
            stats['precpu_stats']['cpu_usage']['total_usage']
        system_delta = stats['cpu_stats']['system_cpu_usage'] - \
            stats['precpu_stats']['system_cpu_usage']

        if system_delta > 0.0 and cpu_delta > 0.0:
            self._cpu = (cpu_delta / system_delta) * \
                len(stats['cpu_stats']['cpu_usage']['percpu_usage']) * 100.0

    def _calc_network(self, networks):
        """Calculate Network IO stats."""
        for _, stats in networks.items():
            self._network_rx += stats['rx_bytes']
            self._network_tx += stats['tx_bytes']

    def _calc_block_io(self, blkio):
        """Calculate block IO stats."""
        for stats in blkio['io_service_bytes_recursive']:
            if stats['op'] == 'Read':
                self._blk_read += stats['value']
            elif stats['op'] == 'Write':
                self._blk_write += stats['value']

    @property
    def cpu_percent(self):
        """Return CPU percent."""
        return self._cpu

    @property
    def memory_usage(self):
        """Return memory usage."""
        return self._memory_usage

    @property
    def memory_limit(self):
        """Return memory limit."""
        return self._memory_limit

    @property
    def network_rx(self):
        """Return network rx stats."""
        return self._network_rx

    @property
    def network_tx(self):
        """Return network tx stats."""
        return self._network_tx

    @property
    def blk_read(self):
        """Return block IO read stats."""
        return self._blk_read

    @property
    def blk_write(self):
        """Return block IO write stats."""
        return self._blk_write
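A worked instance of the CPU formula above, cpu% = (cpu_delta / system_delta) * ncpu * 100, using made-up counter values:

stats = {
    'cpu_stats': {
        'cpu_usage': {'total_usage': 450, 'percpu_usage': [0, 0]},
        'system_cpu_usage': 2000,
    },
    'precpu_stats': {
        'cpu_usage': {'total_usage': 200},
        'system_cpu_usage': 1000,
    },
}
cpu_delta = 450 - 200        # 250
system_delta = 2000 - 1000   # 1000
ncpu = len(stats['cpu_stats']['cpu_usage']['percpu_usage'])  # 2
assert (cpu_delta / system_delta) * ncpu * 100.0 == 50.0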
41  hassio/docker/supervisor.py  Normal file
@@ -0,0 +1,41 @@
"""Init file for HassIO docker object."""
import logging
import os

import docker

from .interface import DockerInterface
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


class DockerSupervisor(DockerInterface, CoreSysAttributes):
    """Docker hassio wrapper for HomeAssistant."""

    @property
    def name(self):
        """Return name of docker container."""
        return os.environ['SUPERVISOR_NAME']

    def _attach(self):
        """Attach to running docker container.

        Needs to run inside an executor.
        """
        try:
            container = self._docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        self._meta = container.attrs
        _LOGGER.info("Attach to supervisor %s with version %s",
                     self.image, self.version)

        # if already attached
        if container in self._docker.network.containers:
            return True

        # attach to network
        return self._docker.network.attach_container(
            container, alias=['hassio'], ipv4=self._docker.network.supervisor)
20 hassio/docker/utils.py Normal file
@@ -0,0 +1,20 @@
"""HassIO docker utilities."""
import logging

_LOGGER = logging.getLogger(__name__)


# pylint: disable=protected-access
def docker_process(method):
    """Wrap a coroutine so only one call can run at a time."""
    async def wrap_api(api, *args, **kwargs):
        """Return api wrapper."""
        if api.lock.locked():
            _LOGGER.error(
                "Can't execute %s while a task is in progress", method.__name__)
            return False

        async with api.lock:
            return await method(api, *args, **kwargs)

    return wrap_api
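# Hedged usage sketch (not part of the diff): docker_process assumes the
# wrapped object exposes an asyncio.Lock as `api.lock`, as the Docker
# interfaces do. FakeAPI below is a hypothetical stand-in.
import asyncio

class FakeAPI:
    def __init__(self):
        self.lock = asyncio.Lock()

    @docker_process
    async def install(self, tag):
        await asyncio.sleep(0)  # pretend to pull an image
        return tag

async def main():
    api = FakeAPI()
    print(await api.install('landingpage'))  # -> 'landingpage'

asyncio.get_event_loop().run_until_complete(main())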
@@ -1,102 +1,178 @@
"""HomeAssistant control object."""
import asyncio
from collections import namedtuple
import logging
import os
import re
import socket
import time

import aiohttp
from aiohttp.hdrs import CONTENT_TYPE

from .const import (
    FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
    ATTR_VERSION)
from .dock.homeassistant import DockerHomeAssistant
from .tools import JsonConfig, convert_to_ascii
    FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_UUID,
    ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
    ATTR_WAIT_BOOT, HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
from .coresys import CoreSysAttributes
from .docker.homeassistant import DockerHomeAssistant
from .utils import convert_to_ascii
from .utils.json import JsonConfig
from .validate import SCHEMA_HASS_CONFIG

_LOGGER = logging.getLogger(__name__)

RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")

ConfigResult = namedtuple('ConfigResult', ['valid', 'log'])


class HomeAssistant(JsonConfig):

class HomeAssistant(JsonConfig, CoreSysAttributes):
    """Hass core object for handling it."""

    def __init__(self, config, loop, dock, updater):
    def __init__(self, coresys):
        """Initialize hass object."""
        super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
        self.config = config
        self.loop = loop
        self.updater = updater
        self.docker = DockerHomeAssistant(config, loop, dock, self)
        self.coresys = coresys
        self.instance = DockerHomeAssistant(coresys)

    async def prepare(self):
    async def load(self):
        """Prepare HomeAssistant object."""
        if not await self.docker.exists():
            _LOGGER.info("No HomeAssistant docker %s found.", self.image)
            if self.is_custom_image:
                await self.install()
            else:
                await self.install_landingpage()
        else:
            await self.docker.attach()
        if await self.instance.attach():
            return

        _LOGGER.info("No HomeAssistant docker %s found.", self.image)
        await self.install_landingpage()

    @property
    def api_ip(self):
        """Return IP of HomeAssistant instance."""
        return self._docker.network.gateway

    @property
    def api_port(self):
        """Return network port to home-assistant instance."""
        return self._data[ATTR_PORT]

    @api_port.setter
    def api_port(self, value):
        """Set network port for home-assistant instance."""
        self._data[ATTR_PORT] = value

    @property
    def api_password(self):
        """Return password for home-assistant instance."""
        return self._data.get(ATTR_PASSWORD)

    @api_password.setter
    def api_password(self, value):
        """Set password for home-assistant instance."""
        self._data[ATTR_PASSWORD] = value

    @property
    def api_ssl(self):
        """Return if we need ssl to home-assistant instance."""
        return self._data[ATTR_SSL]

    @api_ssl.setter
    def api_ssl(self, value):
        """Set SSL for home-assistant instance."""
        self._data[ATTR_SSL] = value

    @property
    def api_url(self):
        """Return API url to Home-Assistant."""
        return "{}://{}:{}".format(
            'https' if self.api_ssl else 'http', self.api_ip, self.api_port
        )

    @property
    def watchdog(self):
        """Return True if the watchdog should protect Home-Assistant."""
        return self._data[ATTR_WATCHDOG]

    @watchdog.setter
    def watchdog(self, value):
        """Set if the watchdog should protect Home-Assistant."""
        self._data[ATTR_WATCHDOG] = value

    @property
    def wait_boot(self):
        """Return time to wait for Home-Assistant startup."""
        return self._data[ATTR_WAIT_BOOT]

    @wait_boot.setter
    def wait_boot(self, value):
        """Set time to wait for Home-Assistant startup."""
        self._data[ATTR_WAIT_BOOT] = value

    @property
    def version(self):
        """Return version of running homeassistant."""
        return self.docker.version
        return self.instance.version

    @property
    def last_version(self):
        """Return last available version of homeassistant."""
        if self.is_custom_image:
            return self._data.get(ATTR_LAST_VERSION)
        return self.updater.version_homeassistant
        return self._updater.version_homeassistant

    @last_version.setter
    def last_version(self, value):
        """Set last available version of homeassistant."""
        if value:
            self._data[ATTR_LAST_VERSION] = value
        else:
            self._data.pop(ATTR_LAST_VERSION, None)

    @property
    def image(self):
        """Return image name of hass container."""
        if ATTR_IMAGE in self._data:
        if self._data.get(ATTR_IMAGE):
            return self._data[ATTR_IMAGE]
        return os.environ['HOMEASSISTANT_REPOSITORY']

    @image.setter
    def image(self, value):
        """Set image name of hass container."""
        if value:
            self._data[ATTR_IMAGE] = value
        else:
            self._data.pop(ATTR_IMAGE, None)

    @property
    def is_custom_image(self):
        """Return True if a custom image is used."""
        return ATTR_IMAGE in self._data
        return all(attr in self._data for attr in
                   (ATTR_IMAGE, ATTR_LAST_VERSION))

    @property
    def devices(self):
        """Return extended device mapping."""
        return self._data[ATTR_DEVICES]
    def boot(self):
        """Return True if home-assistant boot is enabled."""
        return self._data[ATTR_BOOT]

    @devices.setter
    def devices(self, value):
        """Set extended device mapping."""
        self._data[ATTR_DEVICES] = value
        self.save()
    @boot.setter
    def boot(self, value):
        """Set home-assistant boot options."""
        self._data[ATTR_BOOT] = value

    def set_custom(self, image, version):
        """Set a custom image for homeassistant."""
        # reset
        if image is None and version is None:
            self._data.pop(ATTR_IMAGE, None)
            self._data.pop(ATTR_VERSION, None)

            self.docker.image = self.image
        else:
            if image:
                self._data[ATTR_IMAGE] = image
                self.docker.image = image
            if version:
                self._data[ATTR_VERSION] = version
        self.save()
    @property
    def uuid(self):
        """Return a UUID of this HomeAssistant."""
        return self._data[ATTR_UUID]

    async def install_landingpage(self):
        """Install a landingpage."""
        _LOGGER.info("Setup HomeAssistant landingpage")
        while True:
            if await self.docker.install('landingpage'):
            if await self.instance.install('landingpage'):
                break
            _LOGGER.warning("Failed to install landingpage, retry after 60sec")
            await asyncio.sleep(60, loop=self.loop)
            await asyncio.sleep(60, loop=self._loop)

        # Run landingpage after installation
        await self.start()

    async def install(self):
        """Install HomeAssistant."""
@@ -104,83 +180,174 @@ class HomeAssistant(JsonConfig):
        while True:
            # read homeassistant tag and install it
            if not self.last_version:
                await self.updater.fetch_data()
                await self._updater.reload()

            tag = self.last_version
            if tag and await self.docker.install(tag):
            if tag and await self.instance.install(tag):
                break
            _LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
            await asyncio.sleep(60, loop=self.loop)
            await asyncio.sleep(60, loop=self._loop)

        # store version
        # finishing
        _LOGGER.info("HomeAssistant docker now installed")
        await self.docker.cleanup()
        if self.boot:
            await self.start()
        await self.instance.cleanup()

    async def update(self, version=None):
        """Update HomeAssistant version."""
        version = version or self.last_version
        running = await self.instance.is_running()
        exists = await self.instance.exists()

        if version == self.docker.version:
            _LOGGER.warning("Version %s is already installed", version)
        if exists and version == self.instance.version:
            _LOGGER.info("Version %s is already installed", version)
            return False

        try:
            return await self.docker.update(version)
            return await self.instance.update(version)
        finally:
            await self.docker.run()
            if running:
                await self.start()

    def run(self):
        """Run HomeAssistant docker.
    async def start(self):
        """Run HomeAssistant docker."""
        if not await self.instance.run():
            return False

        Return a coroutine.
        """
        return self.docker.run()
        return await self._block_till_run()

    def stop(self):
        """Stop HomeAssistant docker.

        Return a coroutine.
        """
        return self.docker.stop()
        return self.instance.stop()

    def restart(self):
        """Restart HomeAssistant docker.
    async def restart(self):
        """Restart HomeAssistant docker."""
        if not await self.instance.restart():
            return False

        Return a coroutine.
        """
        return self.docker.restart()
        return await self._block_till_run()

    def logs(self):
        """Get HomeAssistant docker logs.

        Return a coroutine.
        """
        return self.docker.logs()
        return self.instance.logs()

    def stats(self):
        """Return stats of HomeAssistant.

        Return a coroutine.
        """
        return self.instance.stats()

    def is_running(self):
        """Return True if docker container is running.

        Return a coroutine.
        """
        return self.docker.is_running()
        return self.instance.is_running()

    def is_initialize(self):
        """Return True if a docker container exists.

        Return a coroutine.
        """
        return self.instance.is_initialize()

    @property
    def in_progress(self):
        """Return True if a task is in progress."""
        return self.docker.in_progress
        return self.instance.in_progress

    async def check_config(self):
        """Run homeassistant config check."""
        exit_code, log = await self.docker.execute_command(
        result = await self.instance.execute_command(
            "python3 -m homeassistant -c /config --script check_config"
        )

        # if not valid
        if exit_code is None:
            return (False, "")
        if result.exit_code is None:
            return ConfigResult(False, "")

        # parse output
        log = convert_to_ascii(log)
        if exit_code != 0 or RE_YAML_ERROR.search(log):
            return (False, log)
        return (True, log)
        log = convert_to_ascii(result.output)
        if result.exit_code != 0 or RE_YAML_ERROR.search(log):
            return ConfigResult(False, log)
        return ConfigResult(True, log)

    async def check_api_state(self):
        """Check if Home-Assistant is up and running."""
        url = f"{self.api_url}/api/"
        header = {CONTENT_TYPE: CONTENT_TYPE_JSON}

        if self.api_password:
            header.update({HEADER_HA_ACCESS: self.api_password})

        try:
            # pylint: disable=bad-continuation
            async with self._websession_ssl.get(
                    url, headers=header, timeout=30) as request:
                status = request.status

        except (asyncio.TimeoutError, aiohttp.ClientError):
            return False

        if status not in (200, 201):
            _LOGGER.warning("Home-Assistant API config mismatch")
        return True

    async def send_event(self, event_type, event_data=None):
        """Send event to Home-Assistant."""
        url = f"{self.api_url}/api/events/{event_type}"
        header = {CONTENT_TYPE: CONTENT_TYPE_JSON}

        if self.api_password:
            header.update({HEADER_HA_ACCESS: self.api_password})

        try:
            # pylint: disable=bad-continuation
            async with self._websession_ssl.post(
                    url, headers=header, timeout=30,
                    json=event_data) as request:
                status = request.status

        except (asyncio.TimeoutError, aiohttp.ClientError) as err:
            _LOGGER.warning(
                "Home-Assistant event %s fails: %s", event_type, err)
            return False

        if status not in (200, 201):
            _LOGGER.warning("Home-Assistant event %s fails", event_type)
            return False
        return True

    async def _block_till_run(self):
        """Block until Home-Assistant is booting up or startup timeout."""
        start_time = time.monotonic()

        def check_port():
            """Check if port is mapped."""
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                result = sock.connect_ex((str(self.api_ip), self.api_port))
                sock.close()

                if result == 0:
                    return True
                return False
            except OSError:
                pass

        while time.monotonic() - start_time < self.wait_boot:
            if await self._loop.run_in_executor(None, check_port):
                _LOGGER.info("Detected a running Home-Assistant instance")
                return True
            await asyncio.sleep(10, loop=self._loop)

        _LOGGER.warning("Don't wait anymore for Home-Assistant startup!")
        return False
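# Hedged sketch (not part of the diff): the startup probe in _block_till_run
# is a plain TCP connect; connect_ex() returns 0 when something is listening.
import socket

def port_open(host, port, timeout=5.0):
    """Return True if a TCP listener answers on (host, port)."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(timeout)
    try:
        return sock.connect_ex((host, port)) == 0
    except OSError:
        return False
    finally:
        sock.close()

# port_open('172.30.32.1', 8123) would mirror the api_ip/api_port check.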
1 hassio/misc/__init__.py Normal file
@@ -0,0 +1 @@
"""Special object and tools for Hass.io."""
42 hassio/misc/dns.py Normal file
@@ -0,0 +1,42 @@
"""Setup the internal DNS service for host applications."""
import asyncio
import logging
import shlex

_LOGGER = logging.getLogger(__name__)

COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"


class DNSForward(object):
    """Manage DNS forwarding to internal DNS."""

    def __init__(self, loop):
        """Initialize DNS forwarding."""
        self.loop = loop
        self.proc = None

    async def start(self):
        """Start DNS forwarding."""
        try:
            self.proc = await asyncio.create_subprocess_exec(
                *shlex.split(COMMAND),
                stdin=asyncio.subprocess.DEVNULL,
                stdout=asyncio.subprocess.DEVNULL,
                stderr=asyncio.subprocess.DEVNULL,
                loop=self.loop
            )
        except OSError as err:
            _LOGGER.error("Can't start DNS forwarding: %s", err)
        else:
            _LOGGER.info("Start DNS port forwarding for host add-ons")

    async def stop(self):
        """Stop DNS forwarding."""
        if not self.proc:
            _LOGGER.warning("DNS forwarding is not running!")
            return

        self.proc.kill()
        await self.proc.wait()
        _LOGGER.info("Stop DNS forwarding")
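# Hedged usage sketch (not part of the diff): forwarding runs until stop()
# kills the socat child. Assumes socat is on PATH and the process may bind
# UDP port 53.
import asyncio

async def demo():
    loop = asyncio.get_event_loop()
    dns = DNSForward(loop)
    await dns.start()
    await asyncio.sleep(60)  # forward DNS for a minute
    await dns.stop()

asyncio.get_event_loop().run_until_complete(demo())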
@@ -1,11 +1,12 @@
"""Read hardware info from system."""
from datetime import datetime
import logging
from pathlib import Path
import re

import pyudev

from .const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES
from ..const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES

_LOGGER = logging.getLogger(__name__)

@@ -15,6 +16,12 @@ RE_CARDS = re.compile(r"(\d+) \[(\w*) *\]: (.*\w)")
ASOUND_DEVICES = Path("/proc/asound/devices")
RE_DEVICES = re.compile(r"\[.*(\d+)- (\d+).*\]: ([\w ]*)")

PROC_STAT = Path("/proc/stat")
RE_BOOT_TIME = re.compile(r"btime (\d+)")

GPIO_DEVICES = Path("/sys/class/gpio")
RE_TTY = re.compile(r"tty[A-Z]+")


class Hardware(object):
    """Represent an interface to procfs, sysfs and udev."""
@@ -28,10 +35,10 @@ class Hardware(object):
        """Return all serial and connected devices."""
        dev_list = set()
        for device in self.context.list_devices(subsystem='tty'):
            if 'ID_VENDOR' in device:
            if 'ID_VENDOR' in device or RE_TTY.search(device.device_node):
                dev_list.add(device.device_node)

        return list(dev_list)
        return dev_list

    @property
    def input_devices(self):
@@ -41,7 +48,7 @@ class Hardware(object):
            if 'NAME' in device:
                dev_list.add(device['NAME'].replace('"', ''))

        return list(dev_list)
        return dev_list

    @property
    def disk_devices(self):
@@ -51,7 +58,7 @@ class Hardware(object):
            if device.device_node.startswith('/dev/sd'):
                dev_list.add(device.device_node)

        return list(dev_list)
        return dev_list

    @property
    def audio_devices(self):
@@ -62,8 +69,8 @@ class Hardware(object):
            with ASOUND_DEVICES.open('r') as devices_file:
                devices = devices_file.read()
        except OSError as err:
            _LOGGER.error("Can't read asound data -> %s", err)
            return
            _LOGGER.error("Can't read asound data: %s", err)
            return {}

        audio_list = {}

@@ -85,3 +92,30 @@ class Hardware(object):
            continue

        return audio_list

    @property
    def gpio_devices(self):
        """Return list of GPIO interfaces on device."""
        dev_list = set()
        for interface in GPIO_DEVICES.glob("gpio*"):
            dev_list.add(interface.name)

        return dev_list

    @property
    def last_boot(self):
        """Return last boot time."""
        try:
            with PROC_STAT.open("r") as stat_file:
                stats = stat_file.read()
        except OSError as err:
            _LOGGER.error("Can't read stat data: %s", err)
            return None

        # parse stat file
        found = RE_BOOT_TIME.search(stats)
        if not found:
            _LOGGER.error("Can't find last boot time!")
            return None

        return datetime.utcfromtimestamp(int(found.group(1)))
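# Hedged sketch (not part of the diff): /proc/stat contains a line like
# "btime 1510000000"; last_boot turns that epoch value into a naive UTC
# datetime. The sample content below is hypothetical.
import re
from datetime import datetime

sample = "cpu  2255 34 2290 22625563\nbtime 1510000000\nprocesses 9349\n"
match = re.search(r"btime (\d+)", sample)
if match:
    print(datetime.utcfromtimestamp(int(match.group(1))))  # UTC boot time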
@@ -5,7 +5,7 @@ import logging
import async_timeout

from .const import (
from ..const import (
    SOCKET_HC, ATTR_LAST_VERSION, ATTR_VERSION, ATTR_TYPE, ATTR_FEATURES,
    ATTR_HOSTNAME, ATTR_OS)

75 hassio/misc/scheduler.py Normal file
@@ -0,0 +1,75 @@
"""Schedule for HassIO."""
import logging
from datetime import date, datetime, time, timedelta

_LOGGER = logging.getLogger(__name__)

INTERVAL = 'interval'
REPEAT = 'repeat'
CALL = 'callback'
TASK = 'task'


class Scheduler(object):
    """Schedule task inside HassIO."""

    def __init__(self, loop):
        """Initialize task schedule."""
        self.loop = loop
        self._data = {}
        self.suspend = False

    def register_task(self, coro_callback, interval, repeat=True):
        """Schedule a coroutine.

        The coroutine needs to be a callback without arguments.
        """
        task_id = hash(coro_callback)

        # generate data
        opts = {
            CALL: coro_callback,
            INTERVAL: interval,
            REPEAT: repeat,
        }

        # schedule task
        self._data[task_id] = opts
        self._schedule_task(interval, task_id)

        return task_id

    def _run_task(self, task_id):
        """Run a scheduled task."""
        data = self._data[task_id]

        if not self.suspend:
            self.loop.create_task(data[CALL]())

        if data[REPEAT]:
            self._schedule_task(data[INTERVAL], task_id)
        else:
            self._data.pop(task_id)

    def _schedule_task(self, interval, task_id):
        """Schedule a task on loop."""
        if isinstance(interval, (int, float)):
            job = self.loop.call_later(interval, self._run_task, task_id)
        elif isinstance(interval, time):
            today = datetime.combine(date.today(), interval)
            tomorrow = datetime.combine(
                date.today() + timedelta(days=1), interval)

            # check if we run it today or next day
            if today > datetime.today():
                calc = today
            else:
                calc = tomorrow

            job = self.loop.call_at(calc.timestamp(), self._run_task, task_id)
        else:
            _LOGGER.fatal("Unknown interval %s (type: %s) for scheduler %s",
                          interval, type(interval), task_id)

        # Store job
        self._data[task_id][TASK] = job
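# Hedged usage sketch (not part of the diff): per _schedule_task, an interval
# may be seconds (int/float) or a datetime.time for a fixed time of day.
import asyncio
from datetime import time

async def heartbeat():
    print("tick")

loop = asyncio.get_event_loop()
scheduler = Scheduler(loop)
scheduler.register_task(heartbeat, 30)          # every 30 seconds
scheduler.register_task(heartbeat, time(3, 0))  # daily at 03:00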
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1,56 +0,0 @@
"""Schedule for HassIO."""
import logging

_LOGGER = logging.getLogger(__name__)

SEC = 'seconds'
REPEAT = 'repeat'
CALL = 'callback'
TASK = 'task'


class Scheduler(object):
    """Schedule task inside HassIO."""

    def __init__(self, loop):
        """Initialize task schedule."""
        self.loop = loop
        self._data = {}
        self.suspend = False

    def register_task(self, coro_callback, seconds, repeat=True,
                      now=False):
        """Schedule a coroutine.

        The coroutine needs to be a callback without arguments.
        """
        idx = hash(coro_callback)

        # generate data
        opts = {
            CALL: coro_callback,
            SEC: seconds,
            REPEAT: repeat,
        }
        self._data[idx] = opts

        # schedule task
        if now:
            self._run_task(idx)
        else:
            task = self.loop.call_later(seconds, self._run_task, idx)
            self._data[idx][TASK] = task

        return idx

    def _run_task(self, idx):
        """Run a scheduled task."""
        data = self._data.pop(idx)

        if not self.suspend:
            self.loop.create_task(data[CALL]())

        if data[REPEAT]:
            task = self.loop.call_later(data[SEC], self._run_task, idx)
            data[TASK] = task
            self._data[idx] = data
45 hassio/services/__init__.py Normal file
@@ -0,0 +1,45 @@
"""Handle internal services discovery."""

from .mqtt import MQTTService
from .data import ServicesData
from .discovery import Discovery
from ..const import SERVICE_MQTT
from ..coresys import CoreSysAttributes


AVAILABLE_SERVICES = {
    SERVICE_MQTT: MQTTService
}


class ServiceManager(CoreSysAttributes):
    """Handle internal services discovery."""

    def __init__(self, coresys):
        """Initialize Services handler."""
        self.coresys = coresys
        self.data = ServicesData()
        self.discovery = Discovery(coresys)
        self.services_obj = {}

    @property
    def list_services(self):
        """Return a list of services."""
        return list(self.services_obj.values())

    def get(self, slug):
        """Return service object from slug."""
        return self.services_obj.get(slug)

    async def load(self):
        """Load available services."""
        for slug, service in AVAILABLE_SERVICES.items():
            self.services_obj[slug] = service(self.coresys)

        # Read existing discovery messages
        self.discovery.load()

    def reset(self):
        """Reset available data."""
        self.data.reset_data()
        self.discovery.load()
23 hassio/services/data.py Normal file
@@ -0,0 +1,23 @@
"""Handle service data for persistent supervisor reboot."""

from .validate import SCHEMA_SERVICES_FILE
from ..const import FILE_HASSIO_SERVICES, ATTR_DISCOVERY, SERVICE_MQTT
from ..utils.json import JsonConfig


class ServicesData(JsonConfig):
    """Class to handle services data."""

    def __init__(self):
        """Initialize services data."""
        super().__init__(FILE_HASSIO_SERVICES, SCHEMA_SERVICES_FILE)

    @property
    def discovery(self):
        """Return discovery data for home-assistant."""
        return self._data[ATTR_DISCOVERY]

    @property
    def mqtt(self):
        """Return settings for mqtt service."""
        return self._data[SERVICE_MQTT]
107 hassio/services/discovery.py Normal file
@@ -0,0 +1,107 @@
"""Handle discover message for Home-Assistant."""
import logging
from uuid import uuid4

from ..const import ATTR_UUID
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

EVENT_DISCOVERY_ADD = 'hassio_discovery_add'
EVENT_DISCOVERY_DEL = 'hassio_discovery_del'


class Discovery(CoreSysAttributes):
    """Home-Assistant Discovery handler."""

    def __init__(self, coresys):
        """Initialize discovery handler."""
        self.coresys = coresys
        self.message_obj = {}

    def load(self):
        """Load existing discovery messages into storage."""
        messages = {}
        for message in self._data:
            discovery = Message(**message)
            messages[discovery.uuid] = discovery

        self.message_obj = messages

    def save(self):
        """Write discovery messages into data file."""
        messages = []
        for message in self.message_obj.values():
            messages.append(message.raw())

        self._data.clear()
        self._data.extend(messages)
        self._services.data.save_data()

    def get(self, uuid):
        """Return discovery message."""
        return self.message_obj.get(uuid)

    @property
    def _data(self):
        """Return discovery data."""
        return self._services.data.discovery

    @property
    def list_messages(self):
        """Return list of available discovery messages."""
        return self.message_obj.values()

    def send(self, provider, component, platform=None, config=None):
        """Send a discovery message to Home-Assistant."""
        message = Message(provider, component, platform, config)

        # Already exists?
        for exists_message in self.message_obj.values():
            if exists_message == message:
                _LOGGER.warning("Found duplicate discovery message from %s",
                                provider)
                return exists_message

        _LOGGER.info("Send discovery to Home-Assistant %s/%s from %s",
                     component, platform, provider)
        self.message_obj[message.uuid] = message
        self.save()

        # send event to Home-Assistant
        self._loop.create_task(self._homeassistant.send_event(
            EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid}))

        return message

    def remove(self, message):
        """Remove a discovery message from Home-Assistant."""
        self.message_obj.pop(message.uuid, None)
        self.save()

        # send event to Home-Assistant
        self._loop.create_task(self._homeassistant.send_event(
            EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid}))


class Message(object):
    """Represent a single Discovery message."""

    def __init__(self, provider, component, platform, config, uuid=None):
        """Initialize discovery message."""
        self.provider = provider
        self.component = component
        self.platform = platform
        self.config = config
        self.uuid = uuid or uuid4().hex

    def raw(self):
        """Return raw discovery message."""
        return self.__dict__

    def __eq__(self, other):
        """Compare with other message."""
        for attribute in ('provider', 'component', 'platform', 'config'):
            if getattr(self, attribute) != getattr(other, attribute):
                return False
        return True
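# Hedged sketch (not part of the diff): Message serializes via raw() and its
# equality ignores the uuid, which is what makes the duplicate check in
# send() work. Payload values below are hypothetical.
msg_a = Message('addon_x', 'mqtt', None, {'host': '172.30.32.1'})
msg_b = Message('addon_x', 'mqtt', None, {'host': '172.30.32.1'})
assert msg_a == msg_b            # same payload, different uuid
restored = Message(**msg_a.raw())
assert restored.uuid == msg_a.uuid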
54 hassio/services/interface.py Normal file
@@ -0,0 +1,54 @@
"""Interface for single service."""

from ..coresys import CoreSysAttributes


class ServiceInterface(CoreSysAttributes):
    """Interface class for service integration."""

    def __init__(self, coresys):
        """Initialize service interface."""
        self.coresys = coresys

    @property
    def slug(self):
        """Return slug of this service."""
        return None

    @property
    def _data(self):
        """Return data of this service."""
        return None

    @property
    def schema(self):
        """Return data schema of this service."""
        return None

    @property
    def provider(self):
        """Return name of service provider."""
        return None

    @property
    def enabled(self):
        """Return True if the service is in use."""
        return bool(self._data)

    def save(self):
        """Save changes."""
        self._services.data.save_data()

    def get_service_data(self):
        """Return the requested service data."""
        if self.enabled:
            return self._data
        return None

    def set_service_data(self, provider, data):
        """Write the data into service object."""
        raise NotImplementedError()

    def del_service_data(self, provider):
        """Remove the data from service object."""
        raise NotImplementedError()
89 hassio/services/mqtt.py Normal file
@@ -0,0 +1,89 @@
"""Provide MQTT Service."""
import logging

from .interface import ServiceInterface
from .validate import SCHEMA_SERVICE_MQTT
from ..const import (
    ATTR_PROVIDER, SERVICE_MQTT, ATTR_HOST, ATTR_PORT, ATTR_USERNAME,
    ATTR_PASSWORD, ATTR_PROTOCOL, ATTR_DISCOVERY_ID)

_LOGGER = logging.getLogger(__name__)


class MQTTService(ServiceInterface):
    """Provide mqtt services."""

    @property
    def slug(self):
        """Return slug of this service."""
        return SERVICE_MQTT

    @property
    def _data(self):
        """Return data of this service."""
        return self._services.data.mqtt

    @property
    def schema(self):
        """Return data schema of this service."""
        return SCHEMA_SERVICE_MQTT

    @property
    def provider(self):
        """Return name of service provider."""
        return self._data.get(ATTR_PROVIDER)

    @property
    def hass_config(self):
        """Return Home-Assistant mqtt config."""
        if not self.enabled:
            return None

        hass_config = {
            'host': self._data[ATTR_HOST],
            'port': self._data[ATTR_PORT],
            'protocol': self._data[ATTR_PROTOCOL]
        }
        if ATTR_USERNAME in self._data:
            hass_config['user'] = self._data[ATTR_USERNAME]
        if ATTR_PASSWORD in self._data:
            hass_config['password'] = self._data[ATTR_PASSWORD]

        return hass_config

    def set_service_data(self, provider, data):
        """Write the data into service object."""
        if self.enabled:
            _LOGGER.error("There is already an mqtt service in use from %s",
                          self.provider)
            return False

        self._data.update(data)
        self._data[ATTR_PROVIDER] = provider

        if provider == 'homeassistant':
            _LOGGER.info("Use mqtt settings from Home-Assistant")
            self.save()
            return True

        # discover mqtt to homeassistant
        message = self._services.discovery.send(
            provider, SERVICE_MQTT, None, self.hass_config)

        self._data[ATTR_DISCOVERY_ID] = message.uuid
        self.save()
        return True

    def del_service_data(self, provider):
        """Remove the data from service object."""
        if not self.enabled:
            _LOGGER.warning("Can't remove a service that doesn't exist.")
            return False

        discovery_id = self._data.get(ATTR_DISCOVERY_ID)
        if discovery_id:
            self._services.discovery.remove(
                self._services.discovery.get(discovery_id))

        self._data.clear()
        self.save()
        return True
44 hassio/services/validate.py Normal file
@@ -0,0 +1,44 @@
"""Validate services schema."""

import voluptuous as vol

from ..const import (
    SERVICE_MQTT, ATTR_HOST, ATTR_PORT, ATTR_PASSWORD, ATTR_USERNAME, ATTR_SSL,
    ATTR_PROVIDER, ATTR_PROTOCOL, ATTR_DISCOVERY, ATTR_COMPONENT, ATTR_UUID,
    ATTR_PLATFORM, ATTR_CONFIG, ATTR_DISCOVERY_ID)
from ..validate import NETWORK_PORT


SCHEMA_DISCOVERY = vol.Schema([
    vol.Schema({
        vol.Required(ATTR_UUID): vol.Match(r"^[0-9a-f]{32}$"),
        vol.Required(ATTR_PROVIDER): vol.Coerce(str),
        vol.Required(ATTR_COMPONENT): vol.Coerce(str),
        vol.Required(ATTR_PLATFORM): vol.Any(None, vol.Coerce(str)),
        vol.Required(ATTR_CONFIG): vol.Any(None, dict),
    }, extra=vol.REMOVE_EXTRA)
])


# pylint: disable=no-value-for-parameter
SCHEMA_SERVICE_MQTT = vol.Schema({
    vol.Required(ATTR_HOST): vol.Coerce(str),
    vol.Required(ATTR_PORT): NETWORK_PORT,
    vol.Optional(ATTR_USERNAME): vol.Coerce(str),
    vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
    vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
    vol.Optional(ATTR_PROTOCOL, default='3.1.1'):
        vol.All(vol.Coerce(str), vol.In(['3.1', '3.1.1'])),
})


SCHEMA_CONFIG_MQTT = SCHEMA_SERVICE_MQTT.extend({
    vol.Required(ATTR_PROVIDER): vol.Coerce(str),
    vol.Optional(ATTR_DISCOVERY_ID): vol.Match(r"^[0-9a-f]{32}$"),
})


SCHEMA_SERVICES_FILE = vol.Schema({
    vol.Optional(SERVICE_MQTT, default=dict): vol.Any({}, SCHEMA_CONFIG_MQTT),
    vol.Optional(ATTR_DISCOVERY, default=list): vol.Any([], SCHEMA_DISCOVERY),
}, extra=vol.REMOVE_EXTRA)
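# Hedged sketch (not part of the diff): calling a voluptuous schema validates
# the payload and fills in declared defaults. The payload keys assume the
# ATTR_* constants resolve to their plain names ('host', 'port', ...).
import voluptuous as vol

payload = {'host': '172.30.32.1', 'port': 1883, 'username': 'hass'}
try:
    valid = SCHEMA_SERVICE_MQTT(payload)
    # valid now also contains ssl=False and protocol='3.1.1'
except vol.Invalid as err:
    print(f"Bad service data: {err}")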
@@ -1,310 +1,346 @@
"""Snapshot system control."""
import asyncio
from datetime import datetime
import logging
from pathlib import Path
import tarfile

from .snapshot import Snapshot
from .util import create_slug
from .utils import create_slug
from ..const import (
    ATTR_SLUG, FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
    FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
from ..coresys import CoreSysAttributes
from ..utils.dt import utcnow

_LOGGER = logging.getLogger(__name__)


class SnapshotsManager(object):
class SnapshotManager(CoreSysAttributes):
    """Manage snapshots."""

    def __init__(self, config, loop, sheduler, addons, homeassistant):
    def __init__(self, coresys):
        """Initialize a snapshot manager."""
        self.config = config
        self.loop = loop
        self.sheduler = sheduler
        self.addons = addons
        self.homeassistant = homeassistant
        self.snapshots = {}
        self._lock = asyncio.Lock(loop=loop)
        self.coresys = coresys
        self.snapshots_obj = {}
        self.lock = asyncio.Lock(loop=coresys.loop)

    @property
    def list_snapshots(self):
        """Return a list of all snapshot objects."""
        return set(self.snapshots.values())
        return set(self.snapshots_obj.values())

    def get(self, slug):
        """Return snapshot object."""
        return self.snapshots.get(slug)
        return self.snapshots_obj.get(slug)

    def _create_snapshot(self, name, sys_type):
    def _create_snapshot(self, name, sys_type, password):
        """Initialize a new snapshot object from name."""
        date_str = datetime.utcnow().isoformat()
        date_str = utcnow().isoformat()
        slug = create_slug(name, date_str)
        tar_file = Path(self.config.path_backup, "{}.tar".format(slug))
        tar_file = Path(self._config.path_backup, f"{slug}.tar")

        # init object
        snapshot = Snapshot(self.config, self.loop, tar_file)
        snapshot.create(slug, name, date_str, sys_type)
        snapshot = Snapshot(self.coresys, tar_file)
        snapshot.new(slug, name, date_str, sys_type, password)

        # set general data
        snapshot.snapshot_homeassistant(self.homeassistant)
        snapshot.repositories = self.config.addons_repositories
        snapshot.store_homeassistant()
        snapshot.store_repositories()

        return snapshot

    def load(self):
        """Load existing snapshots data.

        Return a coroutine.
        """
        return self.reload()

    async def reload(self):
        """Load existing backups."""
        self.snapshots = {}
        self.snapshots_obj = {}

        async def _load_snapshot(tar_file):
            """Internal function to load snapshot."""
            snapshot = Snapshot(self.config, self.loop, tar_file)
            snapshot = Snapshot(self.coresys, tar_file)
            if await snapshot.load():
                self.snapshots[snapshot.slug] = snapshot
                self.snapshots_obj[snapshot.slug] = snapshot

        tasks = [_load_snapshot(tar_file) for tar_file in
                 self.config.path_backup.glob("*.tar")]
                 self._config.path_backup.glob("*.tar")]

        _LOGGER.info("Found %d snapshot files", len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks, loop=self._loop)

    def remove(self, snapshot):
        """Remove a snapshot."""
        try:
            snapshot.tar_file.unlink()
            self.snapshots.pop(snapshot.slug, None)
            snapshot.tarfile.unlink()
            self.snapshots_obj.pop(snapshot.slug, None)
            _LOGGER.info("Removed snapshot file %s", snapshot.slug)

        except OSError as err:
            _LOGGER.error("Can't remove snapshot %s -> %s", snapshot.slug, err)
            _LOGGER.error("Can't remove snapshot %s: %s", snapshot.slug, err)
            return False

        return True

    async def do_snapshot_full(self, name=""):
        """Create a full snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False
    async def import_snapshot(self, tar_file):
        """Check snapshot tarfile and import it."""
        snapshot = Snapshot(self.coresys, tar_file)

        snapshot = self._create_snapshot(name, SNAPSHOT_FULL)
        # Read meta data
        if not await snapshot.load():
            return None

        # Already exists?
        if snapshot.slug in self.snapshots_obj:
            _LOGGER.error("Snapshot %s already exists!", snapshot.slug)
            return None

        # Move snapshot to backup
        tar_origin = Path(self._config.path_backup, f"{snapshot.slug}.tar")
        try:
            snapshot.tarfile.rename(tar_origin)

        except OSError as err:
            _LOGGER.error("Can't move snapshot file to storage: %s", err)
            return None

        # Load new snapshot
        snapshot = Snapshot(self.coresys, tar_origin)
        if not await snapshot.load():
            return None
        _LOGGER.info("Successfully imported %s", snapshot.slug)

        self.snapshots_obj[snapshot.slug] = snapshot
        return snapshot

    async def do_snapshot_full(self, name="", password=None):
        """Create a full snapshot."""
        if self.lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return None

        snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password)
        _LOGGER.info("Full-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()
            self._scheduler.suspend = True
            await self.lock.acquire()

            async with snapshot:
                # snapshot addons
                tasks = []
                for addon in self.addons.list_addons:
                    if not addon.is_installed:
                        continue
                    tasks.append(snapshot.import_addon(addon))
                # Snapshot add-ons
                _LOGGER.info("Snapshot %s store Add-ons", snapshot.slug)
                await snapshot.store_addons()

                if tasks:
                    _LOGGER.info("Full-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # snapshot folders
                _LOGGER.info("Full-Snapshot %s store folders", snapshot.slug)
                # Snapshot folders
                _LOGGER.info("Snapshot %s store folders", snapshot.slug)
                await snapshot.store_folders()

            _LOGGER.info("Full-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Snapshot %s error", snapshot.slug)
            return None

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Full-Snapshot %s error -> %s", snapshot.slug, err)
            return False
        else:
            _LOGGER.info("Full-Snapshot %s done", snapshot.slug)
            self.snapshots_obj[snapshot.slug] = snapshot
            return snapshot

        finally:
            self.sheduler.suspend = False
            self._lock.release()
            self._scheduler.suspend = False
            self.lock.release()

    async def do_snapshot_partial(self, name="", addons=None, folders=None):
    async def do_snapshot_partial(self, name="", addons=None, folders=None,
                                  password=None):
        """Create a partial snapshot."""
        if self._lock.locked():
        if self.lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False
            return None

        addons = addons or []
        folders = folders or []
        snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL)
        snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL, password)

        _LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()
            self._scheduler.suspend = True
            await self.lock.acquire()

            async with snapshot:
                # snapshot addons
                tasks = []
                for slug in addons:
                    addon = self.addons.get(slug)
                    if addon.is_installed:
                        tasks.append(snapshot.import_addon(addon))
                # Snapshot add-ons
                addon_list = []
                for addon_slug in addons:
                    addon = self._addons.get(addon_slug)
                    if addon and addon.is_installed:
                        addon_list.append(addon)
                        continue
                    _LOGGER.warning("Add-on %s not found", addon_slug)

                if tasks:
                    _LOGGER.info("Partial-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)
                _LOGGER.info("Snapshot %s store Add-ons", snapshot.slug)
                await snapshot.store_addons(addon_list)

                # snapshot folders
                _LOGGER.info("Partial-Snapshot %s store folders %s",
                             snapshot.slug, folders)
                _LOGGER.info("Snapshot %s store folders", snapshot.slug)
                await snapshot.store_folders(folders)

            _LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Snapshot %s error", snapshot.slug)
            return None

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Partial-Snapshot %s error -> %s", snapshot.slug, err)
            return False
        else:
            _LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
            self.snapshots_obj[snapshot.slug] = snapshot
            return snapshot

        finally:
            self.sheduler.suspend = False
            self._lock.release()
            self._scheduler.suspend = False
            self.lock.release()
    async def do_restore_full(self, snapshot):
    async def do_restore_full(self, snapshot, password=None):
        """Restore a snapshot."""
        if self._lock.locked():
        if self.lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        if snapshot.sys_type != SNAPSHOT_FULL:
            _LOGGER.error(
                "Full-Restore %s is only a partial snapshot!", snapshot.slug)
            _LOGGER.error("Restore %s is only a partial snapshot!",
                          snapshot.slug)
            return False

        if snapshot.protected and not snapshot.set_password(password):
            _LOGGER.error("Invalid password for snapshot %s", snapshot.slug)
            return False

        _LOGGER.info("Full-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()
            self._scheduler.suspend = True
            await self.lock.acquire()

            async with snapshot:
                # stop system
                tasks = []
                tasks.append(self.homeassistant.stop())

                for addon in self.addons.list_addons:
                # Stop Home-Assistant / Add-ons
                tasks.append(self._homeassistant.stop())
                for addon in self._addons.list_addons:
                    if addon.is_installed:
                        tasks.append(addon.stop())

                await asyncio.wait(tasks, loop=self.loop)
                if tasks:
                    _LOGGER.info("Restore %s stop tasks", snapshot.slug)
                    await asyncio.wait(tasks, loop=self._loop)

                # restore folders
                _LOGGER.info("Full-Restore %s restore folders", snapshot.slug)
                # Restore folders
                _LOGGER.info("Restore %s run folders", snapshot.slug)
                await snapshot.restore_folders()

                # start homeassistant restore
                snapshot.restore_homeassistant(self.homeassistant)
                task_hass = self.loop.create_task(
                    self.homeassistant.update(snapshot.homeassistant_version))
                # Start homeassistant restore
                _LOGGER.info("Restore %s run Home-Assistant", snapshot.slug)
                snapshot.restore_homeassistant()
                task_hass = self._loop.create_task(
                    self._homeassistant.update(snapshot.homeassistant_version))

                # restore repositories
                await self.addons.load_repositories(snapshot.repositories)
                # Restore repositories
                _LOGGER.info("Restore %s run Repositories", snapshot.slug)
                await snapshot.restore_repositories()

                # restore addons
                tasks = []
                actual_addons = \
                    set(addon.slug for addon in self.addons.list_addons
                        if addon.is_installed)
                restore_addons = \
                    set(data[ATTR_SLUG] for data in snapshot.addons)
                remove_addons = actual_addons - restore_addons

                _LOGGER.info("Full-Restore %s restore addons %s, remove %s",
                             snapshot.slug, restore_addons, remove_addons)

                for slug in remove_addons:
                    addon = self.addons.get(slug)
                    if addon:
                # Delete delta add-ons
                tasks.clear()
                for addon in self._addons.list_installed:
                    if addon.slug not in snapshot.addon_list:
                        tasks.append(addon.uninstall())
                    else:
                        _LOGGER.warning("Can't remove addon %s", slug)

                for slug in restore_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Full-Restore %s restore addons tasks %d",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)
                    _LOGGER.info("Restore %s remove add-ons", snapshot.slug)
                    await asyncio.wait(tasks, loop=self._loop)

                # Restore add-ons
                _LOGGER.info("Restore %s old add-ons", snapshot.slug)
                await snapshot.restore_addons()

                # finish homeassistant task
                _LOGGER.info("Full-Restore %s wait until homeassistant ready",
                _LOGGER.info("Restore %s wait until homeassistant ready",
                             snapshot.slug)
                await task_hass
                await self.homeassistant.run()
                await self._homeassistant.start()

        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Restore %s error", snapshot.slug)
            return False

        else:
            _LOGGER.info("Full-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Full-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()
            self._scheduler.suspend = False
            self.lock.release()

    async def do_restore_partial(self, snapshot, homeassistant=False,
                                 addons=None, folders=None):
                                 addons=None, folders=None, password=None):
        """Restore a snapshot."""
        if self._lock.locked():
        if self.lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        if snapshot.protected and not snapshot.set_password(password):
            _LOGGER.error("Invalid password for snapshot %s", snapshot.slug)
            return False

        addons = addons or []
        folders = folders or []

        _LOGGER.info("Partial-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()
            self._scheduler.suspend = True
            await self.lock.acquire()

            async with snapshot:
                tasks = []

                if FOLDER_HOMEASSISTANT in folders:
                    await self.homeassistant.stop()
                # Stop Home-Assistant if it will be restored later
                if homeassistant and FOLDER_HOMEASSISTANT in folders:
                    await self._homeassistant.stop()

                # Process folders
                if folders:
                    _LOGGER.info("Partial-Restore %s restore folders %s",
                                 snapshot.slug, folders)
                    _LOGGER.info("Restore %s run folders", snapshot.slug)
                    await snapshot.restore_folders(folders)

                # Process Home-Assistant
                task_hass = None
                if homeassistant:
                    snapshot.restore_homeassistant(self.homeassistant)
                    tasks.append(self.homeassistant.update(
                        snapshot.homeassistant_version))
                    _LOGGER.info("Restore %s run Home-Assistant",
                                 snapshot.slug)
                    snapshot.restore_homeassistant()
                    task_hass = self._loop.create_task(
                        self._homeassistant.update(
                            snapshot.homeassistant_version))

                # Process Add-ons
                addon_list = []
                for slug in addons:
                    addon = self.addons.get(slug)
                    addon = self._addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)
                        addon_list.append(addon)
                        continue
                    _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Partial-Restore %s run %d tasks",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)
                if addon_list:
                    _LOGGER.info("Restore %s old add-ons", snapshot.slug)
                    await snapshot.restore_addons(addon_list)

                # make sure homeassistant runs again
                await self.homeassistant.run()
                if task_hass:
                    _LOGGER.info("Restore %s wait for Home-Assistant",
                                 snapshot.slug)
                    await task_hass
                    await self._homeassistant.start()

        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Restore %s error", snapshot.slug)
            return False

        else:
            _LOGGER.info("Partial-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Partial-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()
            self._scheduler.suspend = False
            self.lock.release()
@@ -1,35 +1,44 @@
"""Represent a snapshot file."""
import asyncio
from base64 import b64decode, b64encode
import json
import logging
from pathlib import Path
import tarfile
from tempfile import TemporaryDirectory

from Crypto.Cipher import AES
from Crypto.Util import Padding
import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
from .util import remove_folder
from .utils import (
    remove_folder, password_to_key, password_for_validating, key_to_iv)
from ..const import (
    ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
    ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_DEVICES,
    ATTR_IMAGE)
from ..tools import write_json_file
    ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_IMAGE,
    ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT, ATTR_CRYPTO,
    ATTR_LAST_VERSION, ATTR_PROTECTED, ATTR_WAIT_BOOT, ATTR_SIZE,
    CRYPTO_AES128)
from ..coresys import CoreSysAttributes
from ..utils.json import write_json_file
from ..utils.tar import SecureTarFile

_LOGGER = logging.getLogger(__name__)


class Snapshot(object):
class Snapshot(CoreSysAttributes):
    """A single hassio snapshot."""

    def __init__(self, config, loop, tar_file):
    def __init__(self, coresys, tar_file):
        """Initialize a snapshot."""
        self.loop = loop
        self.config = config
        self.tar_file = tar_file
        self.coresys = coresys
        self._tarfile = tar_file
        self._data = {}
        self._tmp = None
        self._key = None
        self._aes = None

    @property
    def slug(self):
@@ -51,11 +60,21 @@ class Snapshot(object):
        """Return snapshot date."""
        return self._data[ATTR_DATE]

    @property
    def protected(self):
        """Return True if the snapshot is password protected."""
        return self._data.get(ATTR_PROTECTED) is not None

    @property
    def addons(self):
        """Return snapshot add-on data."""
        return self._data[ATTR_ADDONS]

    @property
    def addon_list(self):
        """Return a list of add-on slugs."""
        return [addon_data[ATTR_SLUG] for addon_data in self.addons]

    @property
    def folders(self):
        """Return list of saved folders."""
@@ -76,39 +95,29 @@ class Snapshot(object):
        """Return snapshot homeassistant version."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION)

    @homeassistant_version.setter
    def homeassistant_version(self, value):
        """Set snapshot homeassistant version."""
        self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] = value

    @property
    def homeassistant_devices(self):
        """Return snapshot homeassistant devices."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_DEVICES)

    @homeassistant_devices.setter
    def homeassistant_devices(self, value):
        """Set snapshot homeassistant devices."""
        self._data[ATTR_HOMEASSISTANT][ATTR_DEVICES] = value

    @property
    def homeassistant_image(self):
        """Return snapshot homeassistant custom image."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_IMAGE)

    @homeassistant_image.setter
    def homeassistant_image(self, value):
        """Set snapshot homeassistant custom image."""
        self._data[ATTR_HOMEASSISTANT][ATTR_IMAGE] = value
    def homeassistant(self):
        """Return snapshot homeassistant data."""
        return self._data[ATTR_HOMEASSISTANT]

    @property
    def size(self):
        """Return snapshot size."""
        if not self.tar_file.is_file():
        if not self.tarfile.is_file():
            return 0
        return self.tar_file.stat().st_size / 1048576  # calc mbyte
        return round(self.tarfile.stat().st_size / 1048576, 2)  # calc mbyte

    def create(self, slug, name, date, sys_type):
    @property
    def is_new(self):
        """Return True if the snapshot is new."""
        return not self.tarfile.exists()

    @property
    def tarfile(self):
        """Return path to Snapshot tarfile."""
        return self._tarfile

    def new(self, slug, name, date, sys_type, password=None):
        """Initialize a new snapshot."""
        # init metadata
        self._data[ATTR_SLUG] = slug
@@ -116,62 +125,78 @@ class Snapshot(object):
        self._data[ATTR_DATE] = date
        self._data[ATTR_TYPE] = sys_type

        # init other constructs
        self._data[ATTR_HOMEASSISTANT] = {}
        self._data[ATTR_ADDONS] = []
        self._data[ATTR_REPOSITORIES] = []
        self._data[ATTR_FOLDERS] = []
        # Add defaults
        self._data = SCHEMA_SNAPSHOT(self._data)

    def snapshot_homeassistant(self, homeassistant):
        """Read all data from homeassistant object."""
        self.homeassistant_version = homeassistant.version
        self.homeassistant_devices = homeassistant.devices
        # Set password
        if password:
            self._key = password_to_key(password)
            self._aes = AES.new(
                self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
            self._data[ATTR_PROTECTED] = password_for_validating(password)
            self._data[ATTR_CRYPTO] = CRYPTO_AES128

        # custom image
        if homeassistant.is_custom_image:
            self.homeassistant_image = homeassistant.image
    def set_password(self, password):
        """Set the password for an existing snapshot."""
        if not password:
            return False

    def restore_homeassistant(self, homeassistant):
        """Write all data to homeassistant object."""
        homeassistant.devices = self.homeassistant_devices
        validating = password_for_validating(password)
        if validating != self._data[ATTR_PROTECTED]:
            return False

        # custom image
        if self.homeassistant_image:
            homeassistant.set_custom(
                self.homeassistant_image, self.homeassistant_version)
        self._key = password_to_key(password)
        self._aes = AES.new(self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
        return True

    def _encrypt_data(self, data):
        """Make data secure."""
        if not self._key:
            return data

        return b64encode(
            self._aes.encrypt(Padding.pad(data.encode(), 16))).decode()

    def _decrypt_data(self, data):
        """Make data readable."""
        if not self._key:
            return data

        return Padding.unpad(
            self._aes.decrypt(b64decode(data)), 16).decode()

    async def load(self):
        """Read snapshot.json from tar file."""
        if not self.tar_file.is_file():
            _LOGGER.error("No tarfile %s", self.tar_file)
        if not self.tarfile.is_file():
            _LOGGER.error("No tarfile %s", self.tarfile)
            return False

        def _load_file():
            """Read snapshot.json."""
            with tarfile.open(self.tar_file, "r:") as snapshot:
            with tarfile.open(self.tarfile, "r:") as snapshot:
                json_file = snapshot.extractfile("./snapshot.json")
                return json_file.read()

        # read snapshot.json
        try:
            raw = await self.loop.run_in_executor(None, _load_file)
            raw = await self._loop.run_in_executor(None, _load_file)
        except (tarfile.TarError, KeyError) as err:
            _LOGGER.error(
                "Can't read snapshot tarfile %s -> %s", self.tar_file, err)
                "Can't read snapshot tarfile %s: %s", self.tarfile, err)
            return False

        # parse data
        try:
            raw_dict = json.loads(raw)
        except json.JSONDecodeError as err:
            _LOGGER.error("Can't read data for %s -> %s", self.tar_file, err)
            _LOGGER.error("Can't read data for %s: %s", self.tarfile, err)
            return False

        # validate
        try:
            self._data = SCHEMA_SNAPSHOT(raw_dict)
        except vol.Invalid as err:
            _LOGGER.error("Can't validate data for %s -> %s", self.tar_file,
            _LOGGER.error("Can't validate data for %s: %s", self.tarfile,
                          humanize_error(raw_dict, err))
            return False
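The password handling above derives both the AES-128 key and the IV deterministically from the user's password and stores only a one-character validation digest in the metadata, so the tarball itself never carries key material. A minimal standalone sketch of the same scheme, assuming pycryptodome and the helper semantics from hassio/snapshots/utils.py shown later in this diff:

    import hashlib
    from base64 import b64decode, b64encode

    from Crypto.Cipher import AES
    from Crypto.Util import Padding


    def password_to_key(password):
        """Stretch a password into 16 bytes of AES-128 key material."""
        password = password.encode()
        for _ in range(100):
            password = hashlib.sha256(password).digest()
        return password[:16]


    def key_to_iv(key):
        """Derive a 16 byte IV from the key by repeated hashing."""
        for _ in range(100):
            key = hashlib.sha256(key).digest()
        return key[:16]


    key = password_to_key("hunter2")  # illustrative password
    aes = AES.new(key, AES.MODE_CBC, iv=key_to_iv(key))
    token = b64encode(aes.encrypt(Padding.pad(b"api-password", 16))).decode()

    # A CBC cipher object is stateful, so decryption uses a fresh one; the
    # Snapshot class likewise uses self._aes in only one direction per run.
    aes = AES.new(key, AES.MODE_CBC, iv=key_to_iv(key))
    assert Padding.unpad(aes.decrypt(b64decode(token)), 16) == b"api-password"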
@@ -179,122 +204,220 @@ class Snapshot(object):

    async def __aenter__(self):
        """Async context to open a snapshot."""
        self._tmp = TemporaryDirectory(dir=str(self.config.path_tmp))
        self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))

        # create a snapshot
        if not self.tar_file.is_file():
        if not self.tarfile.is_file():
            return self

        # extract an existing snapshot
        def _extract_snapshot():
            """Extract a snapshot."""
            with tarfile.open(self.tar_file, "r:") as tar:
            with tarfile.open(self.tarfile, "r:") as tar:
                tar.extractall(path=self._tmp.name)

        await self.loop.run_in_executor(None, _extract_snapshot)
        await self._loop.run_in_executor(None, _extract_snapshot)

    async def __aexit__(self, exception_type, exception_value, traceback):
        """Async context to close a snapshot."""
        # existing snapshot or exception on build
        if self.tar_file.is_file() or exception_type is not None:
            return self._tmp.cleanup()
        if self.tarfile.is_file() or exception_type is not None:
            self._tmp.cleanup()
            return

        # validate data
        try:
            self._data = SCHEMA_SNAPSHOT(self._data)
        except vol.Invalid as err:
            _LOGGER.error("Invalid data for %s -> %s", self.tar_file,
            _LOGGER.error("Invalid data for %s: %s", self.tarfile,
                          humanize_error(self._data, err))
            raise ValueError("Invalid config") from None

        # new snapshot, build it
        def _create_snapshot():
            """Create a new snapshot."""
            with tarfile.open(self.tar_file, "w:") as tar:
            with tarfile.open(self.tarfile, "w:") as tar:
                tar.add(self._tmp.name, arcname=".")

        if write_json_file(Path(self._tmp.name, "snapshot.json"), self._data):
            await self.loop.run_in_executor(None, _create_snapshot)
        else:
            _LOGGER.error("Can't write snapshot.json")
        try:
            write_json_file(Path(self._tmp.name, "snapshot.json"), self._data)
            await self._loop.run_in_executor(None, _create_snapshot)
        except (OSError, json.JSONDecodeError) as err:
            _LOGGER.error("Can't write snapshot: %s", err)
        finally:
            self._tmp.cleanup()

        self._tmp.cleanup()
        self._tmp = None

    async def store_addons(self, addon_list=None):
        """Add a list of add-ons into snapshot."""
        addon_list = addon_list or self._addons.list_installed

    async def import_addon(self, addon):
        """Add an add-on into snapshot."""
        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))
        async def _addon_save(addon):
            """Task to store an add-on into snapshot."""
            addon_file = SecureTarFile(
                Path(self._tmp.name, f"{addon.slug}.tar.gz"),
                'w', key=self._key)

        if not await addon.snapshot(snapshot_file):
            _LOGGER.error("Can't make snapshot from %s", addon.slug)
            return False
            # Take snapshot
            if not await addon.snapshot(addon_file):
                _LOGGER.error("Can't make snapshot from %s", addon.slug)
                return

        # store to config
        self._data[ATTR_ADDONS].append({
            ATTR_SLUG: addon.slug,
            ATTR_NAME: addon.name,
            ATTR_VERSION: addon.version_installed,
        })
            # Store to config
            self._data[ATTR_ADDONS].append({
                ATTR_SLUG: addon.slug,
                ATTR_NAME: addon.name,
                ATTR_VERSION: addon.version_installed,
                ATTR_SIZE: addon_file.size,
            })

        return True
        # Run tasks
        tasks = [_addon_save(addon) for addon in addon_list]
        if tasks:
            await asyncio.wait(tasks, loop=self._loop)

    async def export_addon(self, addon):
        """Restore an add-on from snapshot."""
        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))
    async def restore_addons(self, addon_list=None):
        """Restore a list of add-ons from snapshot."""
        if not addon_list:
            addon_list = []
            for addon_slug in self.addon_list:
                addon = self._addons.get(addon_slug)
                if addon:
                    addon_list.append(addon)

        if not await addon.restore(snapshot_file):
            _LOGGER.error("Can't restore snapshot for %s", addon.slug)
            return False
        async def _addon_restore(addon):
            """Task to restore an add-on from snapshot."""
            addon_file = SecureTarFile(
                Path(self._tmp.name, f"{addon.slug}.tar.gz"),
                'r', key=self._key)

        return True
            # Check it exists inside the snapshot
            if not addon_file.path.exists():
                _LOGGER.error("Can't find snapshot for %s", addon.slug)
                return

            # Perform a restore
            if not await addon.restore(addon_file):
                _LOGGER.error("Can't restore snapshot for %s", addon.slug)
                return

        # Run tasks
        tasks = [_addon_restore(addon) for addon in addon_list]
        if tasks:
            await asyncio.wait(tasks, loop=self._loop)

    async def store_folders(self, folder_list=None):
        """Backup hassio data into snapshot."""
        folder_list = folder_list or ALL_FOLDERS
        folder_list = set(folder_list or ALL_FOLDERS)

        def _folder_save(name):
            """Internal function to snapshot a folder."""
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)
            tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
            origin_dir = Path(self._config.path_hassio, name)

            # Check if it exists
            if not origin_dir.is_dir():
                _LOGGER.warning("Can't find snapshot folder %s", name)
                return

            # Take snapshot
            try:
                with tarfile.open(snapshot_tar, "w:gz",
                                  compresslevel=1) as tar_file:
                _LOGGER.info("Snapshot folder %s", name)
                with SecureTarFile(tar_name, 'w', key=self._key) as tar_file:
                    tar_file.add(origin_dir, arcname=".")

                _LOGGER.info("Snapshot folder %s done", name)
                self._data[ATTR_FOLDERS].append(name)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't snapshot folder %s -> %s", name, err)
            except (tarfile.TarError, OSError) as err:
                _LOGGER.warning("Can't snapshot folder %s: %s", name, err)

        # Run tasks
        tasks = [self.loop.run_in_executor(None, _folder_save, folder)
        tasks = [self._loop.run_in_executor(None, _folder_save, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks, loop=self._loop)

    async def restore_folders(self, folder_list=None):
        """Restore hassio data from snapshot."""
        folder_list = folder_list or ALL_FOLDERS
        folder_list = set(folder_list or self.folders)

        def _folder_restore(name):
            """Internal function to restore a folder."""
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)
            tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
            origin_dir = Path(self._config.path_hassio, name)

            # Check it exists inside the snapshot
            if not tar_name.exists():
                _LOGGER.warning("Can't find restore folder %s", name)
                return

            # Clean old stuff
            if origin_dir.is_dir():
                remove_folder(origin_dir)

            # Perform a restore
            try:
                with tarfile.open(snapshot_tar, "r:gz") as tar_file:
                _LOGGER.info("Restore folder %s", name)
                with SecureTarFile(tar_name, 'r', key=self._key) as tar_file:
                    tar_file.extractall(path=origin_dir)
                _LOGGER.info("Restore folder %s done", name)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't restore folder %s -> %s", name, err)
            except (tarfile.TarError, OSError) as err:
                _LOGGER.warning("Can't restore folder %s: %s", name, err)

        # Run tasks
        tasks = [self.loop.run_in_executor(None, _folder_restore, folder)
        tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks, loop=self._loop)

    def store_homeassistant(self):
        """Read all data from homeassistant object."""
        self.homeassistant[ATTR_VERSION] = self._homeassistant.version
        self.homeassistant[ATTR_WATCHDOG] = self._homeassistant.watchdog
        self.homeassistant[ATTR_BOOT] = self._homeassistant.boot
        self.homeassistant[ATTR_WAIT_BOOT] = self._homeassistant.wait_boot

        # Custom image
        if self._homeassistant.is_custom_image:
            self.homeassistant[ATTR_IMAGE] = self._homeassistant.image
            self.homeassistant[ATTR_LAST_VERSION] = \
                self._homeassistant.last_version

        # API/Proxy
        self.homeassistant[ATTR_PORT] = self._homeassistant.api_port
        self.homeassistant[ATTR_SSL] = self._homeassistant.api_ssl
        self.homeassistant[ATTR_PASSWORD] = \
            self._encrypt_data(self._homeassistant.api_password)

    def restore_homeassistant(self):
        """Write all data to homeassistant object."""
        self._homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
        self._homeassistant.boot = self.homeassistant[ATTR_BOOT]
        self._homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]

        # Custom image
        if self.homeassistant.get(ATTR_IMAGE):
            self._homeassistant.image = self.homeassistant[ATTR_IMAGE]
            self._homeassistant.last_version = \
                self.homeassistant[ATTR_LAST_VERSION]

        # API/Proxy
        self._homeassistant.api_port = self.homeassistant[ATTR_PORT]
        self._homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
        self._homeassistant.api_password = \
            self._decrypt_data(self.homeassistant[ATTR_PASSWORD])

        # save
        self._homeassistant.save_data()

    def store_repositories(self):
        """Store repository list into snapshot."""
        self.repositories = self._config.addons_repositories

    def restore_repositories(self):
        """Restore repositories from snapshot.

        Return a coroutine.
        """
        return self._addons.load_repositories(self.repositories)
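Taken together, a caller drives a Snapshot through its async context manager: entering prepares (or extracts) the staging directory, the store_*/restore_* coroutines work inside it, and a clean exit validates the metadata and writes the tarball. A usage sketch only; the surrounding manager code is not part of this hunk:

    async def build_partial_snapshot(snapshot, folders):
        """Illustrative driver for the Snapshot API above."""
        snapshot.store_repositories()
        snapshot.store_homeassistant()

        async with snapshot:
            await snapshot.store_folders(folders)
            await snapshot.store_addons()
        # Leaving the context validated _data and wrote snapshot.json + tarball.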
hassio/snapshots/util.py (deleted, 21 lines)

@@ -1,21 +0,0 @@
"""Util addons functions."""
import hashlib
import shutil


def create_slug(name, date_str):
    """Generate a hash from repository."""
    key = "{} - {}".format(date_str, name).lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def remove_folder(folder):
    """Remove folder data but not the folder itself."""
    for obj in folder.iterdir():
        try:
            if obj.is_dir():
                shutil.rmtree(str(obj), ignore_errors=True)
            else:
                obj.unlink()
        except (OSError, shutil.Error):
            pass
hassio/snapshots/utils.py (new file, 49 lines)

@@ -0,0 +1,49 @@
"""Util addons functions."""
import hashlib
import shutil
import re

RE_DIGITS = re.compile(r"\d+")


def password_to_key(password):
    """Generate an AES key from a password."""
    password = password.encode()
    for _ in range(100):
        password = hashlib.sha256(password).digest()
    return password[:16]


def password_for_validating(password):
    """Generate a short validation digest from a password."""
    for _ in range(100):
        password = hashlib.sha256(password.encode()).hexdigest()
    try:
        return str(sum(map(int, RE_DIGITS.findall(password))))[0]
    except (ValueError, IndexError):
        return "0"


def key_to_iv(key):
    """Generate an IV from a key."""
    for _ in range(100):
        key = hashlib.sha256(key).digest()
    return key[:16]


def create_slug(name, date_str):
    """Generate a hash from repository."""
    key = "{} - {}".format(date_str, name).lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def remove_folder(folder):
    """Remove folder data but not the folder itself."""
    for obj in folder.iterdir():
        try:
            if obj.is_dir():
                shutil.rmtree(str(obj), ignore_errors=True)
            else:
                obj.unlink()
        except (OSError, shutil.Error):
            pass
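These helpers are pure functions, so their contract is easy to pin down: password_to_key and key_to_iv always return 16 bytes, and password_for_validating reduces a password to a single character that can be compared without ever storing the password itself. A short illustrative session, assuming the definitions above:

    key = password_to_key("hunter2")             # 16 bytes of key material
    iv = key_to_iv(key)                          # 16 bytes, derived from the key
    marker = password_for_validating("hunter2")  # one-character digest

    assert len(key) == 16 and len(iv) == 16 and len(marker) == 1
    # The marker lands in snapshot.json as ATTR_PROTECTED; a wrong password
    # usually yields a different digit (a deliberately weak pre-check), so a
    # restore can be refused before any decryption is attempted.
    assert marker == password_for_validating("hunter2")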
@@ -4,29 +4,53 @@ import voluptuous as vol

from ..const import (
    ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
    ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_DEVICES,
    ATTR_IMAGE, FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
    SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
from ..validate import HASS_DEVICES
    ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_IMAGE,
    ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT, ATTR_SIZE,
    ATTR_LAST_VERSION, ATTR_WAIT_BOOT, ATTR_PROTECTED, ATTR_CRYPTO,
    FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
    SNAPSHOT_FULL, SNAPSHOT_PARTIAL, CRYPTO_AES128)
from ..validate import NETWORK_PORT, REPOSITORIES, DOCKER_IMAGE

ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]


def unique_addons(addons_list):
    """Validate that each add-on slug is unique."""
    single = set([addon[ATTR_SLUG] for addon in addons_list])

    if len(single) != len(addons_list):
        raise vol.Invalid("Invalid addon list on snapshot!")
    return addons_list


# pylint: disable=no-value-for-parameter
SCHEMA_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Required(ATTR_DATE): vol.Coerce(str),
    vol.Required(ATTR_HOMEASSISTANT): vol.Schema({
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
    }),
    vol.Optional(ATTR_FOLDERS, default=[]): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS, default=[]): [vol.Schema({
    vol.Inclusive(ATTR_PROTECTED, 'encrypted'):
        vol.All(vol.Coerce(str), vol.Length(min=1, max=1)),
    vol.Inclusive(ATTR_CRYPTO, 'encrypted'): CRYPTO_AES128,
    vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema({
        vol.Optional(ATTR_VERSION): vol.Coerce(str),
        vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
        vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
        vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
        vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
        vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
        vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
        vol.Optional(ATTR_WAIT_BOOT, default=600):
            vol.All(vol.Coerce(int), vol.Range(min=60)),
    }, extra=vol.REMOVE_EXTRA),
    vol.Optional(ATTR_FOLDERS, default=list):
        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
    vol.Optional(ATTR_ADDONS, default=list): vol.All([vol.Schema({
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
    })],
    vol.Optional(ATTR_REPOSITORIES, default=[]): [vol.Url()],
        vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float),
    }, extra=vol.REMOVE_EXTRA)], unique_addons),
    vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
}, extra=vol.ALLOW_EXTRA)
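SCHEMA_SNAPSHOT does double duty: it validates incoming snapshot.json data and normalizes it, with default= filling absent keys, vol.Unique() rejecting duplicate folders, and the custom unique_addons validator guarding the add-on list. A reduced sketch of those mechanics, using illustrative plain-string keys instead of the ATTR_* constants:

    import voluptuous as vol


    def unique_addons(addons_list):
        """Reject duplicate add-on slugs, mirroring the validator above."""
        if len({addon['slug'] for addon in addons_list}) != len(addons_list):
            raise vol.Invalid("Invalid addon list on snapshot!")
        return addons_list


    SCHEMA = vol.Schema({
        vol.Required('slug'): vol.Coerce(str),
        vol.Optional('folders', default=list): vol.All(
            [vol.In(['ssl', 'share'])], vol.Unique()),
        vol.Optional('addons', default=list): vol.All([vol.Schema({
            vol.Required('slug'): vol.Coerce(str),
            vol.Optional('size', default=0): vol.Coerce(float),
        }, extra=vol.REMOVE_EXTRA)], unique_addons),
    }, extra=vol.ALLOW_EXTRA)

    data = SCHEMA({'slug': 'backup1', 'addons': [{'slug': 'mqtt'}]})
    assert data['folders'] == [] and data['addons'][0]['size'] == 0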
hassio/supervisor.py (new file, 82 lines)

@@ -0,0 +1,82 @@
"""HomeAssistant control object."""
import logging

from .coresys import CoreSysAttributes
from .docker.supervisor import DockerSupervisor

_LOGGER = logging.getLogger(__name__)


class Supervisor(CoreSysAttributes):
    """Supervisor core object."""

    def __init__(self, coresys):
        """Initialize supervisor object."""
        self.coresys = coresys
        self.instance = DockerSupervisor(coresys)

    async def load(self):
        """Prepare supervisor object."""
        if not await self.instance.attach():
            _LOGGER.fatal("Can't setup supervisor docker container!")
        await self.instance.cleanup()

    @property
    def need_update(self):
        """Return True if an update is available."""
        return self.version != self.last_version

    @property
    def version(self):
        """Return version of running supervisor."""
        return self.instance.version

    @property
    def last_version(self):
        """Return last available version of supervisor."""
        return self._updater.version_hassio

    @property
    def image(self):
        """Return image name of the supervisor container."""
        return self.instance.image

    @property
    def arch(self):
        """Return arch of the Hass.io container."""
        return self.instance.arch

    async def update(self, version=None):
        """Update supervisor version."""
        version = version or self.last_version

        if version == self._supervisor.version:
            _LOGGER.warning("Version %s is already installed", version)
            return

        _LOGGER.info("Update supervisor to version %s", version)
        if await self.instance.install(version):
            self._loop.call_later(1, self._loop.stop)
            return True

        _LOGGER.error("Update of Hass.io fails!")
        return False

    @property
    def in_progress(self):
        """Return True if a task is in progress."""
        return self.instance.in_progress

    def logs(self):
        """Get Supervisor docker logs.

        Return a coroutine.
        """
        return self.instance.logs()

    def stats(self):
        """Return stats of Supervisor.

        Return a coroutine.
        """
        return self.instance.stats()
hassio/tasks.py

@@ -1,29 +1,61 @@
"""Multiple tasks."""
import asyncio
from datetime import datetime
import logging

from .coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


def api_sessions_cleanup(config):
    """Create scheduler task for cleanup api sessions."""
    async def _api_sessions_cleanup():
        """Cleanup old api sessions."""
        now = datetime.now()
        for session, until_valid in config.security_sessions.items():
            if now >= until_valid:
                config.drop_security_session(session)
class Tasks(CoreSysAttributes):
    """Handle Tasks inside HassIO."""

    return _api_sessions_cleanup
    RUN_UPDATE_SUPERVISOR = 29100
    RUN_UPDATE_ADDONS = 57600

    RUN_RELOAD_ADDONS = 21600
    RUN_RELOAD_SNAPSHOTS = 72000
    RUN_RELOAD_HOST_CONTROL = 72000
    RUN_RELOAD_UPDATER = 21600

def addons_update(loop, addons):
    """Create scheduler task for auto update addons."""
    async def _addons_update():
    RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
    RUN_WATCHDOG_HOMEASSISTANT_API = 300

    def __init__(self, coresys):
        """Initialize Tasks."""
        self.coresys = coresys
        self.jobs = set()
        self._data = {}

    async def load(self):
        """Add Tasks to scheduler."""
        self.jobs.add(self._scheduler.register_task(
            self._update_addons, self.RUN_UPDATE_ADDONS))
        self.jobs.add(self._scheduler.register_task(
            self._update_supervisor, self.RUN_UPDATE_SUPERVISOR))

        self.jobs.add(self._scheduler.register_task(
            self._addons.reload, self.RUN_RELOAD_ADDONS))
        self.jobs.add(self._scheduler.register_task(
            self._updater.reload, self.RUN_RELOAD_UPDATER))
        self.jobs.add(self._scheduler.register_task(
            self._snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
        self.jobs.add(self._scheduler.register_task(
            self._host_control.load, self.RUN_RELOAD_HOST_CONTROL))

        self.jobs.add(self._scheduler.register_task(
            self._watchdog_homeassistant_docker,
            self.RUN_WATCHDOG_HOMEASSISTANT_DOCKER))
        self.jobs.add(self._scheduler.register_task(
            self._watchdog_homeassistant_api,
            self.RUN_WATCHDOG_HOMEASSISTANT_API))

        _LOGGER.info("All core tasks are scheduled")

    async def _update_addons(self):
        """Check if an update is available for an add-on and update it."""
        tasks = []
        for addon in addons.list_addons:
        for addon in self._addons.list_addons:
            if not addon.is_installed or not addon.auto_update:
                continue
@@ -38,37 +70,61 @@ def addons_update(loop, addons):

        if tasks:
            _LOGGER.info("Addon auto update process %d tasks", len(tasks))
            await asyncio.wait(tasks, loop=loop)
            await asyncio.wait(tasks, loop=self._loop)

    return _addons_update


def hassio_update(supervisor, updater):
    """Create scheduler task for update of supervisor hassio."""
    async def _hassio_update():
    async def _update_supervisor(self):
        """Check and run update of supervisor hassio."""
        await updater.fetch_data()
        if updater.version_hassio == supervisor.version:
        if not self._supervisor.need_update:
            return

        # don't perform an update on beta/dev channel
        if updater.beta_channel:
            _LOGGER.warning("Ignore Hass.IO update on beta upstream!")
        if self._updater.beta_channel:
            _LOGGER.warning("Ignore Hass.io update on beta upstream!")
            return

        _LOGGER.info("Found new HassIO version %s.", updater.version_hassio)
        await supervisor.update(updater.version_hassio)
        _LOGGER.info("Found new Hass.io version")
        await self._supervisor.update()

    return _hassio_update


def homeassistant_watchdog(loop, homeassistant):
    """Create scheduler task for monitoring running state."""
    async def _homeassistant_watchdog():
        """Check running state and start if it is down."""
        if homeassistant.in_progress or await homeassistant.is_running():
    async def _watchdog_homeassistant_docker(self):
        """Check running state of docker and start if it is down."""
        # if Home-Assistant is active
        if not await self._homeassistant.is_initialize() or \
                not self._homeassistant.watchdog:
            return

        loop.create_task(homeassistant.run())
        # if Home-Assistant is running
        if self._homeassistant.in_progress or \
                await self._homeassistant.is_running():
            return

    return _homeassistant_watchdog
        _LOGGER.warning("Watchdog found a problem with Home-Assistant docker!")
        await self._homeassistant.start()

    async def _watchdog_homeassistant_api(self):
        """Create scheduler task for monitoring running state of API.

        Try 2 times to call API before we restart Home-Assistant. Maybe we had
        a delay in our system.
        """
        retry_scan = self._data.get('HASS_WATCHDOG_API', 0)

        # If Home-Assistant is active
        if not await self._homeassistant.is_initialize() or \
                not self._homeassistant.watchdog:
            return

        # If Home-Assistant API is up
        if self._homeassistant.in_progress or \
                await self._homeassistant.check_api_state():
            return

        # Looks like we ran into a problem
        retry_scan += 1
        if retry_scan == 1:
            self._data['HASS_WATCHDOG_API'] = retry_scan
            _LOGGER.warning("Watchdog missed an API response from Home-Assistant")
            return

        _LOGGER.error("Watchdog found a problem with Home-Assistant API!")
        await self._homeassistant.restart()
        self._data['HASS_WATCHDOG_API'] = 0
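The API watchdog is deliberately two-strike: a first missed response only bumps a counter, and a second consecutive miss triggers the restart. The same pattern in isolation, with placeholder check/restart callables that are not part of the diff:

    class TwoStrikeWatchdog:
        """Restart a service only after two consecutive failed checks."""

        def __init__(self, check, restart):
            self._check = check      # async () -> bool
            self._restart = restart  # async () -> None
            self._misses = 0

        async def tick(self):
            """Run one scheduled probe."""
            if await self._check():
                self._misses = 0
                return
            self._misses += 1
            if self._misses >= 2:
                await self._restart()
                self._misses = 0

A scheduler would call tick() every RUN_WATCHDOG_HOMEASSISTANT_API seconds, so a single dropped request during a busy moment never forces a restart.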
hassio/tools.py (deleted, 141 lines)

@@ -1,141 +0,0 @@
"""Tools file for HassIO."""
import asyncio
from contextlib import suppress
from datetime import datetime
import json
import logging
import socket
import re

import aiohttp
import async_timeout
import voluptuous as vol
from voluptuous.humanize import humanize_error

_LOGGER = logging.getLogger(__name__)

FREEGEOIP_URL = "https://freegeoip.io/json/"

RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")


def get_local_ip(loop):
    """Retrieve local IP address.

    Return a future.
    """
    def local_ip():
        """Return local ip."""
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

            # Use Google Public DNS server to determine own IP
            sock.connect(('8.8.8.8', 80))

            return sock.getsockname()[0]
        except socket.error:
            return socket.gethostbyname(socket.gethostname())
        finally:
            sock.close()

    return loop.run_in_executor(None, local_ip)


def write_json_file(jsonfile, data):
    """Write a json file."""
    try:
        json_str = json.dumps(data, indent=2)
        with jsonfile.open('w') as conf_file:
            conf_file.write(json_str)
    except (OSError, json.JSONDecodeError):
        return False

    return True


def read_json_file(jsonfile):
    """Read a json file and return a dict."""
    with jsonfile.open('r') as cfile:
        return json.loads(cfile.read())


async def fetch_timezone(websession):
    """Read timezone from freegeoip."""
    data = {}
    with suppress(aiohttp.ClientError, asyncio.TimeoutError,
                  json.JSONDecodeError, KeyError):
        with async_timeout.timeout(10, loop=websession.loop):
            async with websession.get(FREEGEOIP_URL) as request:
                data = await request.json()

    return data.get('time_zone', 'UTC')


def convert_to_ascii(raw):
    """Convert binary to ascii and remove colors."""
    return RE_STRING.sub("", raw.decode())


class JsonConfig(object):
    """Base class for JSON backed configuration objects."""

    def __init__(self, json_file, schema):
        """Initialize the config object."""
        self._file = json_file
        self._schema = schema
        self._data = {}

        # init or load data
        if self._file.is_file():
            try:
                self._data = read_json_file(self._file)
            except (OSError, json.JSONDecodeError):
                _LOGGER.warning("Can't read %s", self._file)
                self._data = {}

        # validate
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse %s -> %s",
                          self._file, humanize_error(self._data, ex))

    def save(self):
        """Store data to config file."""
        # validate
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse data -> %s",
                          humanize_error(self._data, ex))
            return False

        # write
        if not write_json_file(self._file, self._data):
            _LOGGER.error("Can't store config in %s", self._file)
            return False
        return True


class AsyncThrottle(object):
    """
    Decorator that prevents a function from being called more than once every
    time period.
    """
    def __init__(self, delta):
        """Initialize async throttle."""
        self.throttle_period = delta
        self.time_of_last_call = datetime.min

    def __call__(self, method):
        """Throttle function."""
        async def wrapper(*args, **kwargs):
            """Throttle function wrapper."""
            now = datetime.now()
            time_since_last_call = now - self.time_of_last_call

            if time_since_last_call > self.throttle_period:
                self.time_of_last_call = now
                return await method(*args, **kwargs)

        return wrapper
hassio/updater.py

@@ -10,21 +10,28 @@ import async_timeout
from .const import (
    URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
    ATTR_BETA_CHANNEL)
from .tools import AsyncThrottle, JsonConfig
from .coresys import CoreSysAttributes
from .utils import AsyncThrottle
from .utils.json import JsonConfig
from .validate import SCHEMA_UPDATER_CONFIG

_LOGGER = logging.getLogger(__name__)


class Updater(JsonConfig):
class Updater(JsonConfig, CoreSysAttributes):
    """Fetch last versions from version.json."""

    def __init__(self, config, loop, websession):
    def __init__(self, coresys):
        """Initialize updater."""
        super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
        self.config = config
        self.loop = loop
        self.websession = websession
        self.coresys = coresys

    def load(self):
        """Update internal data.

        Return a coroutine.
        """
        return self.reload()

    @property
    def version_homeassistant(self):
@@ -52,10 +59,9 @@ class Updater(JsonConfig):
    def beta_channel(self, value):
        """Set beta upstream mode."""
        self._data[ATTR_BETA_CHANNEL] = bool(value)
        self.save()

    @AsyncThrottle(timedelta(seconds=60))
    async def fetch_data(self):
    async def reload(self):
        """Fetch current versions from github.

        Is a coroutine.
@@ -63,16 +69,16 @@ class Updater(JsonConfig):
        url = URL_HASSIO_VERSION.format(self.upstream)
        try:
            _LOGGER.info("Fetch update data from %s", url)
            with async_timeout.timeout(10, loop=self.loop):
                async with self.websession.get(url) as request:
            with async_timeout.timeout(10, loop=self._loop):
                async with self._websession.get(url) as request:
                    data = await request.json(content_type=None)

        except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
            _LOGGER.warning("Can't fetch versions from %s -> %s", url, err)
            _LOGGER.warning("Can't fetch versions from %s: %s", url, err)
            return

        except json.JSONDecodeError as err:
            _LOGGER.warning("Can't parse versions from %s -> %s", url, err)
            _LOGGER.warning("Can't parse versions from %s: %s", url, err)
            return

        # data valid?
@@ -83,4 +89,4 @@ class Updater(JsonConfig):
        # update versions
        self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
        self._data[ATTR_HASSIO] = data.get('hassio')
        self.save()
        self.save_data()
hassio/utils/__init__.py (new file, 34 lines)

@@ -0,0 +1,34 @@
"""Tools file for HassIO."""
from datetime import datetime
import re

RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")


def convert_to_ascii(raw):
    """Convert binary to ascii and remove colors."""
    return RE_STRING.sub("", raw.decode())


class AsyncThrottle(object):
    """
    Decorator that prevents a function from being called more than once every
    time period.
    """
    def __init__(self, delta):
        """Initialize async throttle."""
        self.throttle_period = delta
        self.time_of_last_call = datetime.min

    def __call__(self, method):
        """Throttle function."""
        async def wrapper(*args, **kwargs):
            """Throttle function wrapper."""
            now = datetime.now()
            time_since_last_call = now - self.time_of_last_call

            if time_since_last_call > self.throttle_period:
                self.time_of_last_call = now
                return await method(*args, **kwargs)

        return wrapper
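Note that AsyncThrottle drops throttled calls instead of queueing them: inside the window the wrapper simply falls through and returns None, which is what lets Updater.reload (earlier in this diff) be scheduled freely. A usage sketch, assuming the class above; Fetcher is an illustrative name:

    import asyncio
    from datetime import timedelta


    class Fetcher:
        """Illustrative consumer of AsyncThrottle."""

        def __init__(self):
            self.calls = 0

        @AsyncThrottle(timedelta(seconds=60))
        async def fetch(self):
            self.calls += 1


    async def main():
        fetcher = Fetcher()
        await fetcher.fetch()  # runs
        await fetcher.fetch()  # inside the window: skipped, returns None
        assert fetcher.calls == 1

    asyncio.get_event_loop().run_until_complete(main())

One caveat of this design: the decorator object is created once at class-definition time, so the throttle window is shared across all instances of the decorated class.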
hassio/utils/dt.py (new file, 83 lines)

@@ -0,0 +1,83 @@
"""Tools file for HassIO."""
import asyncio
from datetime import datetime, timedelta, timezone
import logging
import re

import aiohttp
import async_timeout
import pytz

UTC = pytz.utc

_LOGGER = logging.getLogger(__name__)

FREEGEOIP_URL = "https://freegeoip.io/json/"

# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
DATETIME_RE = re.compile(
    r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
    r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
    r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
)


async def fetch_timezone(websession):
    """Read timezone from freegeoip."""
    data = {}
    try:
        with async_timeout.timeout(10, loop=websession.loop):
            async with websession.get(FREEGEOIP_URL) as request:
                data = await request.json()

    except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
        _LOGGER.warning("Can't fetch freegeoip data: %s", err)

    except ValueError as err:
        _LOGGER.warning("Error on parse freegeoip data: %s", err)

    return data.get('time_zone', 'UTC')


# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
def parse_datetime(dt_str):
    """Parse a string and return a datetime.datetime.

    This function supports time zone offsets. When the input contains one,
    the output uses a timezone with a fixed offset from UTC.
    Raises ValueError if the input is well formatted but not a valid datetime.
    Returns None if the input isn't well formatted.
    """
    match = DATETIME_RE.match(dt_str)
    if not match:
        return None
    kws = match.groupdict()  # type: Dict[str, Any]
    if kws['microsecond']:
        kws['microsecond'] = kws['microsecond'].ljust(6, '0')
    tzinfo_str = kws.pop('tzinfo')

    tzinfo = None  # type: Optional[dt.tzinfo]
    if tzinfo_str == 'Z':
        tzinfo = UTC
    elif tzinfo_str is not None:
        offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
        offset_hours = int(tzinfo_str[1:3])
        offset = timedelta(hours=offset_hours, minutes=offset_mins)
        if tzinfo_str[0] == '-':
            offset = -offset
        tzinfo = timezone(offset)
    else:
        tzinfo = None
    kws = {k: int(v) for k, v in kws.items() if v is not None}
    kws['tzinfo'] = tzinfo
    return datetime(**kws)


def utcnow():
    """Return the current timestamp including timezone."""
    return datetime.now(UTC)
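parse_datetime accepts both naive and offset-aware strings and keeps the offset as a fixed-offset timezone; malformed input yields None rather than an exception. For example, under the definitions above:

    naive = parse_datetime("2018-02-15 12:30:00")
    assert naive.tzinfo is None

    aware = parse_datetime("2018-02-15T12:30:00+01:00")
    assert aware.utcoffset().total_seconds() == 3600

    assert parse_datetime("not a date") is None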
hassio/utils/json.py (new file, 81 lines)

@@ -0,0 +1,81 @@
"""Tools file for HassIO."""
import json
import logging

import voluptuous as vol
from voluptuous.humanize import humanize_error

_LOGGER = logging.getLogger(__name__)


def write_json_file(jsonfile, data):
    """Write a json file."""
    json_str = json.dumps(data, indent=2)
    with jsonfile.open('w') as conf_file:
        conf_file.write(json_str)


def read_json_file(jsonfile):
    """Read a json file and return a dict."""
    with jsonfile.open('r') as cfile:
        return json.loads(cfile.read())


class JsonConfig(object):
    """Base class for JSON backed configuration objects."""

    def __init__(self, json_file, schema):
        """Initialize the config object."""
        self._file = json_file
        self._schema = schema
        self._data = {}

        self.read_data()

    def reset_data(self):
        """Reset json file to default."""
        try:
            self._data = self._schema({})
        except vol.Invalid as ex:
            _LOGGER.error("Can't reset %s: %s",
                          self._file, humanize_error(self._data, ex))

    def read_data(self):
        """Read json file & validate."""
        if self._file.is_file():
            try:
                self._data = read_json_file(self._file)
            except (OSError, json.JSONDecodeError):
                _LOGGER.warning("Can't read %s", self._file)
                self._data = {}

        # Validate
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse %s: %s",
                          self._file, humanize_error(self._data, ex))

            # Reset data to default
            _LOGGER.warning("Reset %s to default", self._file)
            self._data = self._schema({})

    def save_data(self):
        """Store data to config file."""
        # Validate
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse data: %s",
                          humanize_error(self._data, ex))

            # Load last valid data
            _LOGGER.warning("Reset %s to last version", self._file)
            self.read_data()
            return

        # write
        try:
            write_json_file(self._file, self._data)
        except (OSError, json.JSONDecodeError) as err:
            _LOGGER.error("Can't store config in %s: %s", self._file, err)
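JsonConfig is the persistence base class that Updater (earlier in this diff) now builds on: a subclass hands over a file path and a voluptuous schema, mutates self._data, and calls save_data(). A minimal subclass sketch; the path and schema here are illustrative:

    from pathlib import Path

    import voluptuous as vol

    SCHEMA_DEMO = vol.Schema({
        vol.Optional('beta_channel', default=False): vol.Boolean(),
    }, extra=vol.REMOVE_EXTRA)


    class DemoConfig(JsonConfig):
        """Example consumer of JsonConfig."""

        def __init__(self):
            super().__init__(Path("/data/demo.json"), SCHEMA_DEMO)

        @property
        def beta_channel(self):
            """Return the stored flag, defaulted by the schema."""
            return self._data['beta_channel']

        @beta_channel.setter
        def beta_channel(self, value):
            self._data['beta_channel'] = bool(value)
            self.save_data()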
hassio/utils/tar.py (new file, 88 lines)

@@ -0,0 +1,88 @@
"""Tarfile fileobject handler for encrypted files."""
import tarfile
import hashlib

from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad

BLOCK_SIZE = 16

MOD_READ = 'r'
MOD_WRITE = 'w'


class SecureTarFile(object):
    """Handle encrypted files for tarfile library."""

    def __init__(self, name, mode, key=None, gzip=True):
        """Initialize encryption handler."""
        self._file = None
        self._mode = mode
        self._name = name

        # Tarfile options
        self._tar = None
        self._tar_mode = f"{mode}|gz" if gzip else f"{mode}|"

        # Encryption/Decryption
        self._aes = None
        self._key = key

    def __enter__(self):
        """Start context manager tarfile."""
        if not self._key:
            self._tar = tarfile.open(name=str(self._name), mode=self._tar_mode)
            return self._tar

        # Encrypted/Decrypted tarfile
        self._file = self._name.open(f"{self._mode}b")

        # Extract IV for CBC
        if self._mode == MOD_READ:
            cbc_rand = self._file.read(16)
        else:
            cbc_rand = get_random_bytes(16)
            self._file.write(cbc_rand)
        self._aes = AES.new(
            self._key, AES.MODE_CBC, iv=_generate_iv(self._key, cbc_rand))

        self._tar = tarfile.open(fileobj=self, mode=self._tar_mode)
        return self._tar

    def __exit__(self, exc_type, exc_value, traceback):
        """Close file."""
        if self._tar:
            self._tar.close()
        if self._file:
            self._file.close()

    def write(self, data):
        """Write data."""
        if len(data) % BLOCK_SIZE != 0:
            data = pad(data, BLOCK_SIZE)
        self._file.write(self._aes.encrypt(data))

    def read(self, size=0):
        """Read data."""
        return self._aes.decrypt(self._file.read(size))

    @property
    def path(self):
        """Return path object of tarfile."""
        return self._name

    @property
    def size(self):
        """Return snapshot size."""
        if not self._name.is_file():
            return 0
        return round(self._name.stat().st_size / 1048576, 2)  # calc mbyte


def _generate_iv(key, salt):
    """Generate an IV from key and salt."""
    temp_iv = key + salt
    for _ in range(100):
        temp_iv = hashlib.sha256(temp_iv).digest()
    return temp_iv[:16]
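SecureTarFile is a drop-in replacement for tarfile.open as a context manager: without a key it opens a plain (optionally gzipped) tar, while with a key it streams everything through AES-CBC and prefixes the file with 16 random bytes that salt the IV. A usage sketch, with illustrative paths and a fixed 16 byte key standing in for password_to_key output:

    from pathlib import Path

    key = b"0123456789abcdef"  # illustrative; normally password_to_key(...)

    # Write: members are encrypted on the fly.
    with SecureTarFile(Path("data.tar.gz"), 'w', key=key) as tar:
        tar.add("somedir", arcname=".")

    # Read back with the same key. A wrong key yields tar garbage rather
    # than a clean error, which is why the snapshot code validates the
    # password separately via password_for_validating.
    with SecureTarFile(Path("data.tar.gz"), 'r', key=key) as tar:
        tar.extractall(path="restored")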
hassio/validate.py

@@ -1,18 +1,23 @@
"""Validate functions."""
import voluptuous as vol
import uuid

import voluptuous as vol
import pytz

from .const import (
    ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_SESSIONS, ATTR_PASSWORD,
    ATTR_TOTP, ATTR_SECURITY, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
    ATTR_API_ENDPOINT, ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT,
    ATTR_AUDIO_INPUT, ATTR_HOMEASSISTANT, ATTR_HASSIO)
    ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
    ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
    ATTR_PASSWORD, ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT,
    ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG, ATTR_WAIT_BOOT, ATTR_UUID)


NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
HASS_DEVICES = [vol.Match(r"^[^/]*$")]
ALSA_CHANNEL = vol.Match(r"\d+,\d+")
WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")

# pylint: disable=no-value-for-parameter
REPOSITORIES = vol.All([vol.Url()], vol.Unique())


def validate_timezone(timezone):
@@ -28,11 +33,12 @@ def validate_timezone(timezone):
    return timezone


# pylint: disable=inconsistent-return-statements
def convert_to_docker_ports(data):
    """Convert data into docker port list."""
    # dynamic ports
    if data is None:
        return
        return None

    # single port
    if isinstance(data, int):
@@ -55,11 +61,20 @@ DOCKER_PORTS = vol.Schema({
})


# pylint: disable=no-value-for-parameter
SCHEMA_HASS_CONFIG = vol.Schema({
    vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
        vol.Match(r"^[0-9a-f]{32}$"),
    vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
})
    vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
    vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
    vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
    vol.Optional(ATTR_WAIT_BOOT, default=600):
        vol.All(vol.Coerce(int), vol.Range(min=60)),
}, extra=vol.REMOVE_EXTRA)


# pylint: disable=no-value-for-parameter
@@ -67,19 +82,17 @@ SCHEMA_UPDATER_CONFIG = vol.Schema({
    vol.Optional(ATTR_BETA_CHANNEL, default=False): vol.Boolean(),
    vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
    vol.Optional(ATTR_HASSIO): vol.Coerce(str),
})
}, extra=vol.REMOVE_EXTRA)


# pylint: disable=no-value-for-parameter
SCHEMA_HASSIO_CONFIG = vol.Schema({
    vol.Optional(ATTR_API_ENDPOINT): vol.Coerce(str),
    vol.Optional(ATTR_TIMEZONE, default='UTC'): validate_timezone,
    vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
    vol.Optional(ATTR_SECURITY, default=False): vol.Boolean(),
    vol.Optional(ATTR_TOTP): vol.Coerce(str),
    vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
    vol.Optional(ATTR_SESSIONS, default={}):
        vol.Schema({vol.Coerce(str): vol.Coerce(str)}),
    vol.Optional(ATTR_LAST_BOOT): vol.Coerce(str),
    vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[
        "https://github.com/hassio-addons/repository",
    ]): REPOSITORIES,
    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
    vol.Optional(ATTR_WAIT_BOOT, default=5): WAIT_BOOT,
}, extra=vol.REMOVE_EXTRA)
Submodule home-assistant-polymer updated: 1aa387a72f...1a18ee2755

misc/hassio.png: binary file changed (42 KiB before, 37 KiB after); not shown.

An architecture diagram's draw.io mxfile source was updated in place (@@ -1 +1 @@); the embedded XML blob is omitted here. A second binary image (36 KiB) and its draw.io mxfile source were deleted (@@ -1 +0,0 @@); that XML blob is likewise omitted.
setup.py

@@ -12,7 +12,7 @@ setup(
    url='https://home-assistant.io/',
    description=('Open-source private cloud os for Home-Assistant'
                 ' based on ResinOS'),
    long_description=('A maintenainless private cloud operator system that'
    long_description=('A maintainless private cloud operator system that'
                      'setup a Home-Assistant instance. Based on ResinOS'),
    classifiers=[
        'Intended Audience :: End Users/Desktop',
@@ -24,29 +24,30 @@
        'Topic :: Scientific/Engineering :: Atmospheric Science',
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    keywords=['docker', 'home-assistant', 'api'],
    zip_safe=False,
    platforms='any',
    packages=[
        'hassio',
        'hassio.dock',
        'hassio.api',
        'hassio.docker',
        'hassio.addons',
        'hassio.api',
        'hassio.misc',
        'hassio.utils',
        'hassio.snapshots'
    ],
    include_package_data=True,
    install_requires=[
        'async_timeout',
        'aiohttp',
        'docker',
        'colorlog',
        'voluptuous',
        'gitpython',
        'pyotp',
        'pyqrcode',
        'pytz',
        'pyudev'
        'async_timeout==2.0.0',
        'aiohttp==2.3.10',
        'docker==3.1.0',
        'colorlog==3.1.2',
        'voluptuous==0.11.1',
        'gitpython==2.1.8',
        'pytz==2018.3',
        'pyudev==0.21.0',
        'pycryptodome==3.4.11'
    ]
)
tox.ini

@@ -2,8 +2,6 @@
envlist = lint

[testenv]
setenv =
    PYTHONPATH = {toxinidir}:{toxinidir}/hassio
deps =
    flake8
    pylint
@@ -13,4 +11,4 @@ basepython = python3
ignore_errors = True
commands =
    flake8 hassio
    pylint hassio
    pylint --rcfile pylintrc hassio
version.json

@@ -1,7 +1,7 @@
{
    "hassio": "0.56",
    "homeassistant": "0.51.2",
    "resinos": "1.0",
    "hassio": "0.94",
    "homeassistant": "0.63.3",
    "resinos": "1.1",
    "resinhup": "0.3",
    "generic": "0.3",
    "cluster": "0.1"