Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-11 10:09:21 +00:00
Compare commits
378 Commits
3  .gitmodules  (vendored, new file)

@@ -0,0 +1,3 @@
[submodule "home-assistant-polymer"]
	path = home-assistant-polymer
	url = https://github.com/home-assistant/home-assistant-polymer
@@ -2,7 +2,7 @@ sudo: false
matrix:
  fast_finish: true
  include:
-    - python: "3.5"
+    - python: "3.6"

cache:
  directories:
406  API.md

@@ -1,10 +1,11 @@
-# HassIO Server
+# Hass.io Server

-## HassIO REST API
+## Hass.io RESTful API

-Interface for HomeAssistant to controll things from supervisor.
+Interface for Home Assistant to control things from supervisor.

On error:

```json
{
    "result": "error",
@@ -12,7 +13,8 @@ On error:
}
```

-On success
+On success:

```json
{
    "result": "ok",
@@ -20,179 +22,435 @@ On success
}
```
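Every response uses this envelope, so a client can branch on `result` before touching the payload. A minimal client sketch in Python (the base URL and the `requests` dependency are illustrative assumptions; the payload fields beyond `result` are truncated in this diff):

```python
import requests  # assumption: any HTTP client works; requests keeps the sketch short

SUPERVISOR = "http://hassio"  # assumption: supervisor address as seen from a container


def api_call(method, path, payload=None):
    """Call a supervisor endpoint and check the result envelope."""
    resp = requests.request(method, SUPERVISOR + path, json=payload)
    body = resp.json()
    if body["result"] != "ok":
        raise RuntimeError("Supervisor call failed: {}".format(body))
    return body


api_call("GET", "/supervisor/ping")
```

The later sketches in this section reuse this `api_call` helper.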
-### HassIO
+### Hass.io

-- `/supervisor/ping`
+- GET `/supervisor/ping`
+- GET `/supervisor/info`
+
+The addons from `addons` are only the installed ones.

-- `/supervisor/info`

```json
{
    "version": "INSTALL_VERSION",
-    "current": "CURRENT_VERSION",
-    "beta": "true|false",
+    "last_version": "LAST_VERSION",
+    "arch": "armhf|aarch64|i386|amd64",
+    "beta_channel": "true|false",
+    "timezone": "TIMEZONE",
    "addons": [
        {
            "name": "xy bla",
            "slug": "xy",
-            "version": "CURRENT_VERSION",
-            "installed": "none|INSTALL_VERSION",
-            "dedicated": "bool",
-            "description": "description"
+            "description": "description",
+            "repository": "12345678|null",
+            "version": "LAST_VERSION",
+            "installed": "INSTALL_VERSION",
+            "logo": "bool",
+            "state": "started|stopped"
        }
    ],
+    "addons_repositories": [
+        "REPO_URL"
+    ]
}
```
+- POST `/supervisor/update`
+
+Optional:
+
+```json
+{
+    "version": "VERSION"
+}
+```
+
+- POST `/supervisor/options`
+
+```json
+{
+    "beta_channel": "true|false",
+    "timezone": "TIMEZONE",
+    "addons_repositories": [
+        "REPO_URL"
+    ]
+}
+```
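Taken together, `/supervisor/options` and `/supervisor/info` are enough to manage custom add-on repositories programmatically. A sketch with the `api_call` helper from above (the repository URL is a placeholder):

```python
# Register a custom add-on repository, then confirm it is listed.
api_call("POST", "/supervisor/options", {
    "addons_repositories": ["https://example.com/my-addons"],  # placeholder URL
})
info = api_call("GET", "/supervisor/info")
print(info)  # the new URL should appear under addons_repositories
```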
+- POST `/supervisor/reload`
+
+Reload addons/version.
+
+- GET `/supervisor/logs`
+
+Output is the raw Docker log.
+
+### Security
+
+- GET `/security/info`
+
+```json
+{
+    "initialize": "bool",
+    "totp": "bool"
+}
+```
+
+- POST `/security/options`
+
+```json
+{
+    "password": "xy"
+}
+```
+
+- POST `/security/totp`
+
+```json
+{
+    "password": "xy"
+}
+```
+
+Returns a QR code.
+
+- POST `/security/session`
+
+```json
+{
+    "password": "xy",
+    "totp": "null|123456"
+}
+```
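The security endpoints compose into a small login flow: set a password, optionally enable TOTP, then open a session. A hedged sketch with the same helper (the required order of calls and the QR payload format are not specified in this document):

```python
# 1. Set the access password.
api_call("POST", "/security/options", {"password": "xy"})

# 2. Optional: enable TOTP; the response is a QR code to scan
#    (its exact format is unspecified here).
api_call("POST", "/security/totp", {"password": "xy"})

# 3. Open a session; "totp" stays null when TOTP is not enabled.
api_call("POST", "/security/session", {"password": "xy", "totp": None})
```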
+### Backup/Snapshot
+
+- GET `/snapshots`
+
+```json
+{
+    "snapshots": [
+        {
+            "slug": "SLUG",
+            "date": "ISO",
+            "name": "Custom name"
+        }
+    ]
+}
+```

-- `/supervisor/update`
-
-Optional:
+- POST `/snapshots/reload`
+
+- POST `/snapshots/new/full`

```json
{
-    "version": "VERSION"
+    "name": "Optional"
}
```

-- `/supervisor/option`
+- POST `/snapshots/new/partial`

```json
{
-    "beta": "true|false"
+    "name": "Optional",
+    "addons": ["ADDON_SLUG"],
+    "folders": ["FOLDER_NAME"]
}
```

-- `/supervisor/reload`
-
-Reload addons/version.
+- GET `/snapshots/{slug}/info`

-- `/supervisor/logs`

+```json
+{
+    "slug": "SNAPSHOT ID",
+    "type": "full|partial",
+    "name": "custom snapshot name / description",
+    "date": "ISO",
+    "size": "SIZE_IN_MB",
+    "homeassistant": {
+        "version": "INSTALLED_HASS_VERSION",
+        "devices": []
+    },
+    "addons": [
+        {
+            "slug": "ADDON_SLUG",
+            "name": "NAME",
+            "version": "INSTALLED_VERSION"
+        }
+    ],
+    "repositories": ["URL"],
+    "folders": ["NAME"]
+}
+```

-Output the raw docker log
+- POST `/snapshots/{slug}/remove`
+- POST `/snapshots/{slug}/restore/full`
+- POST `/snapshots/{slug}/restore/partial`
+
+```json
+{
+    "homeassistant": "bool",
+    "addons": ["ADDON_SLUG"],
+    "folders": ["FOLDER_NAME"]
+}
+```
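These endpoints make a backup round-trip scriptable: create a snapshot before a risky change and restore selectively afterwards. A sketch (slugs and folder names are illustrative; field access assumes the payloads are returned as listed above):

```python
# Create a partial snapshot of one add-on plus one data folder.
api_call("POST", "/snapshots/new/partial", {
    "name": "before-upgrade",
    "addons": ["xy"],     # illustrative ADDON_SLUG
    "folders": ["ssl"],   # illustrative FOLDER_NAME
})

# Later: pick a snapshot and restore only Home Assistant from it.
overview = api_call("GET", "/snapshots")
slug = overview["snapshots"][0]["slug"]  # assumes the list is exposed as shown
api_call("POST", "/snapshots/{}/restore/partial".format(slug),
         {"homeassistant": True, "addons": [], "folders": []})
```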
### Host

+- POST `/host/reload`

-- `/host/shutdown`
+- POST `/host/shutdown`

-- `/host/reboot`
+- POST `/host/reboot`

-- `/host/info`
-
-See HostControll info command.
+- GET `/host/info`

```json
{
-    "os": "",
+    "type": "",
    "version": "",
-    "current": "",
-    "level": "",
+    "last_version": "",
+    "features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"],
    "hostname": "",
+    "os": "",
+    "audio": {
+        "input": "0,0",
+        "output": "0,0"
+    }
}
```

+- POST `/host/options`
+
+```json
+{
+    "audio_input": "0,0",
+    "audio_output": "0,0"
+}
+```

-- `/host/update`
+- POST `/host/update`

Optional:

```json
{
    "version": "VERSION"
}
```

+- GET `/host/hardware`
+
+```json
+{
+    "serial": ["/dev/xy"],
+    "input": ["Input device name"],
+    "disk": ["/dev/sdax"],
+    "audio": {
+        "CARD_ID": {
+            "name": "xy",
+            "type": "microphone",
+            "devices": {
+                "DEV_ID": "type of device"
+            }
+        }
+    }
+}
+```
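`/host/hardware` pairs naturally with `/host/options`: discover the ALSA cards first, then pin the default devices. Sketch (assuming the payload is returned as shown above):

```python
hardware = api_call("GET", "/host/hardware")
print(hardware["audio"])  # CARD_ID -> {"name": ..., "type": ..., "devices": {...}}

# Pin default audio to card 0, device 0 ("CARD,DEVICE" in ALSA notation).
api_call("POST", "/host/options", {"audio_input": "0,0", "audio_output": "0,0"})
```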
### Network

-- `/network/info`
+- GET `/network/info`
+
+```json
+{
+    "hostname": ""
+}
+```

-- `/network/options`
-
-```json
-{
-    "hostname": "",
-    "mode": "dhcp|fixed",
-    "ssid": "",
-    "ip": "",
-    "netmask": "",
-    "gateway": ""
-}
-```
+- POST `/network/options`
+
+```json
+{
+    "hostname": ""
+}
+```

-### HomeAssistant
+### Home Assistant

-- `/homeassistant/info`
+- GET `/homeassistant/info`

```json
{
    "version": "INSTALL_VERSION",
-    "current": "CURRENT_VERSION"
+    "last_version": "LAST_VERSION",
+    "devices": [""],
+    "image": "str",
+    "custom": "bool -> if custom image"
}
```

-- `/homeassistant/update`
+- POST `/homeassistant/update`

Optional:

```json
{
    "version": "VERSION"
}
```

-- `/homeassistant/logs`
+- GET `/homeassistant/logs`

-Output the raw docker log
+Output is the raw Docker log.

+- POST `/homeassistant/restart`
+
+- POST `/homeassistant/options`
+
+```json
+{
+    "image": "Optional|null",
+    "last_version": "Optional for custom image|null"
+}
+```
+
+Setting `image` and `last_version` to `null` resets these options.

-- `/addons/{addon}/info`
-
-```json
-{
-    "version": "VERSION",
-    "current": "CURRENT_VERSION",
-    "state": "started|stopped",
-    "devices": []
-}
-```

-### REST API addons
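A typical maintenance task chains these calls: compare the installed version against the latest, update if behind, then watch the log output. Sketch (field access assumes the payloads are returned as documented above):

```python
info = api_call("GET", "/homeassistant/info")
if info["version"] != info["last_version"]:
    api_call("POST", "/homeassistant/update")  # omit "version" to take the latest

# The log endpoint returns raw Docker output, not the JSON envelope,
# so it bypasses api_call (requests is imported in the first sketch).
print(requests.get(SUPERVISOR + "/homeassistant/logs").text)
```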
+### RESTful for API addons
+
+- GET `/addons`
+
+Get all available addons.
+
+```json
+{
+    "addons": [
+        {
+            "name": "xy bla",
+            "slug": "xy",
+            "description": "description",
+            "arch": ["armhf", "aarch64", "i386", "amd64"],
+            "repository": "core|local|REP_ID",
+            "version": "LAST_VERSION",
+            "installed": "none|INSTALL_VERSION",
+            "detached": "bool",
+            "build": "bool",
+            "privileged": ["NET_ADMIN", "SYS_ADMIN"],
+            "devices": ["/dev/xy"],
+            "url": "null|url",
+            "logo": "bool",
+            "audio": "bool",
+            "hassio_api": "bool"
+        }
+    ],
+    "repositories": [
+        {
+            "slug": "12345678",
+            "name": "Repository Name|unknown",
+            "source": "URL_OF_REPOSITORY",
+            "url": "WEBSITE|REPOSITORY",
+            "maintainer": "BLA BLU <fla@dld.ch>|unknown"
+        }
+    ]
+}
+```
+
+- POST `/addons/reload`
+
+- GET `/addons/{addon}/info`
+
+```json
+{
+    "name": "xy bla",
+    "description": "description",
+    "auto_update": "bool",
+    "url": "null|url of addon",
+    "detached": "bool",
+    "repository": "12345678|null",
+    "version": "null|VERSION_INSTALLED",
+    "last_version": "LAST_VERSION",
+    "state": "none|started|stopped",
+    "boot": "auto|manual",
+    "build": "bool",
+    "options": "{}",
+    "network": "{}|null",
+    "host_network": "bool",
+    "privileged": ["NET_ADMIN", "SYS_ADMIN"],
+    "devices": ["/dev/xy"],
+    "logo": "bool",
+    "hassio_api": "bool",
+    "webui": "null|http(s)://[HOST]:port/xy/zx",
+    "audio": "bool",
+    "audio_input": "null|0,0",
+    "audio_output": "null|0,0"
+}
+```
+
+- GET `/addons/{addon}/logo`
+
+- POST `/addons/{addon}/options`
+
+```json
+{
+    "boot": "auto|manual",
+    "auto_update": "bool",
+    "network": {
+        "CONTAINER": "port|[ip, port]"
+    },
+    "options": {},
+    "audio_output": "null|0,0",
+    "audio_input": "null|0,0"
+}
+```

-- `/addons/{addon}/options`
-
-```json
-{
-    "boot": "auto|manual",
-    "options": {},
-}
-```

+To reset custom network/audio settings, set them to `null`.
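The add-on lifecycle is then install, configure, start. Sketch (the slug, port mapping, and options are placeholders; valid options come from each add-on's own schema):

```python
ADDON = "xy"  # placeholder slug from the listings above

api_call("POST", "/addons/{}/install".format(ADDON))
api_call("POST", "/addons/{}/options".format(ADDON), {
    "boot": "auto",
    "options": {},                  # add-on specific settings go here
    "network": {"8080/tcp": 8080},  # CONTAINER port -> host port (illustrative)
})
api_call("POST", "/addons/{}/start".format(ADDON))
```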
-- `/addons/{addon}/start`
+- POST `/addons/{addon}/start`

-- `/addons/{addon}/stop`
+- POST `/addons/{addon}/stop`

-- `/addons/{addon}/install`
+- POST `/addons/{addon}/install`

Optional:

```json
{
    "version": "VERSION"
}
```

-- `/addons/{addon}/uninstall`
+- POST `/addons/{addon}/uninstall`

-- `/addons/{addon}/update`
+- POST `/addons/{addon}/update`

Optional:

```json
{
    "version": "VERSION"
}
```

-- `/addons/{addon}/logs`
+- GET `/addons/{addon}/logs`

-Output the raw docker log
+Output is the raw Docker log.

+- POST `/addons/{addon}/restart`

-## Host Controll
+## Host Control

-Communicate over unix socket with a host daemon.
+Communicate over UNIX socket with a host daemon.

- commands

```
# info
--> {'os', 'version', 'current', 'level', 'hostname'}
+-> {'type', 'version', 'last_version', 'features', 'hostname'}
# reboot
# shutdown
# host-update [v]

# hostname xy

# network info
# network hostname xy
-> {}
# network wlan ssd xy
# network wlan password xy
# network int ip xy
@@ -200,10 +458,14 @@ Communicate over unix socket with a host daemon.
# network int route xy
```

-level:
-- 1: power functions
-- 2: host update
-- 4: network functions
+Features:
+
+- shutdown
+- reboot
+- update
+- hostname
+- network_info
+- network_control

Answer:
```
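The HTTP layer above ultimately forwards these commands over the UNIX socket. A heavily hedged sketch of the raw exchange (the socket path and the newline framing are assumptions; the document only specifies command names and reply shapes):

```python
import socket

SOCKET_PATH = "/var/run/hassio-hc.sock"  # hypothetical path, not given in this document

with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
    sock.connect(SOCKET_PATH)
    sock.sendall(b"info\n")           # assumed framing: one command per line
    reply = sock.recv(4096).decode()  # expected: the info dict shown above
    print(reply)
```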
218  LICENSE

@@ -1,29 +1,201 @@
-BSD 3-Clause License
-
-Copyright (c) 2017, Pascal Vizeli
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright notice, this
-  list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright notice,
-  this list of conditions and the following disclaimer in the documentation
-  and/or other materials provided with the distribution.
-
-* Neither the name of the copyright holder nor the names of its
-  contributors may be used to endorse or promote products derived from
-  this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "{}"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2017 Pascal Vizeli
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
3  MANIFEST.in  (new file)

@@ -0,0 +1,3 @@
include LICENSE.md
graft hassio
recursive-exclude * *.py[co]
58  README.md

@@ -1,56 +1,14 @@
-# HassIO
-First private cloud solution for home automation.
+# Hass.io

-It is a docker image (supervisor) they manage HomeAssistant docker and give a interface to controll itself over UI. It have a own eco system with addons to extend the functionality in a easy way.
+### First private cloud solution for home automation.

-[HassIO-Addons](https://github.com/pvizeli/hassio-addons) | [HassIO-Build](https://github.com/pvizeli/hassio-build)
+Hass.io is a Docker-based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant, which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.

-**HassIO is at the moment on development and not ready to use productive!**

-## Feature in progress
-- Backup/Restore
-- MQTT addon
-- DHCP-Server addon
+- [Hass.io Addons](https://github.com/home-assistant/hassio-addons)
+- [Hass.io Build](https://github.com/home-assistant/hassio-build)

-# HomeAssistant
+## Installation

-## SSL
-
-All addons they can create SSL certs do that in same schema. So you can put follow lines to your `configuration.yaml`.
-
-```yaml
-http:
-  ssl_certificate: /ssl/fullchain.pem
-  ssl_key: /ssl/privkey.pem
-```
-
-# Hardware Image
-
-The image is based on ResinOS and Yocto Linux. It comes with the HassIO supervisor pre-installed. This includes support to update the supervisor over the air. After flashing your host OS will not require any more maintenance! The image does not include Home Assistant, instead it will downloaded when the image boots up for the first time.
-
-Download can be found here: https://drive.google.com/drive/folders/0B2o1Uz6l1wVNbFJnb2gwNXJja28?usp=sharing
-
-After extracting the archive, flash it to a drive using [Etcher](https://etcher.io/).
-
-## History
-- **0.1**: First techpreview with dumy supervisor (ResinOS 2.0.0-RC5)
-- **0.2**: Fix some bugs and update it to HassIO 0.2
-- **0.3**: Update HostControll and feature for HassIO 0.3 (ResinOS 2.0.0 / need reflash)
-- **0.4**: Update HostControll and bring resinos OTA (resinhub) back (ResinOS 2.0.0-rev3)
-
-## Configuring the image
-
-You can configure the WiFi network that the image should connect to after flashing using [`resin-device-toolbox`](https://resinos.io/docs/raspberrypi3/gettingstarted/#install-resin-device-toolbox).
-
-## Developer access to ResinOS host
-
-Create an `authorized_keys` file in the boot partition of your SD card with your public key. After a boot it, you can acces your device as root over ssh on port 22222.
-
-## Troubleshooting
-
-Read logoutput from supervisor:
-
-```bash
-journalctl -f -u resin-supervisor.service
-docker logs homeassistant
-```
-
-## Install on a own System
-
-We have a installer to install HassIO on own linux device without our hardware image:
-https://github.com/pvizeli/hassio-build/tree/master/install
+Installation instructions can be found at [https://home-assistant.io/hassio](https://home-assistant.io/hassio).
@@ -1,5 +1,6 @@
"""Main file for HassIO."""
import asyncio
+from concurrent.futures import ThreadPoolExecutor
import logging
import sys

@@ -17,16 +18,27 @@ if __name__ == "__main__":
        exit(1)

    loop = asyncio.get_event_loop()
-    hassio = core.HassIO(loop)
+    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
+    loop.set_default_executor(executor)
+
+    _LOGGER.info("Initialize Hassio setup")
+    config = bootstrap.initialize_system_data()
+    hassio = core.HassIO(loop, config)
+
+    bootstrap.migrate_system_env(config)

    _LOGGER.info("Run Hassio setup")
    loop.run_until_complete(hassio.setup())

-    _LOGGER.info("Start Hassio task")
+    _LOGGER.info("Start Hassio")
    loop.call_soon_threadsafe(loop.create_task, hassio.start())
+    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)

    _LOGGER.info("Run Hassio loop")
    loop.run_forever()

+    _LOGGER.info("Cleanup system")
+    executor.shutdown(wait=False)
    loop.close()

    _LOGGER.info("Close Hassio")
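`bootstrap.reg_signal` itself is not part of this diff; its role is to stop the core cleanly when the process receives a termination signal. A hedged sketch of what such a registration can look like with asyncio (names and behavior are assumptions, not the actual implementation):

```python
import signal


def reg_signal(loop, hassio):
    """Map termination signals to a clean shutdown task (illustrative)."""
    for sig in (signal.SIGTERM, signal.SIGHUP, signal.SIGINT):
        # hassio.stop() is assumed to be a coroutine that tears down cleanly
        loop.add_signal_handler(sig, lambda: loop.create_task(hassio.stop()))
```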
@@ -1,164 +1,133 @@
"""Init file for HassIO addons."""
import asyncio
import logging
-import os
-import shutil

-from .data import AddonsData
-from .git import AddonsRepo
-from ..const import STATE_STOPPED, STATE_STARTED
-from ..dock.addon import DockerAddon
+from .addon import Addon
+from .repository import Repository
+from .data import Data
+from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO

_LOGGER = logging.getLogger(__name__)

+BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
+

-class AddonManager(AddonsData):
+class AddonManager(object):
    """Manage addons inside HassIO."""

    def __init__(self, config, loop, dock):
        """Initialize docker base wrapper."""
-        super().__init__(config)
-
        self.loop = loop
        self.config = config
        self.dock = dock
-        self.repo = AddonsRepo(config, loop)
-        self.dockers = {}
+        self.data = Data(config)
+        self.addons = {}
+        self.repositories = {}
+
+    @property
+    def list_addons(self):
+        """Return a list of all addons."""
+        return list(self.addons.values())
+
+    @property
+    def list_repositories(self):
+        """Return list of addon repositories."""
+        return list(self.repositories.values())
+
+    def get(self, addon_slug):
+        """Return an addon from its slug."""
+        return self.addons.get(addon_slug)

-    async def prepare(self, arch):
+    async def prepare(self):
        """Startup addon management."""
-        self.arch = arch
-
-        # load addon repository
-        if await self.repo.load():
-            self.read_addons_repo()
-
-        # load installed addons
-        for addon in self.list_installed:
-            self.dockers[addon] = DockerAddon(
-                self.config, self.loop, self.dock, self, addon)
-            await self.dockers[addon].attach()
+        self.data.reload()
+
+        # init hassio built-in repositories
+        repositories = \
+            set(self.config.addons_repositories) | BUILTIN_REPOSITORIES
+
+        # init custom repositories & load addons
+        await self.load_repositories(repositories)

    async def reload(self):
        """Update addons from repo and reload list."""
-        if not await self.repo.pull():
-            return
-        self.read_addons_repo()
-
-        # remove stalled addons
-        for addon in self.list_removed:
-            _LOGGER.warning("Dedicated addon '%s' found!", addon)
+        tasks = [repository.update() for repository in
+                 self.repositories.values()]
+        if tasks:
+            await asyncio.wait(tasks, loop=self.loop)
+
+        # read data from repositories
+        self.data.reload()
+
+        # update addons
+        await self.load_addons()
+
+    async def load_repositories(self, list_repositories):
+        """Add a new custom repository."""
+        new_rep = set(list_repositories)
+        old_rep = set(self.repositories)
+
+        # add new repository
+        async def _add_repository(url):
+            """Helper function to async add repository."""
+            repository = Repository(self.config, self.loop, self.data, url)
+            if not await repository.load():
+                _LOGGER.error("Can't load from repository %s", url)
+                return
+            self.repositories[url] = repository
+
+            # don't add built-in repository to config
+            if url not in BUILTIN_REPOSITORIES:
+                self.config.add_addon_repository(url)
+
+        tasks = [_add_repository(url) for url in new_rep - old_rep]
+        if tasks:
+            await asyncio.wait(tasks, loop=self.loop)
+
+        # del unused repository
+        for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
+            self.repositories.pop(url).remove()
+            self.config.drop_addon_repository(url)
+
+        # update data
+        self.data.reload()
+        await self.load_addons()
+
+    async def load_addons(self):
+        """Update/add internal addon store."""
+        all_addons = set(self.data.system) | set(self.data.cache)
+
+        # calc diff
+        add_addons = all_addons - set(self.addons)
+        del_addons = set(self.addons) - all_addons
+
+        _LOGGER.info("Load addons: %d all - %d new - %d remove",
+                     len(all_addons), len(add_addons), len(del_addons))
+
+        # new addons
+        tasks = []
+        for addon_slug in add_addons:
+            addon = Addon(
+                self.config, self.loop, self.dock, self.data, addon_slug)
+
+            tasks.append(addon.load())
+            self.addons[addon_slug] = addon
+
+        if tasks:
+            await asyncio.wait(tasks, loop=self.loop)
+
+        # remove
+        for addon_slug in del_addons:
+            self.addons.pop(addon_slug)

-    async def auto_boot(self, start_type):
+    async def auto_boot(self, stage):
        """Boot addons with mode auto."""
-        boot_list = self.list_startup(start_type)
        tasks = []
-
-        for addon in boot_list:
-            tasks.append(self.loop.create_task(self.start(addon)))
+        for addon in self.addons.values():
+            if addon.is_installed and addon.boot == BOOT_AUTO and \
+                    addon.startup == stage:
+                tasks.append(addon.start())

-        _LOGGER.info("Startup %s run %d addons", start_type, len(tasks))
+        _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

-    async def install(self, addon, version=None):
-        """Install a addon."""
-        if not self.exists_addon(addon):
-            _LOGGER.error("Addon %s not exists for install", addon)
-            return False
-
-        if self.is_installed(addon):
-            _LOGGER.error("Addon %s is already installed", addon)
-            return False
-
-        if not os.path.isdir(self.path_data(addon)):
-            _LOGGER.info("Create Home-Assistant addon data folder %s",
-                         self.path_data(addon))
-            os.mkdir(self.path_data(addon))
-
-        addon_docker = DockerAddon(
-            self.config, self.loop, self.dock, self, addon)
-
-        version = version or self.get_version(addon)
-        if not await addon_docker.install(version):
-            return False
-
-        self.dockers[addon] = addon_docker
-        self.set_addon_install(addon, version)
-        return True
-
-    async def uninstall(self, addon):
-        """Remove a addon."""
-        if not self.is_installed(addon):
-            _LOGGER.error("Addon %s is already uninstalled", addon)
-            return False
-
-        if addon not in self.dockers:
-            _LOGGER.error("No docker found for addon %s", addon)
-            return False
-
-        if not await self.dockers[addon].remove():
-            return False
-
-        if os.path.isdir(self.path_data(addon)):
-            _LOGGER.info("Remove Home-Assistant addon data folder %s",
-                         self.path_data(addon))
-            shutil.rmtree(self.path_data(addon))
-
-        self.dockers.pop(addon)
-        self.set_addon_uninstall(addon)
-        return True
-
-    async def state(self, addon):
-        """Return running state of addon."""
-        if addon not in self.dockers:
-            _LOGGER.error("No docker found for addon %s", addon)
-            return
-
-        if await self.dockers[addon].is_running():
-            return STATE_STARTED
-        return STATE_STOPPED
-
-    async def start(self, addon):
-        """Set options and start addon."""
-        if addon not in self.dockers:
-            _LOGGER.error("No docker found for addon %s", addon)
-            return False
-
-        if not self.write_addon_options(addon):
-            _LOGGER.error("Can't write options for addon %s", addon)
-            return False
-
-        return await self.dockers[addon].run()
-
-    async def stop(self, addon):
-        """Stop addon."""
-        if addon not in self.dockers:
-            _LOGGER.error("No docker found for addon %s", addon)
-            return False
-
-        return await self.dockers[addon].stop()
-
-    async def update(self, addon, version=None):
-        """Update addon."""
-        if addon not in self.dockers:
-            _LOGGER.error("No docker found for addon %s", addon)
-            return False
-
-        version = version or self.get_version(addon)
-        is_running = self.dockers[addon].is_running()
-
-        # update
-        if await self.dockers[addon].update(version):
-            self.set_addon_update(addon, version)
-            if is_running:
-                await self.start(addon)
-            return True
-        return False
-
-    async def logs(self, addon):
-        """Return addons log output."""
-        if addon not in self.dockers:
-            _LOGGER.error("No docker found for addon %s", addon)
-            return False
-
-        return await self.dockers[addon].logs()
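Both `load_repositories` and `load_addons` use the same reconcile-by-set-difference pattern. Stripped of the Hass.io types, the core of it is just this runnable sketch:

```python
# current state: key -> live object; desired state: set of keys
current = {"core": object(), "local": object()}
desired = {"core", "local", "https://example.com/repo"}  # placeholder entry

to_add = desired - set(current)     # new entries: create, load, store
to_remove = set(current) - desired  # stale entries: pop and clean up

for key in to_add:
    current[key] = object()  # stands in for Repository(...) / Addon(...)
for key in to_remove:
    current.pop(key)

print(sorted(current))  # current now mirrors desired
```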
612  hassio/addons/addon.py  (new file)

@@ -0,0 +1,612 @@
"""Init file for HassIO addons."""
from copy import deepcopy
import logging
import json
from pathlib import Path, PurePath
import re
import shutil
import tarfile
from tempfile import TemporaryDirectory

import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import (
    validate_options, SCHEMA_ADDON_SNAPSHOT, MAP_VOLUME)
from ..const import (
    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
    ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
    ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
    STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
    ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
    ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT)
from .util import check_installed
from ..dock.addon import DockerAddon
from ..tools import write_json_file, read_json_file

_LOGGER = logging.getLogger(__name__)

RE_VOLUME = re.compile(MAP_VOLUME)
RE_WEBUI = re.compile(r"^(.*\[HOST\]:)\[PORT:(\d+)\](.*)$")


class Addon(object):
    """Hold data for addon inside HassIO."""

    def __init__(self, config, loop, dock, data, slug):
        """Initialize data holder."""
        self.loop = loop
        self.config = config
        self.data = data
        self._id = slug

        self.addon_docker = DockerAddon(config, loop, dock, self)

    async def load(self):
        """Async initialize of object."""
        if self.is_installed:
            await self.addon_docker.attach()

    @property
    def slug(self):
        """Return slug/id of addon."""
        return self._id

    @property
    def _mesh(self):
        """Return addon data from system or cache."""
        return self.data.system.get(self._id, self.data.cache.get(self._id))

    @property
    def is_installed(self):
        """Return True if an addon is installed."""
        return self._id in self.data.system

    @property
    def is_detached(self):
        """Return True if addon is detached."""
        return self._id not in self.data.cache

    @property
    def version_installed(self):
        """Return installed version."""
        return self.data.user.get(self._id, {}).get(ATTR_VERSION)

    def _set_install(self, version):
        """Set addon as installed."""
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: version,
        }
        self.data.save()

    def _set_uninstall(self):
        """Set addon as uninstalled."""
        self.data.system.pop(self._id, None)
        self.data.user.pop(self._id, None)
        self.data.save()

    def _set_update(self, version):
        """Update version of addon."""
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id][ATTR_VERSION] = version
        self.data.save()

    def _restore_data(self, user, system):
        """Restore data to addon."""
        self.data.user[self._id] = deepcopy(user)
        self.data.system[self._id] = deepcopy(system)
        self.data.save()

    @property
    def options(self):
        """Return options with local changes."""
        if self.is_installed:
            return {
                **self.data.system[self._id][ATTR_OPTIONS],
                **self.data.user[self._id][ATTR_OPTIONS],
            }
        return self.data.cache[self._id][ATTR_OPTIONS]

    @options.setter
    def options(self, value):
        """Store user addon options."""
        self.data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
        self.data.save()

    @property
    def boot(self):
        """Return boot config with prio local settings."""
        if ATTR_BOOT in self.data.user.get(self._id, {}):
            return self.data.user[self._id][ATTR_BOOT]
        return self._mesh[ATTR_BOOT]

    @boot.setter
    def boot(self, value):
        """Store user boot options."""
        self.data.user[self._id][ATTR_BOOT] = value
        self.data.save()

    @property
    def auto_update(self):
        """Return if auto update is enabled."""
        if ATTR_AUTO_UPDATE in self.data.user.get(self._id, {}):
            return self.data.user[self._id][ATTR_AUTO_UPDATE]

    @auto_update.setter
    def auto_update(self, value):
        """Set auto update."""
        self.data.user[self._id][ATTR_AUTO_UPDATE] = value
        self.data.save()

    @property
    def name(self):
        """Return name of addon."""
        return self._mesh[ATTR_NAME]

    @property
    def timeout(self):
        """Return timeout of addon for docker stop."""
        return self._mesh[ATTR_TIMEOUT]

    @property
    def description(self):
        """Return description of addon."""
        return self._mesh[ATTR_DESCRIPTON]

    @property
    def repository(self):
        """Return repository of addon."""
        return self._mesh[ATTR_REPOSITORY]

    @property
    def last_version(self):
        """Return version of addon."""
        if self._id in self.data.cache:
            return self.data.cache[self._id][ATTR_VERSION]
        return self.version_installed

    @property
    def startup(self):
        """Return startup type of addon."""
        return self._mesh.get(ATTR_STARTUP)

    @property
    def ports(self):
        """Return ports of addon."""
        if self.network_mode != 'bridge' or ATTR_PORTS not in self._mesh:
            return

        if not self.is_installed or \
                ATTR_NETWORK not in self.data.user[self._id]:
            return self._mesh[ATTR_PORTS]
        return self.data.user[self._id][ATTR_NETWORK]

    @ports.setter
    def ports(self, value):
        """Set custom ports of addon."""
        if value is None:
            self.data.user[self._id].pop(ATTR_NETWORK, None)
        else:
            new_ports = {}
            for container_port, host_port in value.items():
                if container_port in self._mesh.get(ATTR_PORTS, {}):
                    new_ports[container_port] = host_port

            self.data.user[self._id][ATTR_NETWORK] = new_ports

        self.data.save()

    @property
    def webui(self):
        """Return URL to webui or None."""
        if ATTR_WEBUI not in self._mesh:
            return

        webui = self._mesh[ATTR_WEBUI]
        dock_port = RE_WEBUI.sub(r"\2", webui)
        if self.ports is None:
            real_port = dock_port
        else:
            real_port = self.ports.get("{}/tcp".format(dock_port), dock_port)

        # for interface config or port lists
        if isinstance(real_port, (tuple, list)):
            real_port = real_port[-1]

        return RE_WEBUI.sub(r"\g<1>{}\g<3>".format(real_port), webui)
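An aside before the listing continues: the `webui` template resolution above is easiest to see in isolation. The regex keeps `[HOST]` for the caller and swaps `[PORT:<container>]` for the mapped host port (the template and port mapping here are illustrative):

```python
import re

RE_WEBUI = re.compile(r"^(.*\[HOST\]:)\[PORT:(\d+)\](.*)$")

template = "http://[HOST]:[PORT:8123]/"    # as an add-on would declare it
dock_port = RE_WEBUI.sub(r"\2", template)  # -> "8123" (container port)

ports = {"8123/tcp": 8080}  # illustrative custom port mapping
real_port = ports.get("{}/tcp".format(dock_port), dock_port)

print(RE_WEBUI.sub(r"\g<1>{}\g<3>".format(real_port), template))
# -> http://[HOST]:8080/  ([HOST] is left for the caller to fill in)
```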
    @property
    def network_mode(self):
        """Return network mode of addon."""
        if self._mesh[ATTR_HOST_NETWORK]:
            return 'host'
        return 'bridge'

    @property
    def devices(self):
        """Return devices of addon."""
        return self._mesh.get(ATTR_DEVICES)

    @property
    def tmpfs(self):
        """Return tmpfs of addon."""
        return self._mesh.get(ATTR_TMPFS)

    @property
    def environment(self):
        """Return environment of addon."""
        return self._mesh.get(ATTR_ENVIRONMENT)

    @property
    def privileged(self):
        """Return list of privileged capabilities."""
        return self._mesh.get(ATTR_PRIVILEGED)

    @property
    def use_hassio_api(self):
        """Return True if the add-on has access to the hassio API."""
        return self._mesh[ATTR_HASSIO_API]

    @property
    def with_audio(self):
        """Return True if the add-on has access to audio."""
        return self._mesh[ATTR_AUDIO]

    @property
    def audio_output(self):
        """Return ALSA config for output or None."""
        if not self.with_audio:
            return

        setting = self.config.audio_output
        if self.is_installed and ATTR_AUDIO_OUTPUT in self.data.user[self._id]:
            setting = self.data.user[self._id][ATTR_AUDIO_OUTPUT]
        return setting

    @audio_output.setter
    def audio_output(self, value):
        """Set/remove custom audio output settings."""
        if value is None:
            self.data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
        else:
            self.data.user[self._id][ATTR_AUDIO_OUTPUT] = value
        self.data.save()

    @property
    def audio_input(self):
        """Return ALSA config for input or None."""
        if not self.with_audio:
            return

        setting = self.config.audio_input
        if self.is_installed and ATTR_AUDIO_INPUT in self.data.user[self._id]:
            setting = self.data.user[self._id][ATTR_AUDIO_INPUT]
        return setting

    @audio_input.setter
    def audio_input(self, value):
        """Set/remove custom audio input settings."""
        if value is None:
            self.data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
        else:
            self.data.user[self._id][ATTR_AUDIO_INPUT] = value
        self.data.save()

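    # Resolution order for both audio channels, as implemented above: a
    # per-addon user override (ATTR_AUDIO_OUTPUT / ATTR_AUDIO_INPUT) wins
    # over the supervisor-wide default from self.config, and add-ons without
    # audio access always resolve to None. Illustrative values only:
    #
    #   config.audio_output == "0,0"   # global default
    #   addon.audio_output = "1,0"     # user override, persisted via save()
    #   addon.audio_output -> "1,0"
    #   addon.audio_output = None      # drop override, back to "0,0"
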
    @property
    def url(self):
        """Return url of addon."""
        return self._mesh.get(ATTR_URL)

    @property
    def with_logo(self):
        """Return True if a logo exists."""
        return self.path_logo.exists()

    @property
    def supported_arch(self):
        """Return list of supported architectures."""
        return self._mesh[ATTR_ARCH]

    @property
    def image(self):
        """Return image name of addon."""
        addon_data = self._mesh

        # Repository with dockerhub images
        if ATTR_IMAGE in addon_data:
            return addon_data[ATTR_IMAGE].format(arch=self.config.arch)

        # local build
        return "{}/{}-addon-{}".format(
            addon_data[ATTR_REPOSITORY], self.config.arch,
            addon_data[ATTR_SLUG])

    @property
    def need_build(self):
        """Return True if this addon needs a local build."""
        return ATTR_IMAGE not in self._mesh

    @property
    def map_volumes(self):
        """Return a dict of {volume: policy} from addon."""
        volumes = {}
        for volume in self._mesh[ATTR_MAP]:
            result = RE_VOLUME.match(volume)
            volumes[result.group(1)] = result.group(2) or 'ro'

        return volumes

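    # Sketch of the volume mapping parse, assuming the MAP_VOLUME pattern
    # from validate.py (group 1 = volume name, group 2 = optional mode):
    #
    #   map: ["config:rw", "ssl"]
    #   map_volumes -> {"config": "rw", "ssl": "ro"}
    #
    # A missing access mode falls back to read-only ('ro').
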
    @property
    def path_data(self):
        """Return addon data path inside supervisor."""
        return Path(self.config.path_addons_data, self._id)

    @property
    def path_extern_data(self):
        """Return addon data path external for docker."""
        return PurePath(self.config.path_extern_addons_data, self._id)

    @property
    def path_options(self):
        """Return path to addon options."""
        return Path(self.path_data, "options.json")

    @property
    def path_location(self):
        """Return path to this addon."""
        return Path(self._mesh[ATTR_LOCATON])

    @property
    def path_logo(self):
        """Return path to addon logo."""
        return Path(self.path_location, 'logo.png')

    def write_options(self):
        """Return True if addon options are written to data."""
        schema = self.schema
        options = self.options

        try:
            schema(options)
            return write_json_file(self.path_options, options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s has invalid options -> %s", self._id,
                          humanize_error(options, ex))

        return False

    @property
    def schema(self):
        """Create a schema for addon options."""
        raw_schema = self._mesh[ATTR_SCHEMA]

        if isinstance(raw_schema, bool):
            return vol.Schema(dict)
        return vol.Schema(vol.All(dict, validate_options(raw_schema)))

    def test_udpate_schema(self):
        """Check if the existing config is still valid after an update."""
        if not self.is_installed or self.is_detached:
            return True

        # load next schema
        new_raw_schema = self.data.cache[self._id][ATTR_SCHEMA]
        default_options = self.data.cache[self._id][ATTR_OPTIONS]

        # if disabled
        if isinstance(new_raw_schema, bool):
            return True

        # merge options
        options = {
            **self.data.user[self._id][ATTR_OPTIONS],
            **default_options,
        }

        # create voluptuous
        new_schema = \
            vol.Schema(vol.All(dict, validate_options(new_raw_schema)))

        # validate
        try:
            new_schema(options)
        except vol.Invalid:
            return False
        return True

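    # Note on the merge above: with {**user, **defaults} the later dict wins,
    # so new defaults shipped by the update overwrite clashing user values
    # for this validation check. A quick illustration of that precedence:
    #
    #   user     = {"port": 9000, "ssl": True}
    #   defaults = {"port": 8080}
    #   {**user, **defaults} -> {"port": 8080, "ssl": True}
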
    async def install(self, version=None):
        """Install an addon."""
        if self.config.arch not in self.supported_arch:
            _LOGGER.error(
                "Addon %s not supported on %s", self._id, self.config.arch)
            return False

        if self.is_installed:
            _LOGGER.error("Addon %s is already installed", self._id)
            return False

        if not self.path_data.is_dir():
            _LOGGER.info(
                "Create Home-Assistant addon data folder %s", self.path_data)
            self.path_data.mkdir()

        version = version or self.last_version
        if not await self.addon_docker.install(version):
            return False

        self._set_install(version)
        return True

    @check_installed
    async def uninstall(self):
        """Remove an addon."""
        if not await self.addon_docker.remove():
            return False

        if self.path_data.is_dir():
            _LOGGER.info(
                "Remove Home-Assistant addon data folder %s", self.path_data)
            shutil.rmtree(str(self.path_data))

        self._set_uninstall()
        return True

    async def state(self):
        """Return running state of addon."""
        if not self.is_installed:
            return STATE_NONE

        if await self.addon_docker.is_running():
            return STATE_STARTED
        return STATE_STOPPED

    @check_installed
    def start(self):
        """Set options and start addon.

        Return a coroutine.
        """
        return self.addon_docker.run()

    @check_installed
    def stop(self):
        """Stop addon.

        Return a coroutine.
        """
        return self.addon_docker.stop()

    @check_installed
    async def update(self, version=None):
        """Update addon."""
        version = version or self.last_version

        if version == self.version_installed:
            _LOGGER.warning(
                "Addon %s is already installed in version %s",
                self._id, version)
            return False

        if not await self.addon_docker.update(version):
            return False

        self._set_update(version)
        return True

    @check_installed
    def restart(self):
        """Restart addon.

        Return a coroutine.
        """
        return self.addon_docker.restart()

    @check_installed
    def logs(self):
        """Return addon log output.

        Return a coroutine.
        """
        return self.addon_docker.logs()

    @check_installed
    async def snapshot(self, tar_file):
        """Snapshot the state of an addon."""
        with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
            # store local image
            if self.need_build and not await \
                    self.addon_docker.export_image(Path(temp, "image.tar")):
                return False

            data = {
                ATTR_USER: self.data.user.get(self._id, {}),
                ATTR_SYSTEM: self.data.system.get(self._id, {}),
                ATTR_VERSION: self.version_installed,
                ATTR_STATE: await self.state(),
            }

            # store local configs/state
            if not write_json_file(Path(temp, "addon.json"), data):
                _LOGGER.error("Can't write addon.json for %s", self._id)
                return False

            # write into tarfile
            def _create_tar():
                """Write tar inside loop."""
                with tarfile.open(tar_file, "w:gz",
                                  compresslevel=1) as snapshot:
                    snapshot.add(temp, arcname=".")
                    snapshot.add(self.path_data, arcname="data")

            try:
                await self.loop.run_in_executor(None, _create_tar)
            except tarfile.TarError as err:
                _LOGGER.error("Can't write tarfile %s -> %s", tar_file, err)
                return False

        return True

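    # Resulting snapshot layout, as produced by _create_tar above (a gzip
    # tarball):
    #
    #   ./addon.json   - user/system config, installed version, run state
    #   ./image.tar    - exported docker image (locally built add-ons only)
    #   ./data/        - copy of the add-on data folder
    #
    # restore() below mirrors this layout when unpacking.
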
    async def restore(self, tar_file):
        """Restore the state of an addon."""
        with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
            # extract snapshot
            def _extract_tar():
                """Extract tar snapshot."""
                with tarfile.open(tar_file, "r:gz") as snapshot:
                    snapshot.extractall(path=Path(temp))

            try:
                await self.loop.run_in_executor(None, _extract_tar)
            except tarfile.TarError as err:
                _LOGGER.error("Can't read tarfile %s -> %s", tar_file, err)
                return False

            # read snapshot data
            try:
                data = read_json_file(Path(temp, "addon.json"))
            except (OSError, json.JSONDecodeError) as err:
                _LOGGER.error("Can't read addon.json -> %s", err)
                return False

            # validate
            try:
                data = SCHEMA_ADDON_SNAPSHOT(data)
            except vol.Invalid as err:
                _LOGGER.error("Can't validate %s, snapshot data -> %s",
                              self._id, humanize_error(data, err))
                return False

            # restore data / reload addon
            self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM])

            # check version / restore image
            version = data[ATTR_VERSION]
            if version != self.addon_docker.version:
                image_file = Path(temp, "image.tar")
                if image_file.is_file():
                    await self.addon_docker.import_image(image_file, version)
                else:
                    if await self.addon_docker.install(version):
                        await self.addon_docker.cleanup()
            else:
                await self.addon_docker.stop()

            # restore data
            def _restore_data():
                """Restore data."""
                if self.path_data.is_dir():
                    shutil.rmtree(str(self.path_data), ignore_errors=True)
                shutil.copytree(str(Path(temp, "data")), str(self.path_data))

            try:
                await self.loop.run_in_executor(None, _restore_data)
            except shutil.Error as err:
                _LOGGER.error("Can't restore origin data -> %s", err)
                return False

            # run addon
            if data[ATTR_STATE] == STATE_STARTED:
                return await self.start()

        return True
12 hassio/addons/built-in.json Normal file
@@ -0,0 +1,12 @@
{
    "local": {
        "name": "Local Add-Ons",
        "url": "https://home-assistant.io/hassio",
        "maintainer": "By our self"
    },
    "core": {
        "name": "Built-in Add-Ons",
        "url": "https://home-assistant.io/addons",
        "maintainer": "Home Assistant authors"
    }
}
@@ -1,247 +1,165 @@
"""Init file for HassIO addons."""
import copy
import logging
import glob
import json
from pathlib import Path
import re

import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import validate_options, SCHEMA_ADDON_CONFIG
from .util import extract_hash_from_path
from .validate import (
    SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG,
    MAP_VOLUME)
from ..const import (
    FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON,
    ATTR_STARTUP, ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
    ATTR_PORTS, BOOT_AUTO, DOCKER_REPO, ATTR_INSTALLED, ATTR_SCHEMA,
    ATTR_IMAGE, ATTR_DEDICATED)
from ..config import Config
from ..tools import read_json_file, write_json_file
    FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
    REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
from ..tools import JsonConfig, read_json_file

_LOGGER = logging.getLogger(__name__)

ADDONS_REPO_PATTERN = "{}/*/config.json"
SYSTEM = "system"
USER = "user"
RE_VOLUME = re.compile(MAP_VOLUME)


class AddonsData(Config):
class Data(JsonConfig):
    """Hold data for addons inside HassIO."""

    def __init__(self, config):
        """Initialize data holder."""
        super().__init__(FILE_HASSIO_ADDONS)
        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDON_FILE)
        self.config = config
        self._addons_data = self._data.get(SYSTEM, {})
        self._user_data = self._data.get(USER, {})
        self._current_data = {}
        self.arch = None
        self._repositories = {}
        self._cache = {}

    def save(self):
        """Store data to config file."""
        self._data = {
            USER: self._user_data,
            SYSTEM: self._addons_data,
        }
        super().save()
    @property
    def user(self):
        """Return local addon user data."""
        return self._data[ATTR_USER]

    def read_addons_repo(self):
    @property
    def system(self):
        """Return local addon data."""
        return self._data[ATTR_SYSTEM]

    @property
    def cache(self):
        """Return addon data from cache/repositories."""
        return self._cache

    @property
    def repositories(self):
        """Return addon data from repositories."""
        return self._repositories

    def reload(self):
        """Read data from addons repository."""
        self._current_data = {}
        self._cache = {}
        self._repositories = {}

        self._read_addons_folder(self.config.path_addons_repo)
        self._read_addons_folder(self.config.path_addons_custom)
        # read core repository
        self._read_addons_folder(
            self.config.path_addons_core, REPOSITORY_CORE)

    def _read_addons_folder(self, folder):
        # read local repository
        self._read_addons_folder(
            self.config.path_addons_local, REPOSITORY_LOCAL)

        # add built-in repositories information
        self._set_builtin_repositories()

        # read custom git repositories
        for repository_element in self.config.path_addons_git.iterdir():
            if repository_element.is_dir():
                self._read_git_repository(repository_element)

        # update local data
        self._merge_config()

    def _read_git_repository(self, path):
        """Process a custom repository folder."""
        slug = extract_hash_from_path(path)

        # exists repository json
        repository_file = Path(path, "repository.json")
        try:
            repository_info = SCHEMA_REPOSITORY_CONFIG(
                read_json_file(repository_file)
            )

        except (OSError, json.JSONDecodeError):
            _LOGGER.warning("Can't read repository information from %s",
                            repository_file)
            return

        except vol.Invalid:
            _LOGGER.warning("Repository parse error %s", repository_file)
            return

        # process data
        self._repositories[slug] = repository_info
        self._read_addons_folder(path, slug)

    def _read_addons_folder(self, path, repository):
        """Read data from addons folder."""
        pattern = ADDONS_REPO_PATTERN.format(folder)

        for addon in glob.iglob(pattern):
        for addon in path.glob("**/config.json"):
            try:
                addon_config = read_json_file(addon)

                # validate
                addon_config = SCHEMA_ADDON_CONFIG(addon_config)
                self._current_data[addon_config[ATTR_SLUG]] = addon_config

            except (OSError, KeyError):
                # Generate slug
                addon_slug = "{}_{}".format(
                    repository, addon_config[ATTR_SLUG])

                # store
                addon_config[ATTR_REPOSITORY] = repository
                addon_config[ATTR_LOCATON] = str(addon.parent)
                self._cache[addon_slug] = addon_config

            except (OSError, json.JSONDecodeError):
                _LOGGER.warning("Can't read %s", addon)

            except vol.Invalid as ex:
                _LOGGER.warning("Can't read %s -> %s", addon,
                                humanize_error(addon_config, ex))

    @property
    def list_installed(self):
        """Return a list of installed addons."""
        return set(self._addons_data.keys())

    @property
    def list(self):
        """Return a list of available addons."""
        data = []
        all_addons = {**self._addons_data, **self._current_data}
        dedicated = self.list_removed

        for addon, values in all_addons.items():
            i_version = self._user_data.get(addon, {}).get(ATTR_VERSION)

            data.append({
                ATTR_NAME: values[ATTR_NAME],
                ATTR_SLUG: values[ATTR_SLUG],
                ATTR_DESCRIPTON: values[ATTR_DESCRIPTON],
                ATTR_VERSION: values[ATTR_VERSION],
                ATTR_INSTALLED: i_version,
                ATTR_DEDICATED: addon in dedicated,
            })

        return data

    def list_startup(self, start_type):
        """Get list of installed addons that need to start by type."""
        addon_list = set()
        for addon in self._addons_data.keys():
            if self.get_boot(addon) != BOOT_AUTO:
                continue

            try:
                if self._addons_data[addon][ATTR_STARTUP] == start_type:
                    addon_list.add(addon)
            except KeyError:
                _LOGGER.warning("Orphaned addon detected: %s", addon)
                continue

        return addon_list

    @property
    def list_removed(self):
        """Return local addons that are no longer supported by a repository."""
        addon_list = set()
        for addon in self._addons_data.keys():
            if addon not in self._current_data:
                addon_list.add(addon)

        return addon_list

    def exists_addon(self, addon):
        """Return True if an addon exists."""
        return addon in self._current_data or addon in self._addons_data

    def is_installed(self, addon):
        """Return True if an addon is installed."""
        return addon in self._addons_data

    def version_installed(self, addon):
        """Return installed version."""
        if ATTR_VERSION not in self._user_data[addon]:
            return self._addons_data[addon][ATTR_VERSION]

        return self._user_data[addon][ATTR_VERSION]

    def set_addon_install(self, addon, version):
        """Set addon as installed."""
        self._addons_data[addon] = self._current_data[addon]
        self._user_data[addon] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: version,
        }
        self.save()

    def set_addon_uninstall(self, addon):
        """Set addon as uninstalled."""
        self._addons_data.pop(addon, None)
        self._user_data.pop(addon, None)
        self.save()

    def set_addon_update(self, addon, version):
        """Update version of addon."""
        self._addons_data[addon] = self._current_data[addon]
        self._user_data[addon][ATTR_VERSION] = version
        self.save()

    def set_options(self, addon, options):
        """Store user addon options."""
        self._user_data[addon][ATTR_OPTIONS] = options
        self.save()

    def set_boot(self, addon, boot):
        """Store user boot options."""
        self._user_data[addon][ATTR_BOOT] = boot
        self.save()

    def get_options(self, addon):
        """Return options with local changes."""
        return {
            **self._addons_data[addon][ATTR_OPTIONS],
            **self._user_data[addon][ATTR_OPTIONS],
        }

    def get_boot(self, addon):
        """Return boot config with priority on local settings."""
        if ATTR_BOOT in self._user_data[addon]:
            return self._user_data[addon][ATTR_BOOT]

        return self._addons_data[addon][ATTR_BOOT]

    def get_name(self, addon):
        """Return name of addon."""
        return self._addons_data[addon][ATTR_NAME]

    def get_description(self, addon):
        """Return description of addon."""
        return self._addons_data[addon][ATTR_DESCRIPTON]

    def get_version(self, addon):
        """Return version of addon."""
        if addon not in self._current_data:
            return self.version_installed(addon)
        return self._current_data[addon][ATTR_VERSION]

    def get_ports(self, addon):
        """Return ports of addon."""
        return self._addons_data[addon].get(ATTR_PORTS)

    def get_image(self, addon):
        """Return image name of addon."""
        addon_data = self._addons_data.get(addon, self._current_data[addon])

        if ATTR_IMAGE not in addon_data:
            return "{}/{}-addon-{}".format(DOCKER_REPO, self.arch, addon)

        return addon_data[ATTR_IMAGE]

    def need_config(self, addon):
        """Return True if config map is needed."""
        return self._addons_data[addon][ATTR_MAP_CONFIG]

    def need_ssl(self, addon):
        """Return True if ssl map is needed."""
        return self._addons_data[addon][ATTR_MAP_SSL]

    def path_data(self, addon):
        """Return addon data path inside supervisor."""
        return "{}/{}".format(self.config.path_addons_data, addon)

    def path_data_docker(self, addon):
        """Return addon data path external for docker."""
        return "{}/{}".format(self.config.path_addons_data_docker, addon)

    def path_addon_options(self, addon):
        """Return path to addon options."""
        return "{}/options.json".format(self.path_data(addon))

    def write_addon_options(self, addon):
        """Return True if addon options are written to data."""
        schema = self.get_schema(addon)
        options = self.get_options(addon)

    def _set_builtin_repositories(self):
        """Add local built-in repository into dataset."""
        try:
            schema(options)
            return write_json_file(self.path_addon_options(addon), options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s has invalid options -> %s", addon,
                          humanize_error(options, ex))
            builtin_file = Path(__file__).parent.joinpath('built-in.json')
            builtin_data = read_json_file(builtin_file)
        except (OSError, json.JSONDecodeError) as err:
            _LOGGER.warning("Can't read built-in.json -> %s", err)
            return

        return False
        # core repository
        self._repositories[REPOSITORY_CORE] = \
            builtin_data[REPOSITORY_CORE]

    def get_schema(self, addon):
        """Create a schema for addon options."""
        raw_schema = self._addons_data[addon][ATTR_SCHEMA]
        # local repository
        self._repositories[REPOSITORY_LOCAL] = \
            builtin_data[REPOSITORY_LOCAL]

        schema = vol.Schema(vol.All(dict, validate_options(raw_schema)))
        return schema
    def _merge_config(self):
        """Update local config if an update is available.

        It needs to be the same version as the local version to merge.
        """
        have_change = False

        for addon in set(self.system):
            # detached
            if addon not in self._cache:
                continue

            cache = self._cache[addon]
            data = self.system[addon]
            if data[ATTR_VERSION] == cache[ATTR_VERSION]:
                if data != cache:
                    self.system[addon] = copy.deepcopy(cache)
                    have_change = True

        if have_change:
            self.save()
@@ -1,38 +1,43 @@
"""Init file for HassIO addons git."""
import asyncio
import logging
import os
from pathlib import Path
import shutil

import git

from .util import get_hash_from_repository
from ..const import URL_HASSIO_ADDONS

_LOGGER = logging.getLogger(__name__)


class AddonsRepo(object):
class GitRepo(object):
    """Manage addons git repo."""

    def __init__(self, config, loop):
        """Initialize docker base wrapper."""
    def __init__(self, config, loop, path, url):
        """Initialize git base wrapper."""
        self.config = config
        self.loop = loop
        self.repo = None
        self.path = path
        self.url = url
        self._lock = asyncio.Lock(loop=loop)

    async def load(self):
        """Init git addon repo."""
        if not os.path.isdir(self.config.path_addons_repo):
        if not self.path.is_dir():
            return await self.clone()

        async with self._lock:
            try:
                _LOGGER.info("Load addons repository")
                _LOGGER.info("Load addon %s repository", self.path)
                self.repo = await self.loop.run_in_executor(
                    None, git.Repo, self.config.path_addons_repo)
                    None, git.Repo, str(self.path))

            except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
                _LOGGER.error("Can't load addons repo: %s.", err)
            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                    git.GitCommandError) as err:
                _LOGGER.error("Can't load %s repo: %s.", self.path, err)
                return False

        return True
@@ -41,13 +46,13 @@ class AddonsRepo(object):
        """Clone git addon repo."""
        async with self._lock:
            try:
                _LOGGER.info("Clone addons repository")
                _LOGGER.info("Clone addon %s repository", self.url)
                self.repo = await self.loop.run_in_executor(
                    None, git.Repo.clone_from, URL_HASSIO_ADDONS,
                    self.config.path_addons_repo)
                    None, git.Repo.clone_from, self.url, str(self.path))

            except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
                _LOGGER.error("Can't clone addons repo: %s.", err)
            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                    git.GitCommandError) as err:
                _LOGGER.error("Can't clone %s repo: %s.", self.url, err)
                return False

        return True
@@ -60,12 +65,43 @@ class AddonsRepo(object):

        async with self._lock:
            try:
                _LOGGER.info("Pull addons repository")
                _LOGGER.info("Pull addon %s repository", self.url)
                await self.loop.run_in_executor(
                    None, self.repo.remotes.origin.pull)

            except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
                _LOGGER.error("Can't pull addons repo: %s.", err)
            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                    git.exc.GitCommandError) as err:
                _LOGGER.error("Can't pull %s repo: %s.", self.url, err)
                return False

        return True


class GitRepoHassIO(GitRepo):
    """HassIO addons repository."""

    def __init__(self, config, loop):
        """Initialize git hassio addon repository."""
        super().__init__(
            config, loop, config.path_addons_core, URL_HASSIO_ADDONS)


class GitRepoCustom(GitRepo):
    """Custom addons repository."""

    def __init__(self, config, loop, url):
        """Initialize git custom addon repository."""
        path = Path(config.path_addons_git, get_hash_from_repository(url))

        super().__init__(config, loop, path, url)

    def remove(self):
        """Remove a custom addon repository."""
        if self.path.is_dir():
            _LOGGER.info("Remove custom addon repository %s", self.url)

            def log_err(funct, path, _):
                """Log error."""
                _LOGGER.warning("Can't remove %s", path)

            shutil.rmtree(str(self.path), onerror=log_err)
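# Illustrative composition of the classes above (the URL is an example, not
# a value from this repository):
#
#   repo = GitRepoCustom(config, loop, "https://github.com/example/addons")
#   await repo.load()   # clones into path_addons_git/<8-char hash> first time
#   await repo.pull()   # later refreshes are a plain 'git pull'
#   repo.remove()       # deletes the checkout, logging undeletable paths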
71 hassio/addons/repository.py Normal file
@@ -0,0 +1,71 @@
"""Represent a HassIO repository."""
from .git import GitRepoHassIO, GitRepoCustom
from .util import get_hash_from_repository
from ..const import (
    REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)

UNKNOWN = 'unknown'


class Repository(object):
    """Repository in HassIO."""

    def __init__(self, config, loop, data, repository):
        """Initialize repository object."""
        self.data = data
        self.source = None
        self.git = None

        if repository == REPOSITORY_LOCAL:
            self._id = repository
        elif repository == REPOSITORY_CORE:
            self._id = repository
            self.git = GitRepoHassIO(config, loop)
        else:
            self._id = get_hash_from_repository(repository)
            self.git = GitRepoCustom(config, loop, repository)
            self.source = repository

    @property
    def _mesh(self):
        """Return data struct of repository."""
        return self.data.repositories.get(self._id, {})

    @property
    def slug(self):
        """Return slug of repository."""
        return self._id

    @property
    def name(self):
        """Return name of repository."""
        return self._mesh.get(ATTR_NAME, UNKNOWN)

    @property
    def url(self):
        """Return url of repository."""
        return self._mesh.get(ATTR_URL, self.source)

    @property
    def maintainer(self):
        """Return maintainer of repository."""
        return self._mesh.get(ATTR_MAINTAINER, UNKNOWN)

    async def load(self):
        """Load addon repository."""
        if self.git:
            return await self.git.load()
        return True

    async def update(self):
        """Update addon repository."""
        if self.git:
            return await self.git.pull()
        return True

    def remove(self):
        """Remove addon repository."""
        if self._id in (REPOSITORY_CORE, REPOSITORY_LOCAL):
            raise RuntimeError("Can't remove built-in repositories!")

        self.git.remove()
35 hassio/addons/util.py Normal file
@@ -0,0 +1,35 @@
"""Util addons functions."""
import hashlib
import logging
import re

RE_SHA1 = re.compile(r"[a-f0-9]{8}")

_LOGGER = logging.getLogger(__name__)


def get_hash_from_repository(name):
    """Generate a hash from repository."""
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def extract_hash_from_path(path):
    """Extract repo id from path."""
    repo_dir = path.parts[-1]

    if not RE_SHA1.match(repo_dir):
        return get_hash_from_repository(repo_dir)
    return repo_dir


def check_installed(method):
    """Wrap function with check if addon is installed."""
    async def wrap_check(addon, *args, **kwargs):
        """Return False if not installed or the function."""
        if not addon.is_installed:
            _LOGGER.error("Addon %s is not installed", addon.slug)
            return False
        return await method(addon, *args, **kwargs)

    return wrap_check
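# Example of the slug hashing used above (URL illustrative only):
#
#   get_hash_from_repository("https://github.com/example/addons")
#   -> first 8 hex chars of the SHA1 of the lower-cased URL
#
# extract_hash_from_path() accepts either form: a directory already named
# with such a hash is returned as-is, anything else is hashed first.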
@@ -3,9 +3,19 @@ import voluptuous as vol

from ..const import (
    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
    ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
    ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER, STARTUP_BEFORE, BOOT_AUTO,
    BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE)
    ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE,
    STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE,
    BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER,
    ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
    ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
    ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API)
from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL


MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$"

V_STR = 'str'
V_INT = 'int'
@@ -13,8 +23,34 @@ V_FLOAT = 'float'
V_BOOL = 'bool'
V_EMAIL = 'email'
V_URL = 'url'
V_PORT = 'port'

ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL, V_PORT])

ARCH_ALL = [
    ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
]

STARTUP_ALL = [
    STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
    STARTUP_APPLICATION
]

PRIVILEGED_ALL = [
    "NET_ADMIN",
    "SYS_ADMIN",
    "SYS_RAWIO"
]


def _simple_startup(value):
    """Simple startup schema."""
    if value == "before":
        return STARTUP_SERVICES
    if value == "after":
        return STARTUP_APPLICATION
    return value

ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL])

# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema({
@@ -22,20 +58,78 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
    vol.Optional(ATTR_URL): vol.Url(),
    vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
    vol.Required(ATTR_STARTUP):
        vol.In([STARTUP_BEFORE, STARTUP_AFTER, STARTUP_ONCE]),
        vol.All(_simple_startup, vol.In(STARTUP_ALL)),
    vol.Required(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_PORTS): dict,
    vol.Optional(ATTR_MAP_CONFIG, default=False): vol.Boolean(),
    vol.Optional(ATTR_MAP_SSL, default=False): vol.Boolean(),
    vol.Optional(ATTR_PORTS): DOCKER_PORTS,
    vol.Optional(ATTR_WEBUI):
        vol.Match(r"^(?:https?):\/\/\[HOST\]:\[PORT:\d+\].*$"),
    vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
    vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
    vol.Optional(ATTR_TMPFS):
        vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
    vol.Optional(ATTR_MAP, default=[]): [vol.Match(MAP_VOLUME)],
    vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
    vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
    vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
    vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
    vol.Required(ATTR_OPTIONS): dict,
    vol.Required(ATTR_SCHEMA): {
    vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
        vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
            vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
        ])
    },
        ], vol.Schema({vol.Coerce(str): ADDON_ELEMENT}))
    }), False),
    vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
    vol.Optional(ATTR_TIMEOUT, default=10):
        vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
}, extra=vol.ALLOW_EXTRA)


# pylint: disable=no-value-for-parameter
SCHEMA_REPOSITORY_CONFIG = vol.Schema({
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Optional(ATTR_URL): vol.Url(),
    vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
}, extra=vol.ALLOW_EXTRA)


# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema({
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    vol.Optional(ATTR_OPTIONS, default={}): dict,
    vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
    vol.Optional(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
})


SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
    vol.Required(ATTR_LOCATON): vol.Coerce(str),
    vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
})


SCHEMA_ADDON_FILE = vol.Schema({
    vol.Optional(ATTR_USER, default={}): {
        vol.Coerce(str): SCHEMA_ADDON_USER,
    },
    vol.Optional(ATTR_SYSTEM, default={}): {
        vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
    }
})


SCHEMA_ADDON_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
    vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
    vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
    vol.Required(ATTR_VERSION): vol.Coerce(str),
})

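# A minimal config.json that would pass SCHEMA_ADDON_CONFIG above, assuming
# the STARTUP_* and BOOT_* constants map to their lower-case names (all field
# values here are illustrative):
#
#   {
#     "name": "Example",
#     "version": "1.0",
#     "slug": "example",
#     "description": "Example add-on",
#     "startup": "application",
#     "boot": "auto",
#     "options": {"port": 8080},
#     "schema": {"port": "port"}
#   }
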
@@ -53,11 +147,14 @@ def validate_options(raw_schema):
            typ = raw_schema[key]
            try:
                if isinstance(typ, list):
                    # nested value
                    options[key] = _nested_validate(typ[0], value)
                    # nested value list
                    options[key] = _nested_validate_list(typ[0], value, key)
                elif isinstance(typ, dict):
                    # nested value dict
                    options[key] = _nested_validate_dict(typ, value, key)
                else:
                    # normal value
                    options[key] = _single_validate(typ, value)
                    options[key] = _single_validate(typ, value, key)
            except (IndexError, KeyError):
                raise vol.Invalid(
                    "Type error for {}.".format(key)) from None
@@ -68,9 +165,13 @@ def validate_options(raw_schema):


# pylint: disable=no-value-for-parameter
def _single_validate(typ, value):
def _single_validate(typ, value, key):
    """Validate a single element."""
    try:
        # if required argument
        if value is None:
            raise vol.Invalid("Missing required option '{}'.".format(key))

        if typ == V_STR:
            return str(value)
        elif typ == V_INT:
@@ -83,14 +184,16 @@ def _single_validate(typ, value):
            return vol.Email()(value)
        elif typ == V_URL:
            return vol.Url()(value)
        elif typ == V_PORT:
            return NETWORK_PORT(value)

        raise vol.Invalid("Fatal error for {}.".format(value))
    except TypeError:
        raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
    except ValueError:
        raise vol.Invalid(
            "Type {} error for {}.".format(typ, value)) from None
            "Type {} error for '{}' on {}.".format(typ, value, key)) from None


def _nested_validate(typ, data_list):
def _nested_validate_list(typ, data_list, key):
    """Validate nested list items."""
    options = []

@@ -101,12 +204,25 @@ def _nested_validate(typ, data_list):
            for c_key, c_value in element.items():
                if c_key not in typ:
                    raise vol.Invalid(
                        "Unknown nested options {}.".format(c_key))
                        "Unknown nested options {}".format(c_key))

                c_options[c_key] = _single_validate(typ[c_key], c_value)
                c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
            options.append(c_options)
        # normal list
        else:
            options.append(_single_validate(typ, element))
            options.append(_single_validate(typ, element, key))

    return options


def _nested_validate_dict(typ, data_dict, key):
    """Validate nested dict items."""
    options = {}

    for c_key, c_value in data_dict.items():
        if c_key not in typ:
            raise vol.Invalid("Unknown nested dict options {}".format(c_key))

        options[c_key] = _single_validate(typ[c_key], c_value, c_key)

    return options

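# Sketch of validate_options in use, with a hypothetical options schema (not
# one from this repository):
#
#   schema = {"name": "str", "port": "port", "hosts": ["str"]}
#   check = vol.Schema(vol.All(dict, validate_options(schema)))
#   check({"name": "cam", "port": 8080, "hosts": ["a", "b"]})  # passes
#   check({"name": "cam", "port": "no"})   # raises vol.Invalid (NETWORK_PORT)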
@@ -1,5 +1,6 @@
"""Init file for HassIO rest api."""
import logging
from pathlib import Path

from aiohttp import web

@@ -8,6 +9,8 @@ from .homeassistant import APIHomeAssistant
from .host import APIHost
from .network import APINetwork
from .supervisor import APISupervisor
from .security import APISecurity
from .snapshots import APISnapshots

_LOGGER = logging.getLogger(__name__)

@@ -25,32 +28,38 @@ class RestAPI(object):
        self._handler = None
        self.server = None

    def register_host(self, host_controll):
        """Register hostcontroll function."""
        api_host = APIHost(self.config, self.loop, host_controll)
    def register_host(self, host_control, hardware):
        """Register hostcontrol function."""
        api_host = APIHost(self.config, self.loop, host_control, hardware)

        self.webapp.router.add_get('/host/info', api_host.info)
        self.webapp.router.add_get('/host/reboot', api_host.reboot)
        self.webapp.router.add_get('/host/shutdown', api_host.shutdown)
        self.webapp.router.add_get('/host/update', api_host.update)
        self.webapp.router.add_get('/host/hardware', api_host.hardware)
        self.webapp.router.add_post('/host/reboot', api_host.reboot)
        self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
        self.webapp.router.add_post('/host/update', api_host.update)
        self.webapp.router.add_post('/host/options', api_host.options)

    def register_network(self, host_controll):
    def register_network(self, host_control):
        """Register network function."""
        api_net = APINetwork(self.config, self.loop, host_controll)
        api_net = APINetwork(self.config, self.loop, host_control)

        self.webapp.router.add_get('/network/info', api_net.info)
        self.webapp.router.add_get('/network/options', api_net.options)
        self.webapp.router.add_post('/network/options', api_net.options)

    def register_supervisor(self, supervisor, addons):
    def register_supervisor(self, supervisor, snapshots, addons, host_control,
                            updater):
        """Register supervisor function."""
        api_supervisor = APISupervisor(
            self.config, self.loop, supervisor, addons)
            self.config, self.loop, supervisor, snapshots, addons,
            host_control, updater)

        self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
        self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
        self.webapp.router.add_get('/supervisor/update', api_supervisor.update)
        self.webapp.router.add_get('/supervisor/reload', api_supervisor.reload)
        self.webapp.router.add_get(
        self.webapp.router.add_post(
            '/supervisor/update', api_supervisor.update)
        self.webapp.router.add_post(
            '/supervisor/reload', api_supervisor.reload)
        self.webapp.router.add_post(
            '/supervisor/options', api_supervisor.options)
        self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)

@@ -59,24 +68,74 @@ class RestAPI(object):
        api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)

        self.webapp.router.add_get('/homeassistant/info', api_hass.info)
        self.webapp.router.add_get('/homeassistant/update', api_hass.update)
        self.webapp.router.add_post('/homeassistant/options', api_hass.options)
        self.webapp.router.add_post('/homeassistant/update', api_hass.update)
        self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
        self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)

    def register_addons(self, addons):
        """Register addons function."""
        api_addons = APIAddons(self.config, self.loop, addons)

        self.webapp.router.add_get('/addons', api_addons.list)
        self.webapp.router.add_post('/addons/reload', api_addons.reload)

        self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
        self.webapp.router.add_get(
        self.webapp.router.add_post(
            '/addons/{addon}/install', api_addons.install)
        self.webapp.router.add_get(
        self.webapp.router.add_post(
            '/addons/{addon}/uninstall', api_addons.uninstall)
        self.webapp.router.add_get('/addons/{addon}/start', api_addons.start)
        self.webapp.router.add_get('/addons/{addon}/stop', api_addons.stop)
        self.webapp.router.add_get('/addons/{addon}/update', api_addons.update)
        self.webapp.router.add_get(
        self.webapp.router.add_post('/addons/{addon}/start', api_addons.start)
        self.webapp.router.add_post('/addons/{addon}/stop', api_addons.stop)
        self.webapp.router.add_post(
            '/addons/{addon}/restart', api_addons.restart)
        self.webapp.router.add_post(
            '/addons/{addon}/update', api_addons.update)
        self.webapp.router.add_post(
            '/addons/{addon}/options', api_addons.options)
        self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
        self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)

    def register_security(self):
        """Register security function."""
        api_security = APISecurity(self.config, self.loop)

        self.webapp.router.add_get('/security/info', api_security.info)
        self.webapp.router.add_post('/security/options', api_security.options)
        self.webapp.router.add_post('/security/totp', api_security.totp)
        self.webapp.router.add_post('/security/session', api_security.session)

    def register_snapshots(self, snapshots):
        """Register snapshots function."""
        api_snapshots = APISnapshots(self.config, self.loop, snapshots)

        self.webapp.router.add_get('/snapshots', api_snapshots.list)
        self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)

        self.webapp.router.add_post(
            '/snapshots/new/full', api_snapshots.snapshot_full)
        self.webapp.router.add_post(
            '/snapshots/new/partial', api_snapshots.snapshot_partial)

        self.webapp.router.add_get(
            '/snapshots/{snapshot}/info', api_snapshots.info)
        self.webapp.router.add_post(
            '/snapshots/{snapshot}/remove', api_snapshots.remove)
        self.webapp.router.add_post(
            '/snapshots/{snapshot}/restore/full', api_snapshots.restore_full)
        self.webapp.router.add_post(
            '/snapshots/{snapshot}/restore/partial',
            api_snapshots.restore_partial)

    def register_panel(self):
        """Register panel for homeassistant."""
        panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html')

        def get_panel(request):
            """Return file response with panel."""
            return web.FileResponse(panel)

        self.webapp.router.add_get('/panel', get_panel)

    async def start(self):
        """Run rest api webserver."""
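# Net effect of this hunk: every state-changing endpoint moves from GET to
# POST, while GET remains for read-only info/log routes. A minimal client
# sketch (aiohttp; the host name is assumed, not taken from this patch):
#
#   async with aiohttp.ClientSession() as session:
#       await session.post('http://hassio/addons/example_addon/start')
#       info = await session.get('http://hassio/addons/example_addon/info')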
@@ -7,8 +7,14 @@ from voluptuous.humanize import humanize_error

from .util import api_process, api_process_raw, api_validate
from ..const import (
    ATTR_VERSION, ATTR_CURRENT, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
    STATE_STOPPED, STATE_STARTED, BOOT_AUTO, BOOT_MANUAL)
    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
    ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
    ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG,
    ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
    ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
    ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
    BOOT_AUTO, BOOT_MANUAL, CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
from ..validate import DOCKER_PORTS

_LOGGER = logging.getLogger(__name__)

@@ -16,8 +22,11 @@ SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})

# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL])
    vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
    vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
})


@@ -32,46 +41,120 @@ class APIAddons(object):

    def _extract_addon(self, request, check_installed=True):
        """Return addon, otherwise raise an exception."""
        addon = request.match_info.get('addon')

        # check data
        if not self.addons.exists_addon(addon):
        addon = self.addons.get(request.match_info.get('addon'))
        if not addon:
            raise RuntimeError("Addon does not exist")
        if check_installed and not self.addons.is_installed(addon):

        if check_installed and not addon.is_installed:
            raise RuntimeError("Addon is not installed")

        return addon

    @staticmethod
    def _pretty_devices(addon):
        """Return a simplified device list."""
        dev_list = addon.devices
        if not dev_list:
            return
        return [row.split(':')[0] for row in dev_list]

    @api_process
    async def list(self, request):
        """Return all addons / repositories."""
        data_addons = []
        for addon in self.addons.list_addons:
            data_addons.append({
                ATTR_NAME: addon.name,
                ATTR_SLUG: addon.slug,
                ATTR_DESCRIPTON: addon.description,
                ATTR_VERSION: addon.last_version,
                ATTR_INSTALLED: addon.version_installed,
                ATTR_ARCH: addon.supported_arch,
                ATTR_DETACHED: addon.is_detached,
                ATTR_REPOSITORY: addon.repository,
                ATTR_BUILD: addon.need_build,
                ATTR_PRIVILEGED: addon.privileged,
                ATTR_DEVICES: self._pretty_devices(addon),
                ATTR_URL: addon.url,
                ATTR_LOGO: addon.with_logo,
                ATTR_HASSIO_API: addon.use_hassio_api,
                ATTR_AUDIO: addon.with_audio,
            })

        data_repositories = []
        for repository in self.addons.list_repositories:
            data_repositories.append({
                ATTR_SLUG: repository.slug,
                ATTR_NAME: repository.name,
                ATTR_SOURCE: repository.source,
                ATTR_URL: repository.url,
                ATTR_MAINTAINER: repository.maintainer,
            })

        return {
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: data_repositories,
        }

    @api_process
    async def reload(self, request):
        """Reload all addons data."""
        await asyncio.shield(self.addons.reload(), loop=self.loop)
        return True

    @api_process
    async def info(self, request):
        """Return addon information."""
        addon = self._extract_addon(request)
        addon = self._extract_addon(request, check_installed=False)

        info = {
            ATTR_VERSION: self.addons.version_installed(addon),
            ATTR_CURRENT: self.addons.get_version(addon),
            ATTR_STATE: await self.addons.state(addon),
            ATTR_BOOT: self.addons.get_boot(addon),
            ATTR_OPTIONS: self.addons.get_options(addon),
        return {
            ATTR_NAME: addon.name,
            ATTR_DESCRIPTON: addon.description,
            ATTR_VERSION: addon.version_installed,
            ATTR_AUTO_UPDATE: addon.auto_update,
            ATTR_REPOSITORY: addon.repository,
            ATTR_LAST_VERSION: addon.last_version,
            ATTR_STATE: await addon.state(),
            ATTR_BOOT: addon.boot,
            ATTR_OPTIONS: addon.options,
            ATTR_URL: addon.url,
            ATTR_DETACHED: addon.is_detached,
            ATTR_BUILD: addon.need_build,
            ATTR_NETWORK: addon.ports,
            ATTR_HOST_NETWORK: addon.network_mode == 'host',
            ATTR_PRIVILEGED: addon.privileged,
            ATTR_DEVICES: self._pretty_devices(addon),
            ATTR_LOGO: addon.with_logo,
            ATTR_WEBUI: addon.webui,
            ATTR_HASSIO_API: addon.use_hassio_api,
            ATTR_AUDIO: addon.with_audio,
            ATTR_AUDIO_INPUT: addon.audio_input,
            ATTR_AUDIO_OUTPUT: addon.audio_output,
        }
        return info

    @api_process
    async def options(self, request):
        """Store user options for addon."""
        addon = self._extract_addon(request)
        options_schema = self.addons.get_schema(addon)

        addon_schema = SCHEMA_OPTIONS.extend({
            vol.Optional(ATTR_OPTIONS): options_schema,
            vol.Optional(ATTR_OPTIONS): addon.schema,
        })

        addon_config = await api_validate(addon_schema, request)
        body = await api_validate(addon_schema, request)

        if ATTR_OPTIONS in addon_config:
            self.addons.set_options(addon, addon_config[ATTR_OPTIONS])
        if ATTR_BOOT in addon_config:
            self.addons.set_options(addon, addon_config[ATTR_BOOT])
        if ATTR_OPTIONS in body:
            addon.options = body[ATTR_OPTIONS]
        if ATTR_BOOT in body:
            addon.boot = body[ATTR_BOOT]
        if ATTR_AUTO_UPDATE in body:
            addon.auto_update = body[ATTR_AUTO_UPDATE]
        if ATTR_NETWORK in body:
            addon.ports = body[ATTR_NETWORK]
        if ATTR_AUDIO_INPUT in body:
            addon.audio_input = body[ATTR_AUDIO_INPUT]
        if ATTR_AUDIO_OUTPUT in body:
            addon.audio_output = body[ATTR_AUDIO_OUTPUT]

        return True

@@ -80,66 +163,83 @@ class APIAddons(object):
        """Install addon."""
        body = await api_validate(SCHEMA_VERSION, request)
        addon = self._extract_addon(request, check_installed=False)
        version = body.get(
            ATTR_VERSION, self.addons.get_version(addon))
        version = body.get(ATTR_VERSION, addon.last_version)

        return await asyncio.shield(
            self.addons.install(addon, version), loop=self.loop)
            addon.install(version=version), loop=self.loop)

    @api_process
    async def uninstall(self, request):
        """Uninstall addon."""
        addon = self._extract_addon(request)
    def uninstall(self, request):
        """Uninstall addon.

        return await asyncio.shield(
            self.addons.uninstall(addon), loop=self.loop)
        Return a coroutine.
        """
        addon = self._extract_addon(request)
        return asyncio.shield(addon.uninstall(), loop=self.loop)

    @api_process
    async def start(self, request):
        """Start addon."""
    def start(self, request):
        """Start addon.

        Return a coroutine.
        """
        addon = self._extract_addon(request)

        if await self.addons.state(addon) == STATE_STARTED:
            raise RuntimeError("Addon is already running")

        # validate options
        # check options
        options = addon.options
        try:
            schema = self.addons.get_schema(addon)
            options = self.addons.get_options(addon)
            schema(options)
            addon.schema(options)
        except vol.Invalid as ex:
            raise RuntimeError(humanize_error(options, ex)) from None

        return await asyncio.shield(
            self.addons.start(addon), loop=self.loop)
        return asyncio.shield(addon.start(), loop=self.loop)

    @api_process
    async def stop(self, request):
        """Stop addon."""
    def stop(self, request):
        """Stop addon.

        Return a coroutine.
        """
        addon = self._extract_addon(request)

        if await self.addons.state(addon) == STATE_STOPPED:
            raise RuntimeError("Addon is already stopped")

        return await asyncio.shield(
            self.addons.stop(addon), loop=self.loop)
        return asyncio.shield(addon.stop(), loop=self.loop)

    @api_process
    async def update(self, request):
        """Update addon."""
        body = await api_validate(SCHEMA_VERSION, request)
        addon = self._extract_addon(request)
        version = body.get(
            ATTR_VERSION, self.addons.get_version(addon))
        version = body.get(ATTR_VERSION, addon.last_version)

        if version == self.addons.version_installed(addon):
            raise RuntimeError("Version is already in use")
        if version == addon.version_installed:
            raise RuntimeError(
                "Version {} is already in use".format(version))

        return await asyncio.shield(
            self.addons.update(addon, version), loop=self.loop)
            addon.update(version=version), loop=self.loop)

    @api_process_raw
    def logs(self, request):
        """Return logs from addon."""
    @api_process
    def restart(self, request):
        """Restart addon.

        Return a coroutine.
        """
        addon = self._extract_addon(request)
        return self.addons.logs(addon)
        return asyncio.shield(addon.restart(), loop=self.loop)

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return logs from addon.

        Return a coroutine.
        """
        addon = self._extract_addon(request)
        return addon.logs()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def logo(self, request):
        """Return logo from addon."""
        addon = self._extract_addon(request, check_installed=False)
        if not addon.with_logo:
            raise RuntimeError("No image found!")

        with addon.path_logo.open('rb') as png:
            return png.read()
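# Example request body for POST /addons/{addon}/options, matching the
# extended SCHEMA_OPTIONS above (values illustrative; "options" must satisfy
# the add-on's own schema):
#
#   {
#     "boot": "auto",
#     "auto_update": true,
#     "network": {"8080/tcp": 8123},
#     "options": {"port": 8080}
#   }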
@@ -5,10 +5,21 @@ import logging
import voluptuous as vol

from .util import api_process, api_process_raw, api_validate
from ..const import ATTR_VERSION, ATTR_CURRENT
from ..const import (
    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
    CONTENT_TYPE_BINARY)
from ..validate import HASS_DEVICES

_LOGGER = logging.getLogger(__name__)


SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_DEVICES): HASS_DEVICES,
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
        vol.Any(None, vol.Coerce(str)),
})

SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
@@ -26,29 +37,49 @@ class APIHomeAssistant(object):
    @api_process
    async def info(self, request):
        """Return homeassistant information."""
        info = {
        return {
            ATTR_VERSION: self.homeassistant.version,
            ATTR_CURRENT: self.config.current_homeassistant,
            ATTR_LAST_VERSION: self.homeassistant.last_version,
            ATTR_IMAGE: self.homeassistant.image,
            ATTR_DEVICES: self.homeassistant.devices,
            ATTR_CUSTOM: self.homeassistant.is_custom_image,
        }

        return info
    @api_process
    async def options(self, request):
        """Set homeassistant options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_DEVICES in body:
            self.homeassistant.devices = body[ATTR_DEVICES]

        if ATTR_IMAGE in body:
            self.homeassistant.set_custom(
                body[ATTR_IMAGE], body[ATTR_LAST_VERSION])

        return True

    @api_process
    async def update(self, request):
        """Update host OS."""
        """Update homeassistant."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.config.current_homeassistant)

        if self.homeassistant.in_progress:
            raise RuntimeError("Other task is in progress")
        version = body.get(ATTR_VERSION, self.homeassistant.last_version)

        if version == self.homeassistant.version:
            raise RuntimeError("Version is already in use")
            raise RuntimeError("Version {} is already in use".format(version))

        return await asyncio.shield(
            self.homeassistant.update(version), loop=self.loop)

    @api_process_raw
    @api_process
    def restart(self, request):
        """Restart homeassistant.

        Return a coroutine.
        """
        return asyncio.shield(self.homeassistant.restart(), loop=self.loop)

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return homeassistant docker logs.

@@ -1,61 +1,96 @@
 """Init file for HassIO host rest api."""
 import asyncio
 import logging

 import voluptuous as vol

-from .util import api_process_hostcontroll, api_process, api_validate
-from ..const import ATTR_VERSION
+from .util import api_process_hostcontrol, api_process, api_validate
+from ..const import (
+    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
+    ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
+    ATTR_AUDIO_OUTPUT)
+from ..validate import ALSA_CHANNEL

 _LOGGER = logging.getLogger(__name__)

-UNKNOWN = 'unknown'
-
 SCHEMA_VERSION = vol.Schema({
     vol.Optional(ATTR_VERSION): vol.Coerce(str),
 })

+SCHEMA_OPTIONS = vol.Schema({
+    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
+    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
+})
+

 class APIHost(object):
     """Handle rest api for host functions."""

-    def __init__(self, config, loop, host_controll):
+    def __init__(self, config, loop, host_control, hardware):
         """Initialize host rest api part."""
         self.config = config
         self.loop = loop
-        self.host_controll = host_controll
+        self.host_control = host_control
+        self.local_hw = hardware

     @api_process
     async def info(self, request):
         """Return host information."""
-        if not self.host_controll.active:
-            info = {
-                'os': UNKNOWN,
-                'version': UNKNOWN,
-                'current': UNKNOWN,
-                'level': 0,
-                'hostname': UNKNOWN,
-            }
-            return info
-
-        return await self.host_controll.info()
+        return {
+            ATTR_TYPE: self.host_control.type,
+            ATTR_VERSION: self.host_control.version,
+            ATTR_LAST_VERSION: self.host_control.last_version,
+            ATTR_FEATURES: self.host_control.features,
+            ATTR_HOSTNAME: self.host_control.hostname,
+            ATTR_OS: self.host_control.os_info,
+        }

-    @api_process_hostcontroll
+    @api_process
+    async def options(self, request):
+        """Process host options."""
+        body = await api_validate(SCHEMA_OPTIONS, request)
+
+        if ATTR_AUDIO_OUTPUT in body:
+            self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
+        if ATTR_AUDIO_INPUT in body:
+            self.config.audio_input = body[ATTR_AUDIO_INPUT]
+
+        return True
+
+    @api_process_hostcontrol
     def reboot(self, request):
-        """Reboot host."""
-        return self.host_controll.reboot()
+        """Reboot host.
+
+        Return a coroutine.
+        """
+        return self.host_control.reboot()

-    @api_process_hostcontroll
+    @api_process_hostcontrol
     def shutdown(self, request):
-        """Poweroff host."""
-        return self.host_controll.shutdown()
+        """Poweroff host.
+
+        Return a coroutine.
+        """
+        return self.host_control.shutdown()

-    @api_process_hostcontroll
+    @api_process_hostcontrol
     async def update(self, request):
         """Update host OS."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION)
+        version = body.get(ATTR_VERSION, self.host_control.last_version)

-        if version == self.host_controll.version:
-            raise RuntimeError("Version is already in use")
+        if version == self.host_control.version:
+            raise RuntimeError("Version {} is already in use".format(version))

-        return await self.host_controll.host_update(version=version)
+        return await asyncio.shield(
+            self.host_control.update(version=version), loop=self.loop)

+    @api_process
+    async def hardware(self, request):
+        """Return local hardware infos."""
+        return {
+            ATTR_SERIAL: self.local_hw.serial_devices,
+            ATTR_INPUT: self.local_hw.input_devices,
+            ATTR_DISK: self.local_hw.disk_devices,
+            ATTR_AUDIO: self.local_hw.audio_devices,
+        }
@@ -1,26 +1,43 @@
 """Init file for HassIO network rest api."""
 import logging

-from .util import api_process_hostcontroll
+import voluptuous as vol
+
+from .util import api_process, api_process_hostcontrol, api_validate
+from ..const import ATTR_HOSTNAME

 _LOGGER = logging.getLogger(__name__)

+SCHEMA_OPTIONS = vol.Schema({
+    vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
+})
+

 class APINetwork(object):
     """Handle rest api for network functions."""

-    def __init__(self, config, loop, host_controll):
+    def __init__(self, config, loop, host_control):
         """Initialize network rest api part."""
         self.config = config
         self.loop = loop
-        self.host_controll = host_controll
+        self.host_control = host_control

-    @api_process_hostcontroll
-    def info(self, request):
+    @api_process
+    async def info(self, request):
         """Show network settings."""
-        pass
+        return {
+            ATTR_HOSTNAME: self.host_control.hostname,
+        }

-    @api_process_hostcontroll
-    def options(self, request):
+    @api_process_hostcontrol
+    async def options(self, request):
         """Edit network settings."""
-        pass
+        body = await api_validate(SCHEMA_OPTIONS, request)
+
+        # hostname
+        if ATTR_HOSTNAME in body:
+            if self.host_control.hostname != body[ATTR_HOSTNAME]:
+                await self.host_control.set_hostname(body[ATTR_HOSTNAME])
+
+        return True
hassio/api/security.py (new file, 102 lines)
@@ -0,0 +1,102 @@
"""Init file for HassIO security rest api."""
from datetime import datetime, timedelta
import io
import logging
import hashlib
import os

from aiohttp import web
import voluptuous as vol
import pyotp
import pyqrcode

from .util import api_process, api_validate, hash_password
from ..const import ATTR_INITIALIZE, ATTR_PASSWORD, ATTR_TOTP, ATTR_SESSION

_LOGGER = logging.getLogger(__name__)

SCHEMA_PASSWORD = vol.Schema({
    vol.Required(ATTR_PASSWORD): vol.Coerce(str),
})

SCHEMA_SESSION = SCHEMA_PASSWORD.extend({
    vol.Optional(ATTR_TOTP, default=None): vol.Coerce(str),
})
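SCHEMA_SESSION extends SCHEMA_PASSWORD, so 'password' stays required while 'totp' becomes optional. A quick illustration of how these voluptuous schemas behave on request bodies (the payload values are made up):

import voluptuous as vol

body = SCHEMA_SESSION({'password': 'hunter2', 'totp': '123456'})
# -> {'password': 'hunter2', 'totp': '123456'}

try:
    SCHEMA_SESSION({'totp': '123456'})  # required 'password' is missing
except vol.Invalid as err:
    print(err)  # api_validate() turns this into a humanized RuntimeError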

class APISecurity(object):
    """Handle rest api for security functions."""

    def __init__(self, config, loop):
        """Initialize security rest api part."""
        self.config = config
        self.loop = loop

    def _check_password(self, body):
        """Check if the password is valid and security is initialized."""
        if not self.config.security_initialize:
            raise RuntimeError("First set a password")

        password = hash_password(body[ATTR_PASSWORD])
        if password != self.config.security_password:
            raise RuntimeError("Wrong password")

    @api_process
    async def info(self, request):
        """Return security information."""
        return {
            ATTR_INITIALIZE: self.config.security_initialize,
            ATTR_TOTP: self.config.security_totp is not None,
        }

    @api_process
    async def options(self, request):
        """Set options / password."""
        body = await api_validate(SCHEMA_PASSWORD, request)

        if self.config.security_initialize:
            raise RuntimeError("Password is already set!")

        self.config.security_password = hash_password(body[ATTR_PASSWORD])
        self.config.security_initialize = True
        return True

    @api_process
    async def totp(self, request):
        """Set and initialize TOTP."""
        body = await api_validate(SCHEMA_PASSWORD, request)
        self._check_password(body)

        # generate TOTP
        totp_init_key = pyotp.random_base32()
        totp = pyotp.TOTP(totp_init_key)

        # init qrcode
        buff = io.BytesIO()

        qrcode = pyqrcode.create(totp.provisioning_uri("Hass.IO"))
        qrcode.svg(buff)

        # finish
        self.config.security_totp = totp_init_key
        return web.Response(body=buff.getvalue(), content_type='image/svg+xml')

    @api_process
    async def session(self, request):
        """Set and initialize a session."""
        body = await api_validate(SCHEMA_SESSION, request)
        self._check_password(body)

        # check TOTP
        if self.config.security_totp:
            totp = pyotp.TOTP(self.config.security_totp)
            if body[ATTR_TOTP] != totp.now():
                raise RuntimeError("Invalid TOTP token!")

        # create session
        valid_until = datetime.now() + timedelta(days=1)
        session = hashlib.sha256(os.urandom(54)).hexdigest()

        # store session
        self.config.add_security_session(session, valid_until)
        return {ATTR_SESSION: session}
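Together these endpoints form a small enrollment flow: set a password once via options, optionally enroll TOTP (the returned SVG QR code is scanned into an authenticator app), then trade the password plus a current TOTP code for a day-long session token. The core of the server-side check, assuming a stored base32 key (values illustrative):

import pyotp

stored_key = 'JBSWY3DPEHPK3PXP'   # assumed: key saved earlier by the totp endpoint
submitted = '492039'              # assumed: code sent by the client

totp = pyotp.TOTP(stored_key)
if submitted != totp.now():
    raise RuntimeError("Invalid TOTP token!")

Note that strict equality against totp.now() rejects codes from the previous 30-second window; pyotp's totp.verify(code, valid_window=1) would tolerate one step of clock drift.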
hassio/api/snapshots.py (new file, 138 lines)
@@ -0,0 +1,138 @@
"""Init file for HassIO snapshot rest api."""
import asyncio
import logging

import voluptuous as vol

from .util import api_process, api_validate
from ..snapshots.validate import ALL_FOLDERS
from ..const import (
    ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
    ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
    ATTR_DEVICES, ATTR_SNAPSHOTS)

_LOGGER = logging.getLogger(__name__)

# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema({
    vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
})

SCHEMA_SNAPSHOT_FULL = vol.Schema({
    vol.Optional(ATTR_NAME): vol.Coerce(str),
})

SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
})
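For reference, a body accepted by SCHEMA_SNAPSHOT_PARTIAL looks like this (the add-on slug is illustrative; folder names must be members of ALL_FOLDERS):

body = SCHEMA_SNAPSHOT_PARTIAL({
    'name': 'before-upgrade',
    'addons': ['core_ssh'],       # add-on slugs to include
    'folders': ['ssl', 'share'],  # subset of ALL_FOLDERS
})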

class APISnapshots(object):
    """Handle rest api for snapshot functions."""

    def __init__(self, config, loop, snapshots):
        """Initialize snapshot rest api part."""
        self.config = config
        self.loop = loop
        self.snapshots = snapshots

    def _extract_snapshot(self, request):
        """Return snapshot, throwing an exception if it does not exist."""
        snapshot = self.snapshots.get(request.match_info.get('snapshot'))
        if not snapshot:
            raise RuntimeError("Snapshot does not exist")
        return snapshot

    @api_process
    async def list(self, request):
        """Return snapshot list."""
        data_snapshots = []
        for snapshot in self.snapshots.list_snapshots:
            data_snapshots.append({
                ATTR_SLUG: snapshot.slug,
                ATTR_NAME: snapshot.name,
                ATTR_DATE: snapshot.date,
            })

        return {
            ATTR_SNAPSHOTS: data_snapshots,
        }

    @api_process
    async def reload(self, request):
        """Reload snapshot list."""
        await asyncio.shield(self.snapshots.reload(), loop=self.loop)
        return True

    @api_process
    async def info(self, request):
        """Return snapshot info."""
        snapshot = self._extract_snapshot(request)

        data_addons = []
        for addon_data in snapshot.addons:
            data_addons.append({
                ATTR_SLUG: addon_data[ATTR_SLUG],
                ATTR_NAME: addon_data[ATTR_NAME],
                ATTR_VERSION: addon_data[ATTR_VERSION],
            })

        return {
            ATTR_SLUG: snapshot.slug,
            ATTR_TYPE: snapshot.sys_type,
            ATTR_NAME: snapshot.name,
            ATTR_DATE: snapshot.date,
            ATTR_SIZE: snapshot.size,
            ATTR_HOMEASSISTANT: {
                ATTR_VERSION: snapshot.homeassistant_version,
                ATTR_DEVICES: snapshot.homeassistant_devices,
            },
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: snapshot.repositories,
            ATTR_FOLDERS: snapshot.folders,
        }

    @api_process
    async def snapshot_full(self, request):
        """Create a full snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
        return await asyncio.shield(
            self.snapshots.do_snapshot_full(**body), loop=self.loop)

    @api_process
    async def snapshot_partial(self, request):
        """Create a partial snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        return await asyncio.shield(
            self.snapshots.do_snapshot_partial(**body), loop=self.loop)

    @api_process
    def restore_full(self, request):
        """Fully restore a snapshot.

        Return a coroutine.
        """
        snapshot = self._extract_snapshot(request)
        return asyncio.shield(
            self.snapshots.do_restore_full(snapshot), loop=self.loop)

    @api_process
    async def restore_partial(self, request):
        """Partially restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)

        return await asyncio.shield(
            self.snapshots.do_restore_partial(snapshot, **body),
            loop=self.loop
        )

    @api_process
    async def remove(self, request):
        """Remove a snapshot."""
        snapshot = self._extract_snapshot(request)
        return self.snapshots.remove(snapshot)
@@ -6,13 +6,19 @@ import voluptuous as vol

 from .util import api_process, api_process_raw, api_validate
 from ..const import (
-    ATTR_ADDONS, ATTR_VERSION, ATTR_CURRENT, ATTR_BETA, HASSIO_VERSION)
+    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
+    HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
+    ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
+    ATTR_STATE, CONTENT_TYPE_BINARY)
+from ..validate import validate_timezone

 _LOGGER = logging.getLogger(__name__)

 SCHEMA_OPTIONS = vol.Schema({
     # pylint: disable=no-value-for-parameter
-    vol.Optional(ATTR_BETA): vol.Boolean(),
+    vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
+    vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
+    vol.Optional(ATTR_TIMEZONE): validate_timezone,
 })

 SCHEMA_VERSION = vol.Schema({
@@ -23,12 +29,16 @@ SCHEMA_VERSION = vol.Schema({
 class APISupervisor(object):
     """Handle rest api for supervisor functions."""

-    def __init__(self, config, loop, supervisor, addons):
+    def __init__(self, config, loop, supervisor, snapshots, addons,
+                 host_control, updater):
         """Initialize supervisor rest api part."""
         self.config = config
         self.loop = loop
         self.supervisor = supervisor
         self.addons = addons
+        self.snapshots = snapshots
+        self.host_control = host_control
+        self.updater = updater

     @api_process
     async def ping(self, request):
@@ -38,32 +48,55 @@ class APISupervisor(object):
     @api_process
     async def info(self, request):
         """Return host information."""
-        info = {
+        list_addons = []
+        for addon in self.addons.list_addons:
+            if addon.is_installed:
+                list_addons.append({
+                    ATTR_NAME: addon.name,
+                    ATTR_SLUG: addon.slug,
+                    ATTR_DESCRIPTON: addon.description,
+                    ATTR_STATE: await addon.state(),
+                    ATTR_VERSION: addon.last_version,
+                    ATTR_INSTALLED: addon.version_installed,
+                    ATTR_REPOSITORY: addon.repository,
+                    ATTR_LOGO: addon.with_logo,
+                })
+
+        return {
             ATTR_VERSION: HASSIO_VERSION,
-            ATTR_CURRENT: self.config.current_hassio,
-            ATTR_BETA: self.config.upstream_beta,
-            ATTR_ADDONS: self.addons.list,
+            ATTR_LAST_VERSION: self.updater.version_hassio,
+            ATTR_BETA_CHANNEL: self.updater.beta_channel,
+            ATTR_ARCH: self.config.arch,
+            ATTR_TIMEZONE: self.config.timezone,
+            ATTR_ADDONS: list_addons,
+            ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
         }
-        return info

     @api_process
     async def options(self, request):
         """Set supervisor options."""
         body = await api_validate(SCHEMA_OPTIONS, request)

-        if ATTR_BETA in body:
-            self.config.upstream_beta = body[ATTR_BETA]
+        if ATTR_BETA_CHANNEL in body:
+            self.updater.beta_channel = body[ATTR_BETA_CHANNEL]

-        return self.config.save()
+        if ATTR_TIMEZONE in body:
+            self.config.timezone = body[ATTR_TIMEZONE]
+
+        if ATTR_ADDONS_REPOSITORIES in body:
+            new = set(body[ATTR_ADDONS_REPOSITORIES])
+            await asyncio.shield(self.addons.load_repositories(new))
+
+        return True

     @api_process
     async def update(self, request):
         """Update supervisor OS."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.config.current_hassio)
+        version = body.get(ATTR_VERSION, self.updater.version_hassio)

         if version == self.supervisor.version:
-            raise RuntimeError("Version is already in use")
+            raise RuntimeError("Version {} is already in use".format(version))

         return await asyncio.shield(
             self.supervisor.update(version), loop=self.loop)
@@ -71,7 +104,12 @@ class APISupervisor(object):
     @api_process
     async def reload(self, request):
         """Reload addons, config etc."""
-        tasks = [self.addons.reload(), self.config.fetch_update_infos()]
+        tasks = [
+            self.addons.reload(),
+            self.snapshots.reload(),
+            self.updater.fetch_data(),
+            self.host_control.load()
+        ]
         results, _ = await asyncio.shield(
             asyncio.wait(tasks, loop=self.loop), loop=self.loop)

@@ -81,7 +119,7 @@ class APISupervisor(object):

         return True

-    @api_process_raw
+    @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
         """Return supervisor docker logs.
@@ -1,5 +1,6 @@
 """Init file for HassIO util for rest api."""
 import json
+import hashlib
 import logging

 from aiohttp import web
@@ -8,7 +9,8 @@ import voluptuous as vol
 from voluptuous.humanize import humanize_error

 from ..const import (
-    JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR)
+    JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
+    CONTENT_TYPE_BINARY)

 _LOGGER = logging.getLogger(__name__)

@@ -32,6 +34,8 @@ def api_process(method):

         if isinstance(answer, dict):
             return api_return_ok(data=answer)
+        if isinstance(answer, web.Response):
+            return answer
         elif answer:
             return api_return_ok()
         return api_return_error()
@@ -39,11 +43,11 @@ def api_process(method):
     return wrap_api


-def api_process_hostcontroll(method):
-    """Wrap HostControll calls to rest api."""
-    async def wrap_hostcontroll(api, *args, **kwargs):
+def api_process_hostcontrol(method):
+    """Wrap HostControl calls to rest api."""
+    async def wrap_hostcontrol(api, *args, **kwargs):
         """Return host information."""
-        if not api.host_controll.active:
+        if not api.host_control.active:
             raise HTTPServiceUnavailable()

         try:
@@ -59,29 +63,37 @@ def api_process_hostcontroll(method):
             return api_return_ok()
         return api_return_error()

-    return wrap_hostcontroll
+    return wrap_hostcontrol


-def api_process_raw(method):
-    """Wrap function with raw output to rest api."""
-    async def wrap_api(api, *args, **kwargs):
-        """Return api information."""
-        try:
-            message = await method(api, *args, **kwargs)
-        except RuntimeError as err:
-            message = str(err).encode()
-
-        return web.Response(body=message)
-
-    return wrap_api
+def api_process_raw(content):
+    """Wrap content_type into function."""
+    def wrap_method(method):
+        """Wrap function with raw output to rest api."""
+        async def wrap_api(api, *args, **kwargs):
+            """Return api information."""
+            try:
+                msg_data = await method(api, *args, **kwargs)
+                msg_type = content
+            except RuntimeError as err:
+                msg_data = str(err).encode()
+                msg_type = CONTENT_TYPE_BINARY
+
+            return web.Response(body=msg_data, content_type=msg_type)
+
+        return wrap_api
+    return wrap_method
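The refactor turns api_process_raw from a plain decorator into a decorator factory: it now takes the response content type first and returns the real decorator, which is why call sites changed from @api_process_raw to @api_process_raw(CONTENT_TYPE_BINARY). A toy version of the same two-level shape:

def with_content_type(content):
    # Outer level: capture the parameter.
    def wrap_method(method):
        # Inner level: the actual decorator wrapping the handler.
        async def wrap_api(*args, **kwargs):
            return await method(*args, **kwargs), content
        return wrap_api
    return wrap_method

@with_content_type('application/octet-stream')
async def dump():
    return b'raw bytes'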


 def api_return_error(message=None):
     """Return an API error message."""
     if message:
         _LOGGER.error(message)

     return web.json_response({
         JSON_RESULT: RESULT_ERROR,
         JSON_MESSAGE: message,
-    })
+    }, status=400)


 def api_return_ok(data=None):
@@ -101,3 +113,9 @@ async def api_validate(schema, request):
         raise RuntimeError(humanize_error(data, ex)) from None

     return data
+
+
+def hash_password(password):
+    """Hash and salt our passwords."""
+    key = ")*()*SALT_HASSIO2123{}6554547485HSKA!!*JSLAfdasda$".format(password)
+    return hashlib.sha256(key.encode()).hexdigest()
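hash_password is a single SHA-256 over the password spliced into one fixed application-wide string, so verification is plain recomputation and comparison:

stored = hash_password('hunter2')            # at password-set time
assert hash_password('hunter2') == stored    # at login time

Because the salt is static and the hash is fast, equal passwords always produce equal digests; a per-password random salt plus a slow KDF such as PBKDF2 would be the hardened variant, but that is commentary here, not what this code does.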

@@ -1,8 +1,8 @@
 """Bootstrap HassIO."""
 import logging
 import os
-import stat
 import signal
+from pathlib import Path

 from colorlog import ColoredFormatter

@@ -12,35 +12,67 @@ from .config import CoreConfig
 _LOGGER = logging.getLogger(__name__)


-def initialize_system_data(websession):
+def initialize_system_data():
     """Setup default config and create folders."""
-    config = CoreConfig(websession)
+    config = CoreConfig()

     # homeassistant config folder
-    if not os.path.isdir(config.path_config):
+    if not config.path_config.is_dir():
         _LOGGER.info(
             "Create Home-Assistant config folder %s", config.path_config)
-        os.mkdir(config.path_config)
+        config.path_config.mkdir()

-    # homeassistant ssl folder
-    if not os.path.isdir(config.path_ssl):
-        _LOGGER.info("Create Home-Assistant ssl folder %s", config.path_ssl)
-        os.mkdir(config.path_ssl)
+    # hassio ssl folder
+    if not config.path_ssl.is_dir():
+        _LOGGER.info("Create hassio ssl folder %s", config.path_ssl)
+        config.path_ssl.mkdir()

-    # homeassistant addon data folder
-    if not os.path.isdir(config.path_addons_data):
-        _LOGGER.info("Create Home-Assistant addon data folder %s",
-                     config.path_addons_data)
-        os.mkdir(config.path_addons_data)
+    # hassio addon data folder
+    if not config.path_addons_data.is_dir():
+        _LOGGER.info(
+            "Create hassio addon data folder %s", config.path_addons_data)
+        config.path_addons_data.mkdir(parents=True)

-    if not os.path.isdir(config.path_addons_custom):
-        _LOGGER.info("Create Home-Assistant addon custom folder %s",
-                     config.path_addons_custom)
-        os.mkdir(config.path_addons_custom)
+    if not config.path_addons_local.is_dir():
+        _LOGGER.info("Create hassio addon local repository folder %s",
+                     config.path_addons_local)
+        config.path_addons_local.mkdir(parents=True)
+
+    if not config.path_addons_git.is_dir():
+        _LOGGER.info("Create hassio addon git repositories folder %s",
+                     config.path_addons_git)
+        config.path_addons_git.mkdir(parents=True)
+
+    # hassio tmp folder
+    if not config.path_tmp.is_dir():
+        _LOGGER.info("Create hassio temp folder %s", config.path_tmp)
+        config.path_tmp.mkdir(parents=True)
+
+    # hassio backup folder
+    if not config.path_backup.is_dir():
+        _LOGGER.info("Create hassio backup folder %s", config.path_backup)
+        config.path_backup.mkdir()
+
+    # share folder
+    if not config.path_share.is_dir():
+        _LOGGER.info("Create hassio share folder %s", config.path_share)
+        config.path_share.mkdir()

     return config


+def migrate_system_env(config):
+    """Cleanup some stuff after update."""
+
+    # hass.io 0.37 -> 0.38
+    old_build = Path(config.path_hassio, "addons/build")
+    if old_build.is_dir():
+        try:
+            old_build.rmdir()
+        except OSError:
+            _LOGGER.warning("Can't cleanup old addons build dir.")
+
+
 def initialize_logging():
     """Setup the logging."""
     logging.basicConfig(level=logging.INFO)
@@ -76,8 +108,7 @@ def check_environment():
         _LOGGER.fatal("Can't find %s in env!", key)
         return False

-    mode = os.stat(SOCKET_DOCKER)[stat.ST_MODE]
-    if not stat.S_ISSOCK(mode):
+    if not SOCKET_DOCKER.is_socket():
         _LOGGER.fatal("Can't find docker socket!")
         return False
hassio/config.py (297 lines)
@@ -1,175 +1,240 @@
 """Bootstrap HassIO."""
+from datetime import datetime
 import logging
 import os
+from pathlib import Path, PurePath

-from .const import FILE_HASSIO_CONFIG, HASSIO_SHARE
-from .tools import (
-    fetch_current_versions, write_json_file, read_json_file)
+from .const import (
+    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
+    ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_API_ENDPOINT,
+    ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT)
+from .tools import JsonConfig
+from .validate import SCHEMA_HASSIO_CONFIG

 _LOGGER = logging.getLogger(__name__)

-HOMEASSISTANT_CONFIG = "{}/homeassistant"
-HOMEASSISTANT_IMAGE = 'homeassistant_image'
-HOMEASSISTANT_CURRENT = 'homeassistant_current'
+DATETIME_FORMAT = "%Y%m%d %H:%M:%S"

-HASSIO_SSL = "{}/ssl"
-HASSIO_CURRENT = 'hassio_current'
-HASSIO_CLEANUP = 'hassio_cleanup'
+HOMEASSISTANT_CONFIG = PurePath("homeassistant")

-ADDONS_REPO = "{}/addons"
-ADDONS_DATA = "{}/addons_data"
-ADDONS_CUSTOM = "{}/addons_custom"
+HASSIO_SSL = PurePath("ssl")

-UPSTREAM_BETA = 'upstream_beta'
+ADDONS_CORE = PurePath("addons/core")
+ADDONS_LOCAL = PurePath("addons/local")
+ADDONS_GIT = PurePath("addons/git")
+ADDONS_DATA = PurePath("addons/data")

-API_ENDPOINT = 'api_endpoint'
+BACKUP_DATA = PurePath("backup")
+SHARE_DATA = PurePath("share")
+TMP_DATA = PurePath("tmp")


-class Config(object):
-    """Hold all config data."""
-
-    def __init__(self, config_file):
-        """Initialize config object."""
-        self._filename = config_file
-        self._data = {}
-
-        # init or load data
-        if os.path.isfile(self._filename):
-            try:
-                self._data = read_json_file(self._filename)
-            except OSError:
-                _LOGGER.warning("Can't read %s", self._filename)
-
-    def save(self):
-        """Store data to config file."""
-        if not write_json_file(self._filename, self._data):
-            _LOGGER.exception("Can't store config in %s", self._filename)
-            return False
-        return True
-
-
-class CoreConfig(Config):
+class CoreConfig(JsonConfig):
     """Hold all core config data."""

-    def __init__(self, websession):
+    def __init__(self):
         """Initialize config object."""
-        self.websession = websession
-
-        super().__init__(FILE_HASSIO_CONFIG)
-
-        # init data
-        if not self._data:
-            self._data.update({
-                HOMEASSISTANT_IMAGE: os.environ['HOMEASSISTANT_REPOSITORY'],
-                UPSTREAM_BETA: False,
-            })
-            self.save()
-
-    async def fetch_update_infos(self):
-        """Read current versions from web."""
-        current = await fetch_current_versions(
-            self.websession, beta=self.upstream_beta)
-
-        if current:
-            self._data.update({
-                HOMEASSISTANT_CURRENT: current.get('homeassistant_tag'),
-                HASSIO_CURRENT: current.get('hassio_tag'),
-            })
-            self.save()
-            return True
-
-        return False
+        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
+        self.arch = None

     @property
     def api_endpoint(self):
         """Return IP address of api endpoint."""
-        return self._data[API_ENDPOINT]
+        return self._data[ATTR_API_ENDPOINT]

     @api_endpoint.setter
     def api_endpoint(self, value):
         """Store IP address of api endpoint."""
-        self._data[API_ENDPOINT] = value
+        self._data[ATTR_API_ENDPOINT] = value

     @property
-    def upstream_beta(self):
-        """Return True if we run in beta upstream."""
-        return self._data.get(UPSTREAM_BETA, False)
+    def timezone(self):
+        """Return system timezone."""
+        return self._data[ATTR_TIMEZONE]

-    @upstream_beta.setter
-    def upstream_beta(self, value):
-        """Set beta upstream mode."""
-        self._data[UPSTREAM_BETA] = bool(value)
-
-    @property
-    def hassio_cleanup(self):
-        """Return version that needs to be cleaned up."""
-        return self._data.get(HASSIO_CLEANUP)
-
-    @hassio_cleanup.setter
-    def hassio_cleanup(self, version):
-        """Set or remove cleanup flag."""
-        if version is None:
-            self._data.pop(HASSIO_CLEANUP, None)
-        else:
-            self._data[HASSIO_CLEANUP] = version
+    @timezone.setter
+    def timezone(self, value):
+        """Set system timezone."""
+        self._data[ATTR_TIMEZONE] = value
+        self.save()

     @property
-    def homeassistant_image(self):
-        """Return docker homeassistant repository."""
-        return self._data.get(HOMEASSISTANT_IMAGE)
+    def path_hassio(self):
+        """Return hassio data path."""
+        return HASSIO_DATA

     @property
-    def current_homeassistant(self):
-        """Actual version of homeassistant."""
-        return self._data.get(HOMEASSISTANT_CURRENT)
-
-    @property
-    def current_hassio(self):
-        """Actual version of hassio."""
-        return self._data.get(HASSIO_CURRENT)
-
-    @property
-    def path_hassio_docker(self):
+    def path_extern_hassio(self):
         """Return hassio data path extern for docker."""
-        return os.environ['SUPERVISOR_SHARE']
+        return PurePath(os.environ['SUPERVISOR_SHARE'])

     @property
-    def path_config_docker(self):
+    def path_extern_config(self):
         """Return config path extern for docker."""
-        return HOMEASSISTANT_CONFIG.format(self.path_hassio_docker)
+        return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))

     @property
     def path_config(self):
         """Return config path inside supervisor."""
-        return HOMEASSISTANT_CONFIG.format(HASSIO_SHARE)
+        return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)

     @property
-    def path_ssl_docker(self):
+    def path_extern_ssl(self):
         """Return SSL path extern for docker."""
-        return HASSIO_SSL.format(self.path_hassio_docker)
+        return str(PurePath(self.path_extern_hassio, HASSIO_SSL))

     @property
     def path_ssl(self):
         """Return SSL path inside supervisor."""
-        return HASSIO_SSL.format(HASSIO_SHARE)
+        return Path(HASSIO_DATA, HASSIO_SSL)

     @property
-    def path_addons_repo(self):
-        """Return git repo path for addons."""
-        return ADDONS_REPO.format(HASSIO_SHARE)
+    def path_addons_core(self):
+        """Return git path for core addons."""
+        return Path(HASSIO_DATA, ADDONS_CORE)

     @property
-    def path_addons_custom(self):
+    def path_addons_git(self):
+        """Return path for git addons."""
+        return Path(HASSIO_DATA, ADDONS_GIT)
+
+    @property
+    def path_addons_local(self):
         """Return path for customs addons."""
-        return ADDONS_CUSTOM.format(HASSIO_SHARE)
+        return Path(HASSIO_DATA, ADDONS_LOCAL)
+
+    @property
+    def path_extern_addons_local(self):
+        """Return path for customs addons."""
+        return PurePath(self.path_extern_hassio, ADDONS_LOCAL)

     @property
     def path_addons_data(self):
         """Return root addon data folder."""
-        return ADDONS_DATA.format(HASSIO_SHARE)
+        return Path(HASSIO_DATA, ADDONS_DATA)

     @property
-    def path_addons_data_docker(self):
+    def path_extern_addons_data(self):
         """Return root addon data folder extern for docker."""
-        return ADDONS_DATA.format(self.path_hassio_docker)
+        return PurePath(self.path_extern_hassio, ADDONS_DATA)

+    @property
+    def path_tmp(self):
+        """Return hass.io temp folder."""
+        return Path(HASSIO_DATA, TMP_DATA)
+
+    @property
+    def path_backup(self):
+        """Return root backup data folder."""
+        return Path(HASSIO_DATA, BACKUP_DATA)
+
+    @property
+    def path_extern_backup(self):
+        """Return root backup data folder extern for docker."""
+        return PurePath(self.path_extern_hassio, BACKUP_DATA)
+
+    @property
+    def path_share(self):
+        """Return root share data folder."""
+        return Path(HASSIO_DATA, SHARE_DATA)
+
+    @property
+    def path_extern_share(self):
+        """Return root share data folder extern for docker."""
+        return PurePath(self.path_extern_hassio, SHARE_DATA)
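The naming split above is systematic: path_* returns a concrete Path usable inside the supervisor container (where /data is mounted), while path_extern_* returns a PurePath anchored at SUPERVISOR_SHARE, the host-side location that is only ever joined and stringified for Docker volume binds, never touched as a filesystem. A small illustration (the host path is made up):

from pathlib import Path, PurePath

inside = Path('/data', 'share')                     # real, can be mkdir()'d
extern = PurePath('/mnt/data/supervisor', 'share')  # lexical only

inside.mkdir(parents=True, exist_ok=True)
bind = {str(extern): {'bind': '/share', 'mode': 'rw'}}  # docker-py volume dict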

+    @property
+    def addons_repositories(self):
+        """Return list of custom add-on repositories."""
+        return self._data[ATTR_ADDONS_CUSTOM_LIST]
+
+    def add_addon_repository(self, repo):
+        """Add a custom repository to list."""
+        if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
+            return
+
+        self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
+        self.save()
+
+    def drop_addon_repository(self, repo):
+        """Remove a custom repository from list."""
+        if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
+            return
+
+        self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
+        self.save()
+
+    @property
+    def security_initialize(self):
+        """Return True if security was initialized."""
+        return self._data[ATTR_SECURITY]
+
+    @security_initialize.setter
+    def security_initialize(self, value):
+        """Set whether security is initialized."""
+        self._data[ATTR_SECURITY] = value
+        self.save()
+
+    @property
+    def security_totp(self):
+        """Return the TOTP key."""
+        return self._data.get(ATTR_TOTP)
+
+    @security_totp.setter
+    def security_totp(self, value):
+        """Set the TOTP key."""
+        self._data[ATTR_TOTP] = value
+        self.save()
+
+    @property
+    def security_password(self):
+        """Return the password key."""
+        return self._data.get(ATTR_PASSWORD)
+
+    @security_password.setter
+    def security_password(self, value):
+        """Set the password key."""
+        self._data[ATTR_PASSWORD] = value
+        self.save()
+
+    @property
+    def security_sessions(self):
+        """Return api sessions."""
+        return {
+            session: datetime.strptime(until, DATETIME_FORMAT) for
+            session, until in self._data[ATTR_SESSIONS].items()
+        }
+
+    def add_security_session(self, session, valid):
+        """Store a new session."""
+        self._data[ATTR_SESSIONS].update(
+            {session: valid.strftime(DATETIME_FORMAT)}
+        )
+        self.save()
+
+    def drop_security_session(self, session):
+        """Delete a session."""
+        self._data[ATTR_SESSIONS].pop(session, None)
+        self.save()
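Sessions are persisted as strings so that expiry survives a supervisor restart; DATETIME_FORMAT makes the strftime/strptime pair above lossless down to whole seconds:

from datetime import datetime, timedelta

DATETIME_FORMAT = "%Y%m%d %H:%M:%S"

valid_until = datetime.now() + timedelta(days=1)
stored = valid_until.strftime(DATETIME_FORMAT)        # e.g. '20170708 12:30:05'
restored = datetime.strptime(stored, DATETIME_FORMAT)
assert restored == valid_until.replace(microsecond=0)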
+
+    @property
+    def audio_output(self):
+        """Return ALSA audio output card,dev."""
+        return self._data.get(ATTR_AUDIO_OUTPUT)
+
+    @audio_output.setter
+    def audio_output(self, value):
+        """Set ALSA audio output card,dev."""
+        self._data[ATTR_AUDIO_OUTPUT] = value
+        self.save()
+
+    @property
+    def audio_input(self):
+        """Return ALSA audio input card,dev."""
+        return self._data.get(ATTR_AUDIO_INPUT)
+
+    @audio_input.setter
+    def audio_input(self, value):
+        """Set ALSA audio input card,dev."""
+        self._data[ATTR_AUDIO_INPUT] = value
+        self.save()
hassio/const.py (127 lines)
@@ -1,28 +1,40 @@
 """Const file for HassIO."""
-HASSIO_VERSION = '0.14'
+from pathlib import Path

-URL_HASSIO_VERSION = \
-    'https://raw.githubusercontent.com/pvizeli/hassio/master/version.json'
-URL_HASSIO_VERSION_BETA = \
-    'https://raw.githubusercontent.com/pvizeli/hassio/master/version_beta.json'
+HASSIO_VERSION = '0.52'

-URL_HASSIO_ADDONS = 'https://github.com/pvizeli/hassio-addons'
+URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
+                      'hassio/{}/version.json')

-DOCKER_REPO = "pvizeli"
+URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'

-HASSIO_SHARE = "/data"
+HASSIO_DATA = Path("/data")

 RUN_UPDATE_INFO_TASKS = 28800
+RUN_UPDATE_SUPERVISOR_TASKS = 29100
+RUN_UPDATE_ADDONS_TASKS = 57600
 RUN_RELOAD_ADDONS_TASKS = 28800
+RUN_RELOAD_SNAPSHOTS_TASKS = 72000
+RUN_WATCHDOG_HOMEASSISTANT = 15
+RUN_CLEANUP_API_SESSIONS = 900
+
+RESTART_EXIT_CODE = 100

-FILE_HASSIO_ADDONS = "{}/addons.json".format(HASSIO_SHARE)
-FILE_HASSIO_CONFIG = "{}/config.json".format(HASSIO_SHARE)
+FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
+FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
+FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
+FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")

-SOCKET_DOCKER = "/var/run/docker.sock"
-SOCKET_HC = "/var/run/hassio-hc.sock"
+SOCKET_DOCKER = Path("/var/run/docker.sock")
+SOCKET_HC = Path("/var/run/hassio-hc.sock")

+LABEL_VERSION = 'io.hass.version'
+LABEL_ARCH = 'io.hass.arch'
+LABEL_TYPE = 'io.hass.type'
+
+META_ADDON = 'addon'
+META_SUPERVISOR = 'supervisor'
+META_HOMEASSISTANT = 'homeassistant'

 JSON_RESULT = 'result'
 JSON_DATA = 'data'
@@ -31,27 +43,81 @@ JSON_MESSAGE = 'message'
 RESULT_ERROR = 'error'
 RESULT_OK = 'ok'

+CONTENT_TYPE_BINARY = 'application/octet-stream'
+CONTENT_TYPE_PNG = 'image/png'
+
+ATTR_DATE = 'date'
+ATTR_ARCH = 'arch'
 ATTR_HOSTNAME = 'hostname'
+ATTR_TIMEZONE = 'timezone'
 ATTR_OS = 'os'
+ATTR_TYPE = 'type'
+ATTR_SOURCE = 'source'
+ATTR_FEATURES = 'features'
 ATTR_ADDONS = 'addons'
 ATTR_VERSION = 'version'
-ATTR_CURRENT = 'current'
-ATTR_BETA = 'beta'
+ATTR_LAST_VERSION = 'last_version'
+ATTR_BETA_CHANNEL = 'beta_channel'
 ATTR_NAME = 'name'
 ATTR_SLUG = 'slug'
 ATTR_DESCRIPTON = 'description'
 ATTR_STARTUP = 'startup'
 ATTR_BOOT = 'boot'
 ATTR_PORTS = 'ports'
-ATTR_MAP_CONFIG = 'map_config'
-ATTR_MAP_SSL = 'map_ssl'
+ATTR_MAP = 'map'
+ATTR_WEBUI = 'webui'
 ATTR_OPTIONS = 'options'
 ATTR_INSTALLED = 'installed'
-ATTR_DEDICATED = 'dedicated'
+ATTR_DETACHED = 'detached'
 ATTR_STATE = 'state'
 ATTR_SCHEMA = 'schema'
 ATTR_IMAGE = 'image'
+ATTR_LOGO = 'logo'
+ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
+ATTR_REPOSITORY = 'repository'
+ATTR_REPOSITORIES = 'repositories'
+ATTR_URL = 'url'
+ATTR_MAINTAINER = 'maintainer'
+ATTR_PASSWORD = 'password'
+ATTR_TOTP = 'totp'
+ATTR_INITIALIZE = 'initialize'
+ATTR_SESSION = 'session'
+ATTR_SESSIONS = 'sessions'
+ATTR_LOCATON = 'location'
+ATTR_BUILD = 'build'
+ATTR_DEVICES = 'devices'
+ATTR_ENVIRONMENT = 'environment'
+ATTR_HOST_NETWORK = 'host_network'
+ATTR_NETWORK = 'network'
+ATTR_TMPFS = 'tmpfs'
+ATTR_PRIVILEGED = 'privileged'
+ATTR_USER = 'user'
+ATTR_SYSTEM = 'system'
+ATTR_SNAPSHOTS = 'snapshots'
+ATTR_HOMEASSISTANT = 'homeassistant'
+ATTR_HASSIO = 'hassio'
+ATTR_HASSIO_API = 'hassio_api'
+ATTR_FOLDERS = 'folders'
+ATTR_SIZE = 'size'
+ATTR_TIMEOUT = 'timeout'
+ATTR_AUTO_UPDATE = 'auto_update'
+ATTR_CUSTOM = 'custom'
+ATTR_AUDIO = 'audio'
+ATTR_AUDIO_INPUT = 'audio_input'
+ATTR_AUDIO_OUTPUT = 'audio_output'
+ATTR_INPUT = 'input'
+ATTR_OUTPUT = 'output'
+ATTR_DISK = 'disk'
+ATTR_SERIAL = 'serial'
+ATTR_SECURITY = 'security'
+ATTR_API_ENDPOINT = 'api_endpoint'
+ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'

-STARTUP_BEFORE = 'before'
-STARTUP_AFTER = 'after'
+STARTUP_INITIALIZE = 'initialize'
+STARTUP_SYSTEM = 'system'
+STARTUP_SERVICES = 'services'
+STARTUP_APPLICATION = 'application'
+STARTUP_ONCE = 'once'

 BOOT_AUTO = 'auto'
@@ -59,3 +125,26 @@ BOOT_MANUAL = 'manual'

 STATE_STARTED = 'started'
 STATE_STOPPED = 'stopped'
+STATE_NONE = 'none'
+
+MAP_CONFIG = 'config'
+MAP_SSL = 'ssl'
+MAP_ADDONS = 'addons'
+MAP_BACKUP = 'backup'
+MAP_SHARE = 'share'
+
+ARCH_ARMHF = 'armhf'
+ARCH_AARCH64 = 'aarch64'
+ARCH_AMD64 = 'amd64'
+ARCH_I386 = 'i386'
+
+REPOSITORY_CORE = 'core'
+REPOSITORY_LOCAL = 'local'
+
+FOLDER_HOMEASSISTANT = 'homeassistant'
+FOLDER_SHARE = 'share'
+FOLDER_ADDONS = 'addons/local'
+FOLDER_SSL = 'ssl'
+
+SNAPSHOT_FULL = 'full'
+SNAPSHOT_PARTIAL = 'partial'
hassio/core.py (189 lines)
@@ -5,17 +5,24 @@ import logging
 import aiohttp
 import docker

-from . import bootstrap
 from .addons import AddonManager
 from .api import RestAPI
-from .host_controll import HostControll
+from .host_control import HostControl
 from .const import (
     SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
-    RUN_UPDATE_SUPERVISOR_TASKS, STARTUP_AFTER, STARTUP_BEFORE)
+    RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
+    RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
+    STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
+    RUN_UPDATE_ADDONS_TASKS)
+from .hardware import Hardware
+from .homeassistant import HomeAssistant
 from .scheduler import Scheduler
-from .dock.homeassistant import DockerHomeAssistant
 from .dock.supervisor import DockerSupervisor
-from .tools import get_arch_from_image, get_local_ip
+from .snapshots import SnapshotsManager
+from .updater import Updater
+from .tasks import (
+    hassio_update, homeassistant_watchdog, api_sessions_cleanup, addons_update)
+from .tools import get_local_ip, fetch_timezone

 _LOGGER = logging.getLogger(__name__)

@@ -23,130 +30,150 @@ _LOGGER = logging.getLogger(__name__)
 class HassIO(object):
     """Main object of hassio."""

-    def __init__(self, loop):
+    def __init__(self, loop, config):
         """Initialize hassio object."""
         self.exit_code = 0
         self.loop = loop
-        self.websession = aiohttp.ClientSession(loop=self.loop)
-        self.config = bootstrap.initialize_system_data(self.websession)
-        self.scheduler = Scheduler(self.loop)
-        self.api = RestAPI(self.config, self.loop)
+        self.config = config
+        self.websession = aiohttp.ClientSession(loop=loop)
+        self.updater = Updater(config, loop, self.websession)
+        self.scheduler = Scheduler(loop)
+        self.api = RestAPI(config, loop)
+        self.hardware = Hardware()
         self.dock = docker.DockerClient(
-            base_url="unix:/{}".format(SOCKET_DOCKER), version='auto')
+            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')

         # init basic docker container
-        self.supervisor = DockerSupervisor(
-            self.config, self.loop, self.dock, self)
-        self.homeassistant = DockerHomeAssistant(
-            self.config, self.loop, self.dock)
+        self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)

-        # init HostControll
-        self.host_controll = HostControll(self.loop)
+        # init homeassistant
+        self.homeassistant = HomeAssistant(
+            config, loop, self.dock, self.updater)
+
+        # init HostControl
+        self.host_control = HostControl(loop)

         # init addon system
-        self.addons = AddonManager(self.config, self.loop, self.dock)
+        self.addons = AddonManager(config, loop, self.dock)
+
+        # init snapshot system
+        self.snapshots = SnapshotsManager(
+            config, loop, self.scheduler, self.addons, self.homeassistant)

     async def setup(self):
         """Setup HassIO orchestration."""
         # supervisor
-        await self.supervisor.attach()
+        if not await self.supervisor.attach():
+            _LOGGER.fatal("Can't attach to supervisor docker container!")
+        await self.supervisor.cleanup()
+
+        # set running arch
+        self.config.arch = self.supervisor.arch

         # set api endpoint
         self.config.api_endpoint = await get_local_ip(self.loop)

-        # hostcontroll
-        host_info = await self.host_controll.info()
-        if host_info:
-            self.host_controll.version = host_info.get('version')
-            _LOGGER.info(
-                "Connected to HostControll. OS: %s Version: %s Hostname: %s "
-                "Feature-lvl: %d", host_info.get('os'),
-                host_info.get('version'), host_info.get('hostname'),
-                host_info.get('level', 0))
+        # update timezone
+        if self.config.timezone == 'UTC':
+            self.config.timezone = await fetch_timezone(self.websession)

-        # rest api views
-        self.api.register_host(self.host_controll)
-        self.api.register_network(self.host_controll)
-        self.api.register_supervisor(self.supervisor, self.addons)
-        self.api.register_homeassistant(self.homeassistant)
-        self.api.register_addons(self.addons)
+        # hostcontrol
+        await self.host_control.load()

         # schedule update info tasks
         self.scheduler.register_task(
-            self.config.fetch_update_infos, RUN_UPDATE_INFO_TASKS,
+            self.host_control.load, RUN_UPDATE_INFO_TASKS)

+        # rest api views
+        self.api.register_host(self.host_control, self.hardware)
+        self.api.register_network(self.host_control)
+        self.api.register_supervisor(
+            self.supervisor, self.snapshots, self.addons, self.host_control,
+            self.updater)
+        self.api.register_homeassistant(self.homeassistant)
+        self.api.register_addons(self.addons)
+        self.api.register_security()
+        self.api.register_snapshots(self.snapshots)
+        self.api.register_panel()
+
+        # schedule api session cleanup
+        self.scheduler.register_task(
+            api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
+            now=True)
+
-        # first start of supervisor?
-        if not await self.homeassistant.exists():
-            _LOGGER.info("No HomeAssistant docker found.")
-            await self._setup_homeassistant()
+        # Load homeassistant
+        await self.homeassistant.prepare()

         # Load addons
-        arch = get_arch_from_image(self.supervisor.image)
-        await self.addons.prepare(arch)
+        await self.addons.prepare()

         # schedule addon update task
         self.scheduler.register_task(
             self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
+        self.scheduler.register_task(
+            addons_update(self.loop, self.addons), RUN_UPDATE_ADDONS_TASKS)

         # schedule self update task
         self.scheduler.register_task(
-            self._hassio_update, RUN_UPDATE_SUPERVISOR_TASKS)
+            hassio_update(self.supervisor, self.updater),
+            RUN_UPDATE_SUPERVISOR_TASKS)

+        # schedule snapshot update tasks
+        self.scheduler.register_task(
+            self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
+
+        # start addons marked as initialize
+        await self.addons.auto_boot(STARTUP_INITIALIZE)
+
     async def start(self):
         """Start HassIO orchestration."""
+        # on release channel, try to update itself
+        # on beta channel, only read new versions
+        await asyncio.wait(
+            [hassio_update(self.supervisor, self.updater)()],
+            loop=self.loop
+        )
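Note the call shape hassio_update(self.supervisor, self.updater)(): the functions imported from .tasks appear to be factories that bind their dependencies once and return a zero-argument callable the scheduler (or asyncio.wait, as here) can invoke repeatedly. A toy sketch of that pattern (make_task and dep.do_work are hypothetical names):

def make_task(dep):
    # Bind the dependency now; the scheduler only sees a no-arg coroutine.
    async def _task():
        await dep.do_work()
    return _task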

         # start api
         await self.api.start()
         _LOGGER.info("Start hassio api on %s", self.config.api_endpoint)

-        # HomeAssistant is already running / supervisor has only rebooted
-        if await self.homeassistant.is_running():
-            _LOGGER.info("HassIO reboot detected")
-            return
+        try:
+            # HomeAssistant is already running / supervisor has only rebooted
+            if await self.homeassistant.is_running():
+                _LOGGER.info("HassIO reboot detected")
+                return

-        # start addons marked as before
-        await self.addons.auto_boot(STARTUP_BEFORE)
+            # start addons marked as system
+            await self.addons.auto_boot(STARTUP_SYSTEM)

-        # run HomeAssistant
-        await self.homeassistant.run()
+            # start addons marked as services
+            await self.addons.auto_boot(STARTUP_SERVICES)

-        # start addons marked as after
-        await self.addons.auto_boot(STARTUP_AFTER)
+            # run HomeAssistant
+            await self.homeassistant.run()
+
+            # start addons marked as application
+            await self.addons.auto_boot(STARTUP_APPLICATION)
+
+        finally:
+            # schedule homeassistant watchdog
+            self.scheduler.register_task(
+                homeassistant_watchdog(self.loop, self.homeassistant),
+                RUN_WATCHDOG_HOMEASSISTANT)
+
+            # If landingpage / run upgrade in background
+            if self.homeassistant.version == 'landingpage':
+                self.loop.create_task(self.homeassistant.install())

     async def stop(self, exit_code=0):
         """Stop a running orchestration."""
         # don't process scheduler anymore
-        self.scheduler.stop()
+        self.scheduler.suspend = True

-        # process stop tasks in parallel
-        tasks = [self.websession.close(), self.api.stop()]
-        await asyncio.wait(tasks, loop=self.loop)
+        # process stop tasks
+        self.websession.close()
+        await self.api.stop()

         self.exit_code = exit_code
         self.loop.stop()

-    async def _setup_homeassistant(self):
-        """Install a homeassistant docker container."""
-        while True:
-            # read homeassistant tag and install it
-            if not self.config.current_homeassistant:
-                await self.config.fetch_update_infos()
-
-            tag = self.config.current_homeassistant
-            if tag and await self.homeassistant.install(tag):
-                break
-            _LOGGER.warning("Error on setup HomeAssistant. Retry in 60.")
-            await asyncio.sleep(60, loop=self.loop)
-
-        # store version
-        _LOGGER.info("HomeAssistant docker now installed.")
-
-    async def _hassio_update(self):
-        """Check and run update of supervisor hassio."""
-        if self.config.current_hassio == self.supervisor.version:
-            return
-
-        _LOGGER.info(
-            "Found new HassIO version %s.", self.config.current_hassio)
-        await self.supervisor.update(self.config.current_hassio)
@@ -5,7 +5,7 @@ import logging

 import docker

-from ..tools import get_version_from_env
+from ..const import LABEL_VERSION, LABEL_ARCH

 _LOGGER = logging.getLogger(__name__)

@@ -13,18 +13,19 @@ _LOGGER = logging.getLogger(__name__)
 class DockerBase(object):
     """Docker hassio wrapper."""

-    def __init__(self, config, loop, dock, image=None):
+    def __init__(self, config, loop, dock, image=None, timeout=30):
         """Initialize docker base wrapper."""
         self.config = config
         self.loop = loop
         self.dock = dock
         self.image = image
-        self.container = None
+        self.timeout = timeout
         self.version = None
+        self.arch = None
         self._lock = asyncio.Lock(loop=loop)

     @property
-    def docker_name(self):
+    def name(self):
         """Return name of docker container."""
         return None

@@ -33,6 +34,24 @@ class DockerBase(object):
         """Return True if a task is in progress."""
         return self._lock.locked()

+    def process_metadata(self, metadata, force=False):
+        """Read metadata and set it to object."""
+        # read image
+        if not self.image:
+            self.image = metadata['Config']['Image']
+
+        # read version
+        need_version = force or not self.version
+        if need_version and LABEL_VERSION in metadata['Config']['Labels']:
+            self.version = metadata['Config']['Labels'][LABEL_VERSION]
+        elif need_version:
+            _LOGGER.warning("Can't read version from %s", self.name)
+
+        # read arch
+        need_arch = force or not self.arch
+        if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
+            self.arch = metadata['Config']['Labels'][LABEL_ARCH]
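process_metadata reads from the raw attrs dict that docker-py exposes on images and containers (essentially docker inspect output). The relevant slice has this shape (values illustrative; the label keys are the LABEL_* constants from const.py):

metadata = {
    'Config': {
        'Image': 'homeassistant/amd64-hassio-supervisor',
        'Labels': {
            'io.hass.version': '0.52',     # LABEL_VERSION
            'io.hass.arch': 'amd64',       # LABEL_ARCH
            'io.hass.type': 'supervisor',  # LABEL_TYPE
        },
    },
}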

     async def install(self, tag):
         """Pull docker image."""
         if self._lock.locked():
@@ -52,12 +71,12 @@ class DockerBase(object):
             image = self.dock.images.pull("{}:{}".format(self.image, tag))

             image.tag(self.image, tag='latest')
-            self.version = get_version_from_env(image.attrs['Config']['Env'])
-            _LOGGER.info("Tag image %s with version %s as latest",
-                         self.image, self.version)
+            self.process_metadata(image.attrs, force=True)
         except docker.errors.APIError as err:
             _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
             return False

+        _LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
         return True

     def exists(self):
@@ -73,8 +92,7 @@ class DockerBase(object):
         Need run inside executor.
         """
         try:
-            image = self.dock.images.get(self.image)
-            self.version = get_version_from_env(image.attrs['Config']['Env'])
+            self.dock.images.get(self.image)
         except docker.errors.DockerException:
             return False

@@ -92,17 +110,21 @@ class DockerBase(object):

         Need run inside executor.
         """
-        if not self.container:
-            try:
-                self.container = self.dock.containers.get(self.docker_name)
-                self.version = get_version_from_env(
-                    self.container.attrs['Config']['Env'])
-            except docker.errors.DockerException:
-                return False
-        else:
-            self.container.reload()
+        try:
+            container = self.dock.containers.get(self.name)
+            image = self.dock.images.get(self.image)
+        except docker.errors.DockerException:
+            return False

-        return self.container.status == 'running'
+        # container is not running
+        if container.status != 'running':
+            return False
+
+        # we run on an old image, stop and start it
+        if container.image.id != image.id:
+            return False
+
+        return True

     async def attach(self):
         """Attach to running docker container."""
@@ -119,17 +141,17 @@ class DockerBase(object):
         Need run inside executor.
         """
         try:
-            self.container = self.dock.containers.get(self.docker_name)
-            self.image = self.container.attrs['Config']['Image']
-            self.version = get_version_from_env(
-                self.container.attrs['Config']['Env'])
-            _LOGGER.info("Attach to image %s with version %s",
-                         self.image, self.version)
-        except (docker.errors.DockerException, KeyError):
-            _LOGGER.fatal(
-                "Can't attach to %s docker container!", self.docker_name)
+            if self.image:
+                obj_data = self.dock.images.get(self.image).attrs
+            else:
+                obj_data = self.dock.containers.get(self.name).attrs
+        except docker.errors.DockerException:
+            return False

+        self.process_metadata(obj_data)
+        _LOGGER.info(
+            "Attach to image %s with version %s", self.image, self.version)

         return True

     async def run(self):
@@ -163,23 +185,22 @@ class DockerBase(object):

         Need run inside executor.
         """
-        if not self.container:
+        try:
+            container = self.dock.containers.get(self.name)
+        except docker.errors.DockerException:
             return

-        _LOGGER.info("Stop %s docker application", self.image)
-
-        self.container.reload()
-        if self.container.status == 'running':
+        if container.status == 'running':
+            _LOGGER.info("Stop %s docker application", self.image)
             with suppress(docker.errors.DockerException):
-                self.container.stop()
+                container.stop(timeout=self.timeout)

         with suppress(docker.errors.DockerException):
-            self.container.remove(force=True)
|
||||
|
||||
self.container = None
|
||||
_LOGGER.info("Clean %s docker application", self.image)
|
||||
container.remove(force=True)
|
||||
|
||||
async def remove(self):
|
||||
"""Remove docker container."""
|
||||
"""Remove docker images."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute remove while a task is in progress")
|
||||
return False
|
||||
@@ -188,25 +209,32 @@ class DockerBase(object):
|
||||
return await self.loop.run_in_executor(None, self._remove)
|
||||
|
||||
def _remove(self):
|
||||
"""remove docker container.
|
||||
"""remove docker images.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
if self._is_running():
|
||||
self._stop()
|
||||
# cleanup container
|
||||
self._stop()
|
||||
|
||||
_LOGGER.info("Remove docker %s with latest and %s",
|
||||
self.image, self.version)
|
||||
_LOGGER.info(
|
||||
"Remove docker %s with latest and %s", self.image, self.version)
|
||||
|
||||
try:
|
||||
self.dock.images.remove(
|
||||
image="{}:latest".format(self.image), force=True)
|
||||
self.dock.images.remove(
|
||||
image="{}:{}".format(self.image, self.version), force=True)
|
||||
with suppress(docker.errors.ImageNotFound):
|
||||
self.dock.images.remove(
|
||||
image="{}:latest".format(self.image), force=True)
|
||||
|
||||
with suppress(docker.errors.ImageNotFound):
|
||||
self.dock.images.remove(
|
||||
image="{}:{}".format(self.image, self.version), force=True)
|
||||
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.warning("Can't remove image %s -> %s", self.image, err)
|
||||
return False
|
||||
|
||||
# clean metadata
|
||||
self.version = None
|
||||
self.arch = None
|
||||
return True
|
||||
|
||||
async def update(self, tag):
|
||||
@@ -223,29 +251,31 @@ class DockerBase(object):
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
old_image = "{}:{}".format(self.image, self.version)
|
||||
was_running = self._is_running()
|
||||
|
||||
_LOGGER.info("Update docker %s with %s:%s",
|
||||
old_image, self.image, tag)
|
||||
_LOGGER.info(
|
||||
"Update docker %s with %s:%s", self.version, self.image, tag)
|
||||
|
||||
# update docker image
|
||||
if self._install(tag):
|
||||
_LOGGER.info("Cleanup old %s docker", old_image)
|
||||
self._stop()
|
||||
try:
|
||||
self.dock.images.remove(image=old_image, force=True)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.warning(
|
||||
"Can't remove old image %s -> %s", old_image, err)
|
||||
return True
|
||||
if not self._install(tag):
|
||||
return False
|
||||
|
||||
return False
|
||||
# run or cleanup container
|
||||
if was_running:
|
||||
self._run()
|
||||
else:
|
||||
self._stop()
|
||||
|
||||
# cleanup images
|
||||
self._cleanup()
|
||||
|
||||
return True
|
||||
|
||||
async def logs(self):
|
||||
"""Return docker logs of container."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute logs while a task is in progress")
|
||||
return False
|
||||
return b""
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._logs)
|
||||
@@ -255,10 +285,69 @@ class DockerBase(object):
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
if not self.container:
|
||||
return
|
||||
try:
|
||||
container = self.dock.containers.get(self.name)
|
||||
except docker.errors.DockerException:
|
||||
return b""
|
||||
|
||||
try:
|
||||
return self.container.logs(tail=100, stdout=True, stderr=True)
|
||||
return container.logs(tail=100, stdout=True, stderr=True)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.warning("Can't grap logs from %s -> %s", self.image, err)
|
||||
|
||||
async def restart(self):
|
||||
"""Restart docker container."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute restart while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._restart)
|
||||
|
||||
def _restart(self):
|
||||
"""Restart docker container.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
container = self.dock.containers.get(self.name)
|
||||
except docker.errors.DockerException:
|
||||
return False
|
||||
|
||||
_LOGGER.info("Restart %s", self.image)
|
||||
|
||||
try:
|
||||
container.restart(timeout=self.timeout)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.warning("Can't restart %s -> %s", self.image, err)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
async def cleanup(self):
|
||||
"""Check if old version exists and cleanup."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute cleanup while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
await self.loop.run_in_executor(None, self._cleanup)
|
||||
|
||||
def _cleanup(self):
|
||||
"""Check if old version exists and cleanup.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
latest = self.dock.images.get(self.image)
|
||||
except docker.errors.DockerException:
|
||||
_LOGGER.warning("Can't find %s for cleanup", self.image)
|
||||
return
|
||||
|
||||
for image in self.dock.images.list(name=self.image):
|
||||
if latest.id == image.id:
|
||||
continue
|
||||
|
||||
with suppress(docker.errors.DockerException):
|
||||
_LOGGER.info("Cleanup docker images: %s", image.tags)
|
||||
self.dock.images.remove(image.id, force=True)
|
||||
|
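For context, a minimal sketch of the image metadata that the new process_metadata() consumes. The dict shape follows docker-py's Image.attrs; the label keys are assumed to match what create_metadata() in hassio/dock/util.py (further below) writes, and the concrete values are illustrative.

# Illustrative attrs dict (values assumed) for
# DockerBase.process_metadata(metadata, force=True):
metadata = {
    'Config': {
        'Image': 'homeassistant/amd64-addon-example',
        'Labels': {
            'io.hass.version': '1.1',  # read via LABEL_VERSION
            'io.hass.arch': 'amd64',   # read via LABEL_ARCH
            'io.hass.type': 'addon',
        },
    },
}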
@@ -1,30 +1,128 @@
 """Init file for HassIO addon docker object."""
 import logging
+from pathlib import Path
+import shutil
 
 import docker
+import requests
 
 from . import DockerBase
-from ..tools import get_version_from_env
+from .util import dockerfile_template
+from ..const import (
+    META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
 
 _LOGGER = logging.getLogger(__name__)
 
-HASS_DOCKER_NAME = 'homeassistant'
+AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
 
 
 class DockerAddon(DockerBase):
     """Docker hassio wrapper for HomeAssistant."""
 
-    def __init__(self, config, loop, dock, addons_data, addon):
+    def __init__(self, config, loop, dock, addon):
         """Initialize docker homeassistant wrapper."""
         super().__init__(
-            config, loop, dock, image=addons_data.get_image(addon))
+            config, loop, dock, image=addon.image, timeout=addon.timeout)
         self.addon = addon
-        self.addons_data = addons_data
 
     @property
-    def docker_name(self):
+    def name(self):
         """Return name of docker container."""
-        return "addon_{}".format(self.addon)
+        return "addon_{}".format(self.addon.slug)
+
+    @property
+    def hostname(self):
+        """Return slug/id of addon."""
+        return self.addon.slug.replace('_', '-')
+
+    @property
+    def environment(self):
+        """Return environment for docker add-on."""
+        addon_env = self.addon.environment or {}
+        if self.addon.with_audio:
+            addon_env.update({
+                'ALSA_OUTPUT': self.addon.audio_output,
+                'ALSA_INPUT': self.addon.audio_input,
+            })
+
+        return {
+            **addon_env,
+            'TZ': self.config.timezone,
+        }
+
+    @property
+    def devices(self):
+        """Return needed devices."""
+        devices = self.addon.devices or []
+
+        # use audio devices
+        if self.addon.with_audio and AUDIO_DEVICE not in devices:
+            devices.append(AUDIO_DEVICE)
+
+        # Return None if no device is present
+        if devices:
+            return devices
+        return None
+
+    @property
+    def tmpfs(self):
+        """Return tmpfs for docker add-on."""
+        options = self.addon.tmpfs
+        if options:
+            return {"/tmpfs": "{}".format(options)}
+        return None
+
+    @property
+    def mapping(self):
+        """Return hosts mapping."""
+        if not self.addon.use_hassio_api:
+            return None
+
+        return {
+            'hassio': self.config.api_endpoint,
+        }
+
+    @property
+    def volumes(self):
+        """Generate volumes for mappings."""
+        volumes = {
+            str(self.addon.path_extern_data): {
+                'bind': '/data', 'mode': 'rw'
+            }}
+
+        addon_mapping = self.addon.map_volumes
+
+        if MAP_CONFIG in addon_mapping:
+            volumes.update({
+                str(self.config.path_extern_config): {
+                    'bind': '/config', 'mode': addon_mapping[MAP_CONFIG]
+                }})
+
+        if MAP_SSL in addon_mapping:
+            volumes.update({
+                str(self.config.path_extern_ssl): {
+                    'bind': '/ssl', 'mode': addon_mapping[MAP_SSL]
+                }})
+
+        if MAP_ADDONS in addon_mapping:
+            volumes.update({
+                str(self.config.path_extern_addons_local): {
+                    'bind': '/addons', 'mode': addon_mapping[MAP_ADDONS]
+                }})
+
+        if MAP_BACKUP in addon_mapping:
+            volumes.update({
+                str(self.config.path_extern_backup): {
+                    'bind': '/backup', 'mode': addon_mapping[MAP_BACKUP]
+                }})
+
+        if MAP_SHARE in addon_mapping:
+            volumes.update({
+                str(self.config.path_extern_share): {
+                    'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
+                }})
+
+        return volumes
 
     def _run(self):
         """Run docker image.
@@ -32,59 +130,161 @@ class DockerAddon(DockerBase):
         Need run inside executor.
         """
         if self._is_running():
-            return
+            return True
 
-        # cleanup old container
+        # cleanup
         self._stop()
 
-        # volumes
-        volumes = {
-            self.addons_data.path_data_docker(self.addon): {
-                'bind': '/data', 'mode': 'rw'
-            }}
-        if self.addons_data.need_config(self.addon):
-            volumes.update({
-                self.config.path_config_docker: {
-                    'bind': '/config', 'mode': 'rw'
-                }})
-        if self.addons_data.need_ssl(self.addon):
-            volumes.update({
-                self.config.path_ssl_docker: {
-                    'bind': '/ssl', 'mode': 'rw'
-                }})
+        # write config
+        if not self.addon.write_options():
+            return False
 
         try:
-            self.container = self.dock.containers.run(
+            self.dock.containers.run(
                 self.image,
-                name=self.docker_name,
+                name=self.name,
+                hostname=self.hostname,
                 detach=True,
-                network_mode='bridge',
-                ports=self.addons_data.get_ports(self.addon),
-                volumes=volumes,
+                network_mode=self.addon.network_mode,
+                ports=self.addon.ports,
+                extra_hosts=self.mapping,
+                devices=self.devices,
+                cap_add=self.addon.privileged,
+                environment=self.environment,
+                volumes=self.volumes,
+                tmpfs=self.tmpfs
             )
 
-            self.version = get_version_from_env(
-                self.container.attrs['Config']['Env'])
-
-            _LOGGER.info("Start docker addon %s with version %s",
-                         self.image, self.version)
-
         except docker.errors.DockerException as err:
             _LOGGER.error("Can't run %s -> %s", self.image, err)
             return False
 
+        _LOGGER.info(
+            "Start docker addon %s with version %s", self.image, self.version)
         return True
 
-    def _attach(self):
-        """Attach to running docker container.
+    def _install(self, tag):
+        """Pull docker image or build it.
 
         Need run inside executor.
         """
-        try:
-            self.container = self.dock.containers.get(self.docker_name)
-            self.version = get_version_from_env(
-                self.container.attrs['Config']['Env'])
-            _LOGGER.info("Attach to image %s with version %s",
-                         self.image, self.version)
-        except (docker.errors.DockerException, KeyError):
-            pass
+        if self.addon.need_build:
+            return self._build(tag)
+
+        return super()._install(tag)
+
+    def _build(self, tag):
+        """Build a docker container.
+
+        Need run inside executor.
+        """
+        build_dir = Path(self.config.path_tmp, self.addon.slug)
+        try:
+            # prepare temporary addon build folder
+            try:
+                source = self.addon.path_location
+                shutil.copytree(str(source), str(build_dir))
+            except shutil.Error as err:
+                _LOGGER.error("Can't copy %s to temporary build folder -> %s",
+                              source, err)
+                return False
+
+            # prepare Dockerfile
+            try:
+                dockerfile_template(
+                    Path(build_dir, 'Dockerfile'), self.config.arch,
+                    tag, META_ADDON)
+            except OSError as err:
+                _LOGGER.error("Can't prepare dockerfile -> %s", err)
+
+            # run docker build
+            try:
+                build_tag = "{}:{}".format(self.image, tag)
+
+                _LOGGER.info("Start build %s on %s", build_tag, build_dir)
+                image = self.dock.images.build(
+                    path=str(build_dir), tag=build_tag, pull=True)
+
+                image.tag(self.image, tag='latest')
+                self.process_metadata(image.attrs, force=True)
+
+            except (docker.errors.DockerException, TypeError) as err:
+                _LOGGER.error("Can't build %s -> %s", build_tag, err)
+                return False
+
+            _LOGGER.info("Build %s done", build_tag)
+            return True
+
+        finally:
+            shutil.rmtree(str(build_dir), ignore_errors=True)
+
+    async def export_image(self, path):
+        """Export current images into a tar file."""
+        if self._lock.locked():
+            _LOGGER.error("Can't execute export while a task is in progress")
+            return False
+
+        async with self._lock:
+            return await self.loop.run_in_executor(
+                None, self._export_image, path)
+
+    def _export_image(self, tar_file):
+        """Export current images into a tar file.
+
+        Need run inside executor.
+        """
+        try:
+            image = self.dock.api.get_image(self.image)
+        except docker.errors.DockerException as err:
+            _LOGGER.error("Can't fetch image %s -> %s", self.image, err)
+            return False
+
+        try:
+            with tar_file.open("wb") as write_tar:
+                for chunk in image.stream():
+                    write_tar.write(chunk)
+        except (OSError, requests.exceptions.ReadTimeout) as err:
+            _LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
+            return False
+
+        _LOGGER.info("Export image %s to %s", self.image, tar_file)
+        return True
+
+    async def import_image(self, path, tag):
+        """Import a tar file as image."""
+        if self._lock.locked():
+            _LOGGER.error("Can't execute import while a task is in progress")
+            return False
+
+        async with self._lock:
+            return await self.loop.run_in_executor(
+                None, self._import_image, path, tag)
+
+    def _import_image(self, tar_file, tag):
+        """Import a tar file as image.
+
+        Need run inside executor.
+        """
+        try:
+            with tar_file.open("rb") as read_tar:
+                self.dock.api.load_image(read_tar)
+
+            image = self.dock.images.get(self.image)
+            image.tag(self.image, tag=tag)
+        except (docker.errors.DockerException, OSError) as err:
+            _LOGGER.error("Can't import image %s -> %s", self.image, err)
+            return False
+
+        _LOGGER.info("Import image %s and tag %s", tar_file, tag)
+        self.process_metadata(image.attrs, force=True)
+        self._cleanup()
+        return True
+
+    def _restart(self):
+        """Restart docker container.
+
+        Add-ons prepare some things on start that are normally not repeatable.
+        Need run inside executor.
+        """
+        self._stop()
+        return self._run()
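A hedged example of what the new volumes property yields for an add-on that maps only a read-only config share next to its own data folder; the host paths are assumptions, while the bind targets and modes follow the code above.

# Possible DockerAddon.volumes result (host paths illustrative):
{
    '/data/addons/data/example_addon': {'bind': '/data', 'mode': 'rw'},
    '/data/homeassistant': {'bind': '/config', 'mode': 'ro'},
}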
@@ -4,7 +4,6 @@ import logging
 import docker
 
 from . import DockerBase
-from ..tools import get_version_from_env
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -14,15 +13,28 @@ HASS_DOCKER_NAME = 'homeassistant'
 class DockerHomeAssistant(DockerBase):
     """Docker hassio wrapper for HomeAssistant."""
 
-    def __init__(self, config, loop, dock):
+    def __init__(self, config, loop, dock, data):
         """Initialize docker homeassistant wrapper."""
-        super().__init__(config, loop, dock, image=config.homeassistant_image)
+        super().__init__(config, loop, dock, image=data.image)
+        self.data = data
 
     @property
-    def docker_name(self):
+    def name(self):
         """Return name of docker container."""
         return HASS_DOCKER_NAME
 
+    @property
+    def devices(self):
+        """Create list of special devices to map into docker."""
+        if not self.data.devices:
+            return
+
+        devices = []
+        for device in self.data.devices:
+            devices.append("/dev/{0}:/dev/{0}:rwm".format(device))
+
+        return devices
+
     def _run(self):
         """Run docker image.
 
@@ -31,47 +43,35 @@ class DockerHomeAssistant(DockerBase):
         if self._is_running():
             return
 
-        # cleanup old container
+        # cleanup
         self._stop()
 
         try:
-            self.container = self.dock.containers.run(
+            self.dock.containers.run(
                 self.image,
-                name=self.docker_name,
+                name=self.name,
+                hostname=self.name,
                 detach=True,
                 privileged=True,
+                devices=self.devices,
                 network_mode='host',
                 environment={
                     'HASSIO': self.config.api_endpoint,
                     'TZ': self.config.timezone,
                 },
                 volumes={
-                    self.config.path_config_docker:
+                    str(self.config.path_extern_config):
                         {'bind': '/config', 'mode': 'rw'},
-                    self.config.path_ssl_docker:
-                        {'bind': '/ssl', 'mode': 'rw'},
+                    str(self.config.path_extern_ssl):
+                        {'bind': '/ssl', 'mode': 'ro'},
+                    str(self.config.path_extern_share):
+                        {'bind': '/share', 'mode': 'rw'},
                 })
 
-            self.version = get_version_from_env(
-                self.container.attrs['Config']['Env'])
-
-            _LOGGER.info("Start docker addon %s with version %s",
-                         self.image, self.version)
-
         except docker.errors.DockerException as err:
             _LOGGER.error("Can't run %s -> %s", self.image, err)
             return False
 
+        _LOGGER.info(
+            "Start homeassistant %s with version %s", self.image, self.version)
         return True
-
-    async def update(self, tag):
-        """Update homeassistant docker image."""
-        if self._lock.locked():
-            _LOGGER.error("Can't excute update while a task is in progress")
-            return False
-
-        async with self._lock:
-            if await self.loop.run_in_executor(None, self._update, tag):
-                await self.loop.run_in_executor(None, self._run)
-                return True
-
-            return False
@@ -2,8 +2,6 @@
 import logging
 import os
 
-import docker
-
 from . import DockerBase
 from ..const import RESTART_EXIT_CODE
 
@@ -13,14 +11,13 @@ _LOGGER = logging.getLogger(__name__)
 class DockerSupervisor(DockerBase):
     """Docker hassio wrapper for HomeAssistant."""
 
-    def __init__(self, config, loop, dock, hassio, image=None):
+    def __init__(self, config, loop, dock, stop_callback, image=None):
         """Initialize docker base wrapper."""
         super().__init__(config, loop, dock, image=image)
-
-        self.hassio = hassio
+        self.stop_callback = stop_callback
 
     @property
-    def docker_name(self):
+    def name(self):
         """Return name of docker container."""
         return os.environ['SUPERVISOR_NAME']
 
@@ -31,41 +28,14 @@ class DockerSupervisor(DockerBase):
             return False
 
         _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
-        old_version = self.version
 
         async with self._lock:
             if await self.loop.run_in_executor(None, self._install, tag):
-                self.config.hassio_cleanup = old_version
-                self.loop.create_task(self.hassio.stop(RESTART_EXIT_CODE))
+                self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
                 return True
 
             return False
 
-    async def cleanup(self):
-        """Check if old supervisor version exists and cleanup."""
-        if not self.config.hassio_cleanup:
-            return
-
-        async with self._lock:
-            if await self.loop.run_in_executor(None, self._cleanup):
-                self.config.hassio_cleanup = None
-
-    def _cleanup(self):
-        """Remove old image.
-
-        Need run inside executor.
-        """
-        old_image = "{}:{}".format(self.image, self.config.hassio_cleanup)
-
-        _LOGGER.info("Old supervisor docker found %s", old_image)
-        try:
-            self.dock.images.remove(image=old_image, force=True)
-        except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't remove old image %s -> %s", old_image, err)
-            return False
-
-        return True
-
     async def run(self):
         """Run docker image."""
         raise RuntimeError("Not supported on supervisor docker container!")
@@ -81,3 +51,7 @@ class DockerSupervisor(DockerBase):
     async def remove(self):
         """Remove docker image."""
         raise RuntimeError("Not supported on supervisor docker container!")
+
+    async def restart(self):
+        """Restart docker container."""
+        raise RuntimeError("Not supported on supervisor docker container!")
hassio/dock/util.py (new file, 42 lines)
@@ -0,0 +1,42 @@
"""HassIO docker utilities."""
import re

from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64


HASSIO_BASE_IMAGE = {
    ARCH_ARMHF: "homeassistant/armhf-base:latest",
    ARCH_AARCH64: "homeassistant/aarch64-base:latest",
    ARCH_I386: "homeassistant/i386-base:latest",
    ARCH_AMD64: "homeassistant/amd64-base:latest",
}

TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")


def dockerfile_template(dockerfile, arch, version, meta_type):
    """Prepare a Hass.IO dockerfile."""
    buff = []
    hassio_image = HASSIO_BASE_IMAGE[arch]
    custom_image = re.compile(r"^#{}:FROM".format(arch))

    # read docker
    with dockerfile.open('r') as dock_input:
        for line in dock_input:
            line = TMPL_IMAGE.sub(hassio_image, line)
            line = custom_image.sub("FROM", line)
            buff.append(line)

    # add metadata
    buff.append(create_metadata(version, arch, meta_type))

    # write docker
    with dockerfile.open('w') as dock_output:
        dock_output.writelines(buff)


def create_metadata(version, arch, meta_type):
    """Generate docker label layer for hassio."""
    return ('LABEL io.hass.version="{}" '
            'io.hass.arch="{}" '
            'io.hass.type="{}"').format(version, arch, meta_type)
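A short sketch of dockerfile_template() in action. It assumes ARCH_AMD64 and META_ADDON resolve to 'amd64' and 'addon'; the Dockerfile content is illustrative.

# Input Dockerfile line:  FROM %%BASE_IMAGE%%
# After dockerfile_template(Path('Dockerfile'), 'amd64', '1.1', 'addon'):
#   FROM homeassistant/amd64-base:latest
#   ...
#   LABEL io.hass.version="1.1" io.hass.arch="amd64" io.hass.type="addon"
# A line '#amd64:FROM my/base' would instead be rewritten to 'FROM my/base',
# letting an add-on pin a custom per-arch base image.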
hassio/hardware.py (new file, 87 lines)
@@ -0,0 +1,87 @@
"""Read hardware info from system."""
import logging
from pathlib import Path
import re

import pyudev

from .const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES

_LOGGER = logging.getLogger(__name__)

ASOUND_CARDS = Path("/proc/asound/cards")
RE_CARDS = re.compile(r"(\d+) \[(\w*) *\]: (.*\w)")

ASOUND_DEVICES = Path("/proc/asound/devices")
RE_DEVICES = re.compile(r"\[.*(\d+)- (\d+).*\]: ([\w ]*)")


class Hardware(object):
    """Represent an interface to procfs, sysfs and udev."""

    def __init__(self):
        """Init hardware object."""
        self.context = pyudev.Context()

    @property
    def serial_devices(self):
        """Return all serial and connected devices."""
        dev_list = set()
        for device in self.context.list_devices(subsystem='tty'):
            if 'ID_VENDOR' in device:
                dev_list.add(device.device_node)

        return list(dev_list)

    @property
    def input_devices(self):
        """Return all input devices."""
        dev_list = set()
        for device in self.context.list_devices(subsystem='input'):
            if 'NAME' in device:
                dev_list.add(device['NAME'].replace('"', ''))

        return list(dev_list)

    @property
    def disk_devices(self):
        """Return all disk devices."""
        dev_list = set()
        for device in self.context.list_devices(subsystem='block'):
            if device.device_node.startswith('/dev/sd'):
                dev_list.add(device.device_node)

        return list(dev_list)

    @property
    def audio_devices(self):
        """Return all available audio interfaces."""
        try:
            with ASOUND_CARDS.open('r') as cards_file:
                cards = cards_file.read()
            with ASOUND_DEVICES.open('r') as devices_file:
                devices = devices_file.read()
        except OSError as err:
            _LOGGER.error("Can't read asound data -> %s", err)
            return

        audio_list = {}

        # parse cards
        for match in RE_CARDS.finditer(cards):
            audio_list[match.group(1)] = {
                ATTR_NAME: match.group(3),
                ATTR_TYPE: match.group(2),
                ATTR_DEVICES: {},
            }

        # parse devices
        for match in RE_DEVICES.finditer(devices):
            try:
                audio_list[match.group(1)][ATTR_DEVICES][match.group(2)] = \
                    match.group(3)
            except KeyError:
                _LOGGER.warning("Wrong audio device found %s", match.group(0))
                continue

        return audio_list
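A minimal usage sketch for the new Hardware class; the device nodes and dict keys in the comments are assumptions, not guaranteed output.

hardware = Hardware()
print(hardware.serial_devices)  # e.g. ['/dev/ttyUSB0', '/dev/ttyACM0']
print(hardware.disk_devices)    # e.g. ['/dev/sda']
print(hardware.audio_devices)   # e.g. {'0': {'name': 'bcm2835 ALSA', 'type': 'ALSA', 'devices': {...}}}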
hassio/homeassistant.py (new file, 164 lines)
@@ -0,0 +1,164 @@
"""HomeAssistant control object."""
import asyncio
import logging
import os

from .const import (
    FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
    ATTR_VERSION)
from .dock.homeassistant import DockerHomeAssistant
from .tools import JsonConfig
from .validate import SCHEMA_HASS_CONFIG

_LOGGER = logging.getLogger(__name__)


class HomeAssistant(JsonConfig):
    """Hass core object for handling it."""

    def __init__(self, config, loop, dock, updater):
        """Initialize hass object."""
        super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
        self.config = config
        self.loop = loop
        self.updater = updater
        self.docker = DockerHomeAssistant(config, loop, dock, self)

    async def prepare(self):
        """Prepare HomeAssistant object."""
        if not await self.docker.exists():
            _LOGGER.info("No HomeAssistant docker %s found.", self.image)
            if self.is_custom_image:
                await self.install()
            else:
                await self.install_landingpage()
        else:
            await self.docker.attach()

    @property
    def version(self):
        """Return version of running homeassistant."""
        return self.docker.version

    @property
    def last_version(self):
        """Return last available version of homeassistant."""
        if self.is_custom_image:
            return self._data.get(ATTR_LAST_VERSION)
        return self.updater.version_homeassistant

    @property
    def image(self):
        """Return image name of hass container."""
        if ATTR_IMAGE in self._data:
            return self._data[ATTR_IMAGE]
        return os.environ['HOMEASSISTANT_REPOSITORY']

    @property
    def is_custom_image(self):
        """Return True if a custom image is used."""
        return ATTR_IMAGE in self._data

    @property
    def devices(self):
        """Return extended device mapping."""
        return self._data[ATTR_DEVICES]

    @devices.setter
    def devices(self, value):
        """Set extended device mapping."""
        self._data[ATTR_DEVICES] = value
        self.save()

    def set_custom(self, image, version):
        """Set a custom image for homeassistant."""
        # reset
        if image is None and version is None:
            self._data.pop(ATTR_IMAGE, None)
            self._data.pop(ATTR_VERSION, None)

            self.docker.image = self.image
        else:
            if image:
                self._data[ATTR_IMAGE] = image
                self.docker.image = image
            if version:
                self._data[ATTR_VERSION] = version
        self.save()

    async def install_landingpage(self):
        """Install a landingpage."""
        _LOGGER.info("Setup HomeAssistant landingpage")
        while True:
            if await self.docker.install('landingpage'):
                break
            _LOGGER.warning("Failed to install landingpage, retry after 60sec")
            await asyncio.sleep(60, loop=self.loop)

    async def install(self):
        """Install HomeAssistant."""
        _LOGGER.info("Setup HomeAssistant")
        while True:
            # read homeassistant tag and install it
            if not self.last_version:
                await self.updater.fetch_data()

            tag = self.last_version
            if tag and await self.docker.install(tag):
                break
            _LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
            await asyncio.sleep(60, loop=self.loop)

        # store version
        _LOGGER.info("HomeAssistant docker now installed")
        await self.docker.cleanup()

    async def update(self, version=None):
        """Update HomeAssistant version."""
        version = version or self.last_version

        if version == self.docker.version:
            _LOGGER.warning("Version %s is already installed", version)
            return False

        return await self.docker.update(version)

    def run(self):
        """Run HomeAssistant docker.

        Return a coroutine.
        """
        return self.docker.run()

    def stop(self):
        """Stop HomeAssistant docker.

        Return a coroutine.
        """
        return self.docker.stop()

    def restart(self):
        """Restart HomeAssistant docker.

        Return a coroutine.
        """
        return self.docker.restart()

    def logs(self):
        """Get HomeAssistant docker logs.

        Return a coroutine.
        """
        return self.docker.logs()

    def is_running(self):
        """Return True if docker container is running.

        Return a coroutine.
        """
        return self.docker.is_running()

    @property
    def in_progress(self):
        """Return True if a task is in progress."""
        return self.docker.in_progress
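A hedged usage sketch for set_custom(): pin a custom image plus version, or reset both to fall back to the HOMEASSISTANT_REPOSITORY environment default (the image name here is illustrative).

homeassistant.set_custom('myrepo/custom-homeassistant', '0.48.1')
homeassistant.set_custom(None, None)  # reset to the default repository image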
hassio/host_control.py (new file, 124 lines)
@@ -0,0 +1,124 @@
"""Host control for HassIO."""
import asyncio
import json
import logging

import async_timeout

from .const import (
    SOCKET_HC, ATTR_LAST_VERSION, ATTR_VERSION, ATTR_TYPE, ATTR_FEATURES,
    ATTR_HOSTNAME, ATTR_OS)

_LOGGER = logging.getLogger(__name__)

TIMEOUT = 15
UNKNOWN = 'unknown'

FEATURES_SHUTDOWN = 'shutdown'
FEATURES_REBOOT = 'reboot'
FEATURES_UPDATE = 'update'
FEATURES_HOSTNAME = 'hostname'
FEATURES_NETWORK_INFO = 'network_info'
FEATURES_NETWORK_CONTROL = 'network_control'


class HostControl(object):
    """Client for host control."""

    def __init__(self, loop):
        """Initialize HostControl socket client."""
        self.loop = loop
        self.active = False
        self.version = UNKNOWN
        self.last_version = UNKNOWN
        self.type = UNKNOWN
        self.features = []
        self.hostname = UNKNOWN
        self.os_info = UNKNOWN

        if SOCKET_HC.is_socket():
            self.active = True

    async def _send_command(self, command):
        """Send command to host.

        Is a coroutine.
        """
        if not self.active:
            return

        reader, writer = await asyncio.open_unix_connection(
            str(SOCKET_HC), loop=self.loop)

        try:
            # send
            _LOGGER.info("Send '%s' to HostControl.", command)

            with async_timeout.timeout(TIMEOUT, loop=self.loop):
                writer.write("{}\n".format(command).encode())
                data = await reader.readline()

            response = data.decode().rstrip()
            _LOGGER.info("Receive from HostControl: %s.", response)

            if response == "OK":
                return True
            elif response == "ERROR":
                return False
            elif response == "WRONG":
                return None
            else:
                try:
                    return json.loads(response)
                except json.JSONDecodeError:
                    _LOGGER.warning("Json parse error from HostControl '%s'.",
                                    response)

        except asyncio.TimeoutError:
            _LOGGER.error("Timeout from HostControl!")

        finally:
            writer.close()

    async def load(self):
        """Load Info from host.

        Return a coroutine.
        """
        info = await self._send_command("info")
        if not info:
            return

        self.version = info.get(ATTR_VERSION, UNKNOWN)
        self.last_version = info.get(ATTR_LAST_VERSION, UNKNOWN)
        self.type = info.get(ATTR_TYPE, UNKNOWN)
        self.features = info.get(ATTR_FEATURES, [])
        self.hostname = info.get(ATTR_HOSTNAME, UNKNOWN)
        self.os_info = info.get(ATTR_OS, UNKNOWN)

    def reboot(self):
        """Reboot the host system.

        Return a coroutine.
        """
        return self._send_command("reboot")

    def shutdown(self):
        """Shutdown the host system.

        Return a coroutine.
        """
        return self._send_command("shutdown")

    def update(self, version=None):
        """Update the host system.

        Return a coroutine.
        """
        if version:
            return self._send_command("update {}".format(version))
        return self._send_command("update")

    def set_hostname(self, hostname):
        """Update hostname on host."""
        return self._send_command("hostname {}".format(hostname))
@@ -1,102 +0,0 @@
"""Host controll for HassIO."""
import asyncio
import json
import logging
import os
import stat

import async_timeout

from .const import SOCKET_HC

_LOGGER = logging.getLogger(__name__)

TIMEOUT = 15

LEVEL_POWER = 1
LEVEL_UPDATE_HOST = 2
LEVEL_NETWORK = 4


class HostControll(object):
    """Client for host controll."""

    def __init__(self, loop):
        """Initialize HostControll socket client."""
        self.loop = loop
        self.active = False
        self.version = None

        mode = os.stat(SOCKET_HC)[stat.ST_MODE]
        if stat.S_ISSOCK(mode):
            self.active = True

    async def _send_command(self, command):
        """Send command to host.

        Is a coroutine.
        """
        if not self.active:
            return

        reader, writer = await asyncio.open_unix_connection(
            SOCKET_HC, loop=self.loop)

        try:
            # send
            _LOGGER.info("Send '%s' to HostControll.", command)

            with async_timeout.timeout(TIMEOUT, loop=self.loop):
                writer.write("{}\n".format(command).encode())
                data = await reader.readline()

            response = data.decode()
            _LOGGER.debug("Receive from HostControll: %s.", response)

            if response == "OK":
                return True
            elif response == "ERROR":
                return False
            elif response == "WRONG":
                return None
            else:
                try:
                    return json.loads(response)
                except json.JSONDecodeError:
                    _LOGGER.warning("Json parse error from HostControll.")

        except asyncio.TimeoutError:
            _LOGGER.error("Timeout from HostControll!")

        finally:
            writer.close()

    def info(self):
        """Return Info from host.

        Return a coroutine.
        """
        return self._send_command("info")

    def reboot(self):
        """Reboot the host system.

        Return a coroutine.
        """
        return self._send_command("reboot")

    def shutdown(self):
        """Shutdown the host system.

        Return a coroutine.
        """
        return self._send_command("shutdown")

    def host_update(self, version=None):
        """Update the host system.

        Return a coroutine.
        """
        if version:
            return self._send_command("host-update {}".format(version))
        return self._send_command("host-update")
hassio/panel/hassio-main.html (new file, 1 line)
File diff suppressed because one or more lines are too long
hassio/panel/hassio-main.html.gz (new file, binary)
Binary file not shown.
@@ -16,11 +16,7 @@ class Scheduler(object):
         """Initialize task schedule."""
         self.loop = loop
         self._data = {}
-        self._stop = False
-
-    def stop(self):
-        """Stop to execute tasks in scheduler."""
-        self._stop = True
+        self.suspend = False
 
     def register_task(self, coro_callback, seconds, repeat=True,
                       now=False):
@@ -51,11 +47,8 @@ class Scheduler(object):
         """Run a scheduled task."""
         data = self._data.pop(idx)
 
-        # stop execute tasks
-        if self._stop:
-            return
-
-        self.loop.create_task(data[CALL]())
+        if not self.suspend:
+            self.loop.create_task(data[CALL]())
 
         if data[REPEAT]:
             task = self.loop.call_later(data[SEC], self._run_task, idx)
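The suspend flag replaces the old hard stop: while suspended, callbacks are skipped but tasks keep rescheduling themselves. A sketch of the intended usage, mirroring the snapshot manager below:

scheduler.suspend = True
try:
    ...  # snapshot/restore work that must not race scheduled tasks
finally:
    scheduler.suspend = False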
hassio/snapshots/__init__.py (new file, 310 lines)
@@ -0,0 +1,310 @@
"""Snapshot system control."""
import asyncio
from datetime import datetime
import logging
from pathlib import Path
import tarfile

from .snapshot import Snapshot
from .util import create_slug
from ..const import (
    ATTR_SLUG, FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)

_LOGGER = logging.getLogger(__name__)


class SnapshotsManager(object):
    """Manage snapshots."""

    def __init__(self, config, loop, sheduler, addons, homeassistant):
        """Initialize a snapshot manager."""
        self.config = config
        self.loop = loop
        self.sheduler = sheduler
        self.addons = addons
        self.homeassistant = homeassistant
        self.snapshots = {}
        self._lock = asyncio.Lock(loop=loop)

    @property
    def list_snapshots(self):
        """Return a list of all snapshot objects."""
        return set(self.snapshots.values())

    def get(self, slug):
        """Return snapshot object."""
        return self.snapshots.get(slug)

    def _create_snapshot(self, name, sys_type):
        """Initialize a new snapshot object from name."""
        date_str = datetime.utcnow().isoformat()
        slug = create_slug(name, date_str)
        tar_file = Path(self.config.path_backup, "{}.tar".format(slug))

        # init object
        snapshot = Snapshot(self.config, self.loop, tar_file)
        snapshot.create(slug, name, date_str, sys_type)

        # set general data
        snapshot.snapshot_homeassistant(self.homeassistant)
        snapshot.repositories = self.config.addons_repositories

        return snapshot

    async def reload(self):
        """Load existing backups."""
        self.snapshots = {}

        async def _load_snapshot(tar_file):
            """Internal function to load snapshot."""
            snapshot = Snapshot(self.config, self.loop, tar_file)
            if await snapshot.load():
                self.snapshots[snapshot.slug] = snapshot

        tasks = [_load_snapshot(tar_file) for tar_file in
                 self.config.path_backup.glob("*.tar")]

        _LOGGER.info("Found %d snapshot files", len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    def remove(self, snapshot):
        """Remove a snapshot."""
        try:
            snapshot.tar_file.unlink()
            self.snapshots.pop(snapshot.slug, None)
        except OSError as err:
            _LOGGER.error("Can't remove snapshot %s -> %s", snapshot.slug, err)
            return False

        return True

    async def do_snapshot_full(self, name=""):
        """Create a full snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        snapshot = self._create_snapshot(name, SNAPSHOT_FULL)
        _LOGGER.info("Full-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # snapshot addons
                tasks = []
                for addon in self.addons.list_addons:
                    if not addon.is_installed:
                        continue
                    tasks.append(snapshot.import_addon(addon))

                if tasks:
                    _LOGGER.info("Full-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # snapshot folders
                _LOGGER.info("Full-Snapshot %s store folders", snapshot.slug)
                await snapshot.store_folders()

            _LOGGER.info("Full-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Full-Snapshot %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_snapshot_partial(self, name="", addons=None, folders=None):
        """Create a partial snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        addons = addons or []
        folders = folders or []
        snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL)

        _LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # snapshot addons
                tasks = []
                for slug in addons:
                    addon = self.addons.get(slug)
                    if addon.is_installed:
                        tasks.append(snapshot.import_addon(addon))

                if tasks:
                    _LOGGER.info("Partial-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # snapshot folders
                _LOGGER.info("Partial-Snapshot %s store folders %s",
                             snapshot.slug, folders)
                await snapshot.store_folders(folders)

            _LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Partial-Snapshot %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_restore_full(self, snapshot):
        """Restore a snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        if snapshot.sys_type != SNAPSHOT_FULL:
            _LOGGER.error(
                "Full-Restore %s is only a partial snapshot!", snapshot.slug)
            return False

        _LOGGER.info("Full-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # stop system
                tasks = []
                tasks.append(self.homeassistant.stop())

                for addon in self.addons.list_addons:
                    if addon.is_installed:
                        tasks.append(addon.stop())

                await asyncio.wait(tasks, loop=self.loop)

                # restore folders
                _LOGGER.info("Full-Restore %s restore folders", snapshot.slug)
                await snapshot.restore_folders()

                # start homeassistant restore
                snapshot.restore_homeassistant(self.homeassistant)
                task_hass = self.loop.create_task(
                    self.homeassistant.update(snapshot.homeassistant_version))

                # restore repositories
                await self.addons.load_repositories(snapshot.repositories)

                # restore addons
                tasks = []
                actual_addons = \
                    set(addon.slug for addon in self.addons.list_addons
                        if addon.is_installed)
                restore_addons = \
                    set(data[ATTR_SLUG] for data in snapshot.addons)
                remove_addons = actual_addons - restore_addons

                _LOGGER.info("Full-Restore %s restore addons %s, remove %s",
                             snapshot.slug, restore_addons, remove_addons)

                for slug in remove_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(addon.uninstall())
                    else:
                        _LOGGER.warning("Can't remove addon %s", slug)

                for slug in restore_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Full-Restore %s restore addons tasks %d",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # finish homeassistant task
                _LOGGER.info("Full-Restore %s wait until homeassistant ready",
                             snapshot.slug)
                await task_hass
                await self.homeassistant.run()

            _LOGGER.info("Full-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Full-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_restore_partial(self, snapshot, homeassistant=False,
                                 addons=None, folders=None):
        """Restore a snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        addons = addons or []
        folders = folders or []

        _LOGGER.info("Partial-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                tasks = []

                if FOLDER_HOMEASSISTANT in folders:
                    await self.homeassistant.stop()

                if folders:
                    _LOGGER.info("Partial-Restore %s restore folders %s",
                                 snapshot.slug, folders)
                    await snapshot.restore_folders(folders)

                if homeassistant:
                    snapshot.restore_homeassistant(self.homeassistant)
                    tasks.append(self.homeassistant.update(
                        snapshot.homeassistant_version))

                for slug in addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Partial-Restore %s run %d tasks",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # make sure homeassistant runs again
                await self.homeassistant.run()

            _LOGGER.info("Partial-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Partial-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()
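A hedged wiring sketch for the new manager; the constructor arguments follow the signature above, and the surrounding objects are assumed to already exist.

snapshots = SnapshotsManager(config, loop, scheduler, addons, homeassistant)
await snapshots.reload()                          # index *.tar in path_backup
await snapshots.do_snapshot_full(name="nightly")  # suspends scheduler while running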
hassio/snapshots/snapshot.py (new file, 300 lines)
@@ -0,0 +1,300 @@
"""Represent a snapshot file."""
import asyncio
import json
import logging
from pathlib import Path
import tarfile
from tempfile import TemporaryDirectory

import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
from .util import remove_folder
from ..const import (
    ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
    ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_DEVICES,
    ATTR_IMAGE)
from ..tools import write_json_file

_LOGGER = logging.getLogger(__name__)


class Snapshot(object):
    """A single hassio snapshot."""

    def __init__(self, config, loop, tar_file):
        """Initialize a snapshot."""
        self.loop = loop
        self.config = config
        self.tar_file = tar_file
        self._data = {}
        self._tmp = None

    @property
    def slug(self):
        """Return snapshot slug."""
        return self._data.get(ATTR_SLUG)

    @property
    def sys_type(self):
        """Return snapshot type."""
        return self._data.get(ATTR_TYPE)

    @property
    def name(self):
        """Return snapshot name."""
        return self._data[ATTR_NAME]

    @property
    def date(self):
        """Return snapshot date."""
        return self._data[ATTR_DATE]

    @property
    def addons(self):
        """Return snapshot add-ons."""
        return self._data[ATTR_ADDONS]

    @property
    def folders(self):
        """Return list of saved folders."""
        return self._data[ATTR_FOLDERS]

    @property
    def repositories(self):
        """Return snapshot repositories."""
        return self._data[ATTR_REPOSITORIES]

    @repositories.setter
    def repositories(self, value):
        """Set snapshot repositories."""
        self._data[ATTR_REPOSITORIES] = value

    @property
    def homeassistant_version(self):
        """Return snapshot homeassistant version."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION)

    @homeassistant_version.setter
    def homeassistant_version(self, value):
        """Set snapshot homeassistant version."""
        self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] = value

    @property
    def homeassistant_devices(self):
        """Return snapshot homeassistant devices."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_DEVICES)

    @homeassistant_devices.setter
    def homeassistant_devices(self, value):
        """Set snapshot homeassistant devices."""
        self._data[ATTR_HOMEASSISTANT][ATTR_DEVICES] = value

    @property
    def homeassistant_image(self):
        """Return snapshot homeassistant custom image."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_IMAGE)

    @homeassistant_image.setter
    def homeassistant_image(self, value):
        """Set snapshot homeassistant custom image."""
        self._data[ATTR_HOMEASSISTANT][ATTR_IMAGE] = value

    @property
    def size(self):
        """Return snapshot size."""
        if not self.tar_file.is_file():
            return 0
        return self.tar_file.stat().st_size / 1048576  # calc mbyte

    def create(self, slug, name, date, sys_type):
        """Initialize a new snapshot."""
        # init metadata
        self._data[ATTR_SLUG] = slug
        self._data[ATTR_NAME] = name
        self._data[ATTR_DATE] = date
        self._data[ATTR_TYPE] = sys_type

        # init other constructs
        self._data[ATTR_HOMEASSISTANT] = {}
        self._data[ATTR_ADDONS] = []
        self._data[ATTR_REPOSITORIES] = []
        self._data[ATTR_FOLDERS] = []

    def snapshot_homeassistant(self, homeassistant):
        """Read all data from homeassistant object."""
        self.homeassistant_version = homeassistant.version
        self.homeassistant_devices = homeassistant.devices

        # custom image
        if homeassistant.is_custom_image:
            self.homeassistant_image = homeassistant.image

    def restore_homeassistant(self, homeassistant):
        """Write all data to homeassistant object."""
        homeassistant.devices = self.homeassistant_devices

        # custom image
        if self.homeassistant_image:
            homeassistant.set_custom(
                self.homeassistant_image, self.homeassistant_version)

    async def load(self):
        """Read snapshot.json from tar file."""
        if not self.tar_file.is_file():
            _LOGGER.error("No tarfile %s", self.tar_file)
            return False

        def _load_file():
            """Read snapshot.json."""
            with tarfile.open(self.tar_file, "r:") as snapshot:
                json_file = snapshot.extractfile("./snapshot.json")
                return json_file.read()

        # read snapshot.json
        try:
            raw = await self.loop.run_in_executor(None, _load_file)
        except (tarfile.TarError, KeyError) as err:
            _LOGGER.error(
                "Can't read snapshot tarfile %s -> %s", self.tar_file, err)
            return False

        # parse data
        try:
            raw_dict = json.loads(raw)
        except json.JSONDecodeError as err:
            _LOGGER.error("Can't read data for %s -> %s", self.tar_file, err)
            return False

        # validate
        try:
            self._data = SCHEMA_SNAPSHOT(raw_dict)
        except vol.Invalid as err:
            _LOGGER.error("Can't validate data for %s -> %s", self.tar_file,
                          humanize_error(raw_dict, err))
            return False

        return True

    async def __aenter__(self):
        """Async context to open a snapshot."""
        self._tmp = TemporaryDirectory(dir=str(self.config.path_tmp))

        # create a snapshot
        if not self.tar_file.is_file():
            return self

        # extract an existing snapshot
        def _extract_snapshot():
            """Extract a snapshot."""
            with tarfile.open(self.tar_file, "r:") as tar:
                tar.extractall(path=self._tmp.name)

        await self.loop.run_in_executor(None, _extract_snapshot)

    async def __aexit__(self, exception_type, exception_value, traceback):
        """Async context to close a snapshot."""
        # existing snapshot or exception on build
        if self.tar_file.is_file() or exception_type is not None:
            return self._tmp.cleanup()

        # validate data
        try:
            self._data = SCHEMA_SNAPSHOT(self._data)
        except vol.Invalid as err:
            _LOGGER.error("Invalid data for %s -> %s", self.tar_file,
                          humanize_error(self._data, err))
            raise ValueError("Invalid config") from None

        # new snapshot, build it
        def _create_snapshot():
            """Create a new snapshot."""
            with tarfile.open(self.tar_file, "w:") as tar:
                tar.add(self._tmp.name, arcname=".")

        if write_json_file(Path(self._tmp.name, "snapshot.json"), self._data):
            await self.loop.run_in_executor(None, _create_snapshot)
        else:
            _LOGGER.error("Can't write snapshot.json")

        self._tmp.cleanup()
        self._tmp = None

    async def import_addon(self, addon):
        """Add an add-on into the snapshot."""
        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))

        if not await addon.snapshot(snapshot_file):
            _LOGGER.error("Can't make snapshot from %s", addon.slug)
            return False

        # store to config
        self._data[ATTR_ADDONS].append({
            ATTR_SLUG: addon.slug,
            ATTR_NAME: addon.name,
            ATTR_VERSION: addon.version_installed,
        })

        return True

    async def export_addon(self, addon):
        """Restore an add-on from the snapshot."""
        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))

        if not await addon.restore(snapshot_file):
            _LOGGER.error("Can't restore snapshot for %s", addon.slug)
            return False

        return True

    async def store_folders(self, folder_list=None):
        """Backup hassio data into snapshot."""
        folder_list = folder_list or ALL_FOLDERS

        def _folder_save(name):
            """Internal function to snapshot a folder."""
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)

            try:
                with tarfile.open(snapshot_tar, "w:gz",
                                  compresslevel=1) as tar_file:
                    tar_file.add(origin_dir, arcname=".")

                self._data[ATTR_FOLDERS].append(name)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't snapshot folder %s -> %s", name, err)

        # run tasks
        tasks = [self.loop.run_in_executor(None, _folder_save, folder)
                 for folder in folder_list]
|
||||
if tasks:
|
||||
await asyncio.wait(tasks, loop=self.loop)
|
||||
|
||||
async def restore_folders(self, folder_list=None):
|
||||
"""Backup hassio data into snapshot."""
|
||||
folder_list = folder_list or ALL_FOLDERS
|
||||
|
||||
def _folder_restore(name):
|
||||
"""Intenal function to restore a folder."""
|
||||
slug_name = name.replace("/", "_")
|
||||
snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
|
||||
origin_dir = Path(self.config.path_hassio, name)
|
||||
|
||||
# clean old stuff
|
||||
if origin_dir.is_dir():
|
||||
remove_folder(origin_dir)
|
||||
|
||||
try:
|
||||
with tarfile.open(snapshot_tar, "r:gz") as tar_file:
|
||||
tar_file.extractall(path=origin_dir)
|
||||
except tarfile.TarError as err:
|
||||
_LOGGER.warning("Can't restore folder %s -> %s", name, err)
|
||||
|
||||
# run tasks
|
||||
tasks = [self.loop.run_in_executor(None, _folder_restore, folder)
|
||||
for folder in folder_list]
|
||||
if tasks:
|
||||
await asyncio.wait(tasks, loop=self.loop)
|
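The context-manager pair above carries the whole snapshot lifecycle: __aenter__ stages a temporary directory (extracting an existing tar if one exists) and __aexit__ validates the metadata and packs the staging directory into the tar file. A minimal usage sketch, assuming a Snapshot instance whose tar file does not exist yet; the wrapper function and the literal "full" (standing in for SNAPSHOT_FULL) are illustrative, not part of this diff:

async def build_full_snapshot(snapshot, homeassistant, addons):
    # illustrative slug/name/date values; sys_type assumes SNAPSHOT_FULL == "full"
    snapshot.create("a1b2c3d4", "my backup", "2017-08-01T12:00:00", "full")
    async with snapshot:
        # everything below stages data into snapshot._tmp;
        # __aexit__ packs it into tar_file on clean exit
        snapshot.snapshot_homeassistant(homeassistant)
        for addon in addons:
            await snapshot.import_addon(addon)
        await snapshot.store_folders()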
21
hassio/snapshots/util.py
Normal file
@@ -0,0 +1,21 @@
"""Util snapshot functions."""
import hashlib
import shutil


def create_slug(name, date_str):
    """Generate a unique snapshot slug from name and date."""
    key = "{} - {}".format(date_str, name).lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def remove_folder(folder):
    """Remove folder data but not the folder itself."""
    for obj in folder.iterdir():
        try:
            if obj.is_dir():
                shutil.rmtree(str(obj), ignore_errors=True)
            else:
                obj.unlink()
        except (OSError, shutil.Error):
            pass
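For illustration, create_slug is deterministic: it is a truncated SHA-1 over the lowercased "<date> - <name>" string, so identical inputs always map to the identical 8-character slug, and case differences in the name do not matter:

slug = create_slug("My Backup", "2017-08-01T12:00:00")
# -> an 8-character hex string; the key is lowercased before hashing,
# so the case-variant name yields the same slug
assert slug == create_slug("my backup", "2017-08-01T12:00:00")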
32
hassio/snapshots/validate.py
Normal file
@@ -0,0 +1,32 @@
"""Validate snapshot data for restore."""

import voluptuous as vol

from ..const import (
    ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
    ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_DEVICES,
    ATTR_IMAGE, FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
    SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
from ..validate import HASS_DEVICES

ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]

# pylint: disable=no-value-for-parameter
SCHEMA_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Required(ATTR_DATE): vol.Coerce(str),
    vol.Required(ATTR_HOMEASSISTANT): vol.Schema({
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
    }),
    vol.Optional(ATTR_FOLDERS, default=[]): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS, default=[]): [vol.Schema({
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
    })],
    vol.Optional(ATTR_REPOSITORIES, default=[]): [vol.Url()],
}, extra=vol.ALLOW_EXTRA)
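A sketch of what this schema accepts, assuming the ATTR_* constants resolve to the lowercase key names used throughout and SNAPSHOT_FULL is the string "full" (both assumptions, not confirmed by this diff). Optional keys come back filled with their defaults, and unknown keys pass through because of extra=vol.ALLOW_EXTRA:

data = SCHEMA_SNAPSHOT({
    'slug': 'a1b2c3d4',          # assumed key names for ATTR_*
    'type': 'full',              # assumed value of SNAPSHOT_FULL
    'name': 'my backup',
    'date': '2017-08-01T12:00:00',
    'homeassistant': {'version': '0.50.2'},
})
assert data['folders'] == [] and data['repositories'] == []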
74
hassio/tasks.py
Normal file
@@ -0,0 +1,74 @@
"""Multiple tasks."""
import asyncio
from datetime import datetime
import logging

_LOGGER = logging.getLogger(__name__)


def api_sessions_cleanup(config):
    """Create scheduler task to clean up expired api sessions."""
    async def _api_sessions_cleanup():
        """Cleanup old api sessions."""
        now = datetime.now()
        for session, until_valid in config.security_sessions.items():
            if now >= until_valid:
                config.drop_security_session(session)

    return _api_sessions_cleanup


def addons_update(loop, addons):
    """Create scheduler task for auto updating addons."""
    async def _addons_update():
        """Check if an update is available for an add-on and update it."""
        tasks = []
        for addon in addons.list_addons:
            if not addon.is_installed or not addon.auto_update:
                continue

            if addon.version_installed == addon.last_version:
                continue

            if addon.test_udpate_schema():
                tasks.append(addon.update())
            else:
                _LOGGER.warning(
                    "Addon %s will be ignored, schema tests failed",
                    addon.slug)

        if tasks:
            _LOGGER.info("Addon auto update process %d tasks", len(tasks))
            await asyncio.wait(tasks, loop=loop)

    return _addons_update


def hassio_update(supervisor, updater):
    """Create scheduler task for updating the hassio supervisor."""
    async def _hassio_update():
        """Check for and run an update of the hassio supervisor."""
        await updater.fetch_data()
        if updater.version_hassio == supervisor.version:
            return

        # don't perform an update on the beta/dev channel
        if updater.beta_channel:
            _LOGGER.warning("Ignore Hass.IO update on beta upstream!")
            return

        _LOGGER.info("Found new HassIO version %s.", updater.version_hassio)
        await supervisor.update(updater.version_hassio)

    return _hassio_update


def homeassistant_watchdog(loop, homeassistant):
    """Create scheduler task for monitoring the running state."""
    async def _homeassistant_watchdog():
        """Check the running state and start Home Assistant if it is down."""
        if homeassistant.in_progress or await homeassistant.is_running():
            return

        loop.create_task(homeassistant.run())

    return _homeassistant_watchdog
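Each factory returns a coroutine function that a scheduler is expected to invoke on an interval. The scheduler itself is not part of this diff; a minimal stand-in showing the intended wiring could look like this (run_every and the interval values are assumptions for illustration):

import asyncio

async def run_every(interval, task):
    """Call a task coroutine function forever, pausing between runs."""
    while True:
        await task()
        await asyncio.sleep(interval)

# e.g. loop.create_task(run_every(3600, hassio_update(supervisor, updater)))
#      loop.create_task(run_every(15, homeassistant_watchdog(loop, homeassistant)))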
132
hassio/tools.py
@@ -1,55 +1,19 @@
 """Tools file for HassIO."""
 import asyncio
+from contextlib import suppress
+from datetime import datetime
 import json
 import logging
-import re
 import socket

 import aiohttp
 import async_timeout
-
-from .const import URL_HASSIO_VERSION, URL_HASSIO_VERSION_BETA
+import voluptuous as vol
+from voluptuous.humanize import humanize_error

 _LOGGER = logging.getLogger(__name__)

-_RE_VERSION = re.compile(r"VERSION=(.*)")
-_IMAGE_ARCH = re.compile(r".*/([a-z0-9]*)-hassio-supervisor")
-
-
-async def fetch_current_versions(websession, beta=False):
-    """Fetch current versions from github.
-
-    Is a coroutine.
-    """
-    url = URL_HASSIO_VERSION_BETA if beta else URL_HASSIO_VERSION
-    try:
-        with async_timeout.timeout(10, loop=websession.loop):
-            async with websession.get(url) as request:
-                return await request.json(content_type=None)
-
-    except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
-        _LOGGER.warning("Can't fetch versions from %s! %s", url, err)
-
-    except json.JSONDecodeError as err:
-        _LOGGER.warning("Can't parse versions from %s! %s", url, err)
-
-
-def get_arch_from_image(image):
-    """Return arch from hassio image name."""
-    found = _IMAGE_ARCH.match(image)
-    if found:
-        return found.group(1)
-
-
-def get_version_from_env(env_list):
-    """Extract Version from ENV list."""
-    for env in env_list:
-        found = _RE_VERSION.match(env)
-        if found:
-            return found.group(1)
-
-    _LOGGER.error("Can't find VERSION in env")
-    return None
+FREEGEOIP_URL = "https://freegeoip.io/json/"


 def get_local_ip(loop):
@@ -77,9 +41,10 @@ def get_local_ip(loop):
 def write_json_file(jsonfile, data):
     """Write a json file."""
     try:
-        with open(jsonfile, 'w') as conf_file:
-            conf_file.write(json.dumps(data))
-    except OSError:
+        json_str = json.dumps(data, indent=2)
+        with jsonfile.open('w') as conf_file:
+            conf_file.write(json_str)
+    except (OSError, json.JSONDecodeError):
         return False

     return True
@@ -87,5 +52,82 @@ def write_json_file(jsonfile, data):

 def read_json_file(jsonfile):
     """Read a json file and return a dict."""
-    with open(jsonfile, 'r') as cfile:
+    with jsonfile.open('r') as cfile:
         return json.loads(cfile.read())
+
+
+async def fetch_timezone(websession):
+    """Read timezone from freegeoip."""
+    data = {}
+    with suppress(aiohttp.ClientError, asyncio.TimeoutError,
+                  json.JSONDecodeError, KeyError):
+        with async_timeout.timeout(10, loop=websession.loop):
+            async with websession.get(FREEGEOIP_URL) as request:
+                data = await request.json()
+
+    return data.get('time_zone', 'UTC')
+
+
+class JsonConfig(object):
+    """Hassio object for handling a schema-validated JSON config file."""
+
+    def __init__(self, json_file, schema):
+        """Initialize the config object."""
+        self._file = json_file
+        self._schema = schema
+        self._data = {}
+
+        # init or load data
+        if self._file.is_file():
+            try:
+                self._data = read_json_file(self._file)
+            except (OSError, json.JSONDecodeError):
+                _LOGGER.warning("Can't read %s", self._file)
+                self._data = {}
+
+        # validate
+        try:
+            self._data = self._schema(self._data)
+        except vol.Invalid as ex:
+            _LOGGER.error("Can't parse %s -> %s",
+                          self._file, humanize_error(self._data, ex))
+
+    def save(self):
+        """Store data to config file."""
+        # validate
+        try:
+            self._data = self._schema(self._data)
+        except vol.Invalid as ex:
+            _LOGGER.error("Can't parse data -> %s",
+                          humanize_error(self._data, ex))
+            return False
+
+        # write
+        if not write_json_file(self._file, self._data):
+            _LOGGER.error("Can't store config in %s", self._file)
+            return False
+        return True
+
+
+class AsyncThrottle(object):
+    """
+    Decorator that prevents a function from being called more than once
+    every time period.
+    """
+
+    def __init__(self, delta):
+        """Initialize async throttle."""
+        self.throttle_period = delta
+        self.time_of_last_call = datetime.min
+
+    def __call__(self, method):
+        """Wrap the throttled method."""
+        async def wrapper(*args, **kwargs):
+            """Drop calls that arrive inside the throttle period."""
+            now = datetime.now()
+            time_since_last_call = now - self.time_of_last_call
+
+            if time_since_last_call > self.throttle_period:
+                self.time_of_last_call = now
+                return await method(*args, **kwargs)
+
+        return wrapper
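Note the design choice in AsyncThrottle: calls inside the window are silently dropped (wrapper returns None) rather than queued, and time_of_last_call lives on the decorator instance, so every object sharing a decorated method also shares one throttle window. A usage sketch, with an illustrative class and method name:

from datetime import timedelta

class VersionSource:
    @AsyncThrottle(timedelta(seconds=60))
    async def refresh(self):
        ...  # hits the network at most once per minute

# within the same 60s window the first call runs;
# a second call returns None without awaiting refresh()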
86
hassio/updater.py
Normal file
@@ -0,0 +1,86 @@
"""Fetch last versions from webserver."""
import asyncio
from datetime import timedelta
import json
import logging

import aiohttp
import async_timeout

from .const import (
    URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
    ATTR_BETA_CHANNEL)
from .tools import AsyncThrottle, JsonConfig
from .validate import SCHEMA_UPDATER_CONFIG

_LOGGER = logging.getLogger(__name__)


class Updater(JsonConfig):
    """Fetch last versions from version.json."""

    def __init__(self, config, loop, websession):
        """Initialize updater."""
        super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
        self.config = config
        self.loop = loop
        self.websession = websession

    @property
    def version_homeassistant(self):
        """Return last version of homeassistant."""
        return self._data.get(ATTR_HOMEASSISTANT)

    @property
    def version_hassio(self):
        """Return last version of hassio."""
        return self._data.get(ATTR_HASSIO)

    @property
    def upstream(self):
        """Return upstream branch used for version lookups."""
        if self.beta_channel:
            return 'dev'
        return 'master'

    @property
    def beta_channel(self):
        """Return True if we run on the beta upstream."""
        return self._data[ATTR_BETA_CHANNEL]

    @beta_channel.setter
    def beta_channel(self, value):
        """Set beta upstream mode."""
        self._data[ATTR_BETA_CHANNEL] = bool(value)
        self.save()

    @AsyncThrottle(timedelta(seconds=60))
    async def fetch_data(self):
        """Fetch current versions from github.

        Is a coroutine.
        """
        url = URL_HASSIO_VERSION.format(self.upstream)
        try:
            _LOGGER.info("Fetch update data from %s", url)
            with async_timeout.timeout(10, loop=self.loop):
                async with self.websession.get(url) as request:
                    data = await request.json(content_type=None)

        except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
            _LOGGER.warning("Can't fetch versions from %s -> %s", url, err)
            return

        except json.JSONDecodeError as err:
            _LOGGER.warning("Can't parse versions from %s -> %s", url, err)
            return

        # data valid?
        if not data:
            _LOGGER.warning("Invalid data from %s", url)
            return

        # update versions
        self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
        self._data[ATTR_HASSIO] = data.get('hassio')
        self.save()
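Putting the pieces together: Updater persists its state through JsonConfig.save() and rate-limits itself through AsyncThrottle, so callers can invoke fetch_data() freely. An illustrative sequence (the wrapper function and surrounding wiring are assumed):

async def check_versions(updater):
    await updater.fetch_data()   # performs the HTTP request and save()
    await updater.fetch_data()   # dropped by the 60s throttle, returns None
    print(updater.version_hassio, updater.version_homeassistant)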
85
hassio/validate.py
Normal file
@@ -0,0 +1,85 @@
"""Validate functions."""
import voluptuous as vol

import pytz

from .const import (
    ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_SESSIONS, ATTR_PASSWORD,
    ATTR_TOTP, ATTR_SECURITY, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
    ATTR_API_ENDPOINT, ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT,
    ATTR_AUDIO_INPUT, ATTR_HOMEASSISTANT, ATTR_HASSIO)


NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
HASS_DEVICES = [vol.Match(r"^[^/]*$")]
ALSA_CHANNEL = vol.Match(r"\d+,\d+")


def validate_timezone(timezone):
    """Validate voluptuous timezone."""
    try:
        pytz.timezone(timezone)
    except pytz.exceptions.UnknownTimeZoneError:
        raise vol.Invalid(
            "Invalid time zone passed in. Valid options can be found here: "
            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
            from None

    return timezone


def convert_to_docker_ports(data):
    """Convert data into a docker port list."""
    # dynamic ports
    if data is None:
        return

    # single port
    if isinstance(data, int):
        return NETWORK_PORT(data)

    # port list
    if isinstance(data, list) and len(data) > 2:
        return vol.Schema([NETWORK_PORT])(data)

    # ip port mapping
    if isinstance(data, list) and len(data) == 2:
        return (vol.Coerce(str)(data[0]), NETWORK_PORT(data[1]))

    raise vol.Invalid("Can't validate docker host settings")


DOCKER_PORTS = vol.Schema({
    vol.All(vol.Coerce(str), vol.Match(r"^\d+(?:/tcp|/udp)?$")):
        convert_to_docker_ports,
})


SCHEMA_HASS_CONFIG = vol.Schema({
    vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
})


# pylint: disable=no-value-for-parameter
SCHEMA_UPDATER_CONFIG = vol.Schema({
    vol.Optional(ATTR_BETA_CHANNEL, default=False): vol.Boolean(),
    vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
    vol.Optional(ATTR_HASSIO): vol.Coerce(str),
})


# pylint: disable=no-value-for-parameter
SCHEMA_HASSIO_CONFIG = vol.Schema({
    vol.Optional(ATTR_API_ENDPOINT): vol.Coerce(str),
    vol.Optional(ATTR_TIMEZONE, default='UTC'): validate_timezone,
    vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
    vol.Optional(ATTR_SECURITY, default=False): vol.Boolean(),
    vol.Optional(ATTR_TOTP): vol.Coerce(str),
    vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
    vol.Optional(ATTR_SESSIONS, default={}):
        vol.Schema({vol.Coerce(str): vol.Coerce(str)}),
    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
}, extra=vol.REMOVE_EXTRA)
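For illustration, the three input shapes convert_to_docker_ports accepts (dynamic, single port, and ip/port pair), driven through the DOCKER_PORTS schema; the concrete port values are arbitrary examples:

DOCKER_PORTS({'80/tcp': None})               # dynamic host port
DOCKER_PORTS({'8123/tcp': 8123})             # fixed host port
DOCKER_PORTS({'53/udp': ['127.0.0.1', 53]})  # (interface ip, port) mapping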
1
home-assistant-polymer
Submodule
Submodule home-assistant-polymer added at 1aa387a72f
BIN
misc/hassio.png
Normal file
Binary file not shown. (New image, 42 KiB)
1
misc/hassio.xml
Normal file
@@ -0,0 +1 @@
<mxfile userAgent="Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36" version="6.5.6" editor="www.draw.io" type="device"><diagram name="Page-1">5Vptc6M2EP41/ng3gHj9mPiSy820c5n6Q3sfsVBsNTJyhYid/voKkABZkOBY+KYtmYnR6pVn99ld1l6A5e74laX77a80Q2ThOdlxAb4sPC8OY/G/Erw2At9xG8GG4awR9QQr/DeSQkdKS5yhQhvIKSUc73UhpHmOINdkKWP0oA97okTfdZ9ukCFYwZSY0t9xxrdS6oZJ1/GA8GYrt469sOlYp/B5w2iZy/0WHniqr6Z7l6q15IMW2zSjh54I3C3AklHKm7vdcYlIBa2CrZl3P9LbnpuhnE+Z4DUTXlJSInXikIipt09UrCAOyF8lKOFfJVUdn4paZTdigNjtKD5ERw206DtIYKrenLJdSrrJ4m5TfX5fqX3E2Zqtmg4JS7urd9hijlb7FFbtg7A2MWjLd0S03Oo0mJAlJZTVowXYKIRQyAvO6DPq9Tj1Jc+/kutLvF4Q4+g4CqHbKkbYO6I7xNmrGKImJKCZIm09SKRuD53l+Arobc9oQjkulca6aZfuFCZupM6G9QcM/X3LcaW31WvB0e5CNGGG1vF6CE0QggRkrb7sAhhNBNCzAKBvAPiFwmfELkUOokCQ/trI+SZy3hBywAJyoYHcw9JArXaFqJpRUe9MLscQDXN5HQd+4NjB0A8DHcPQxDBwTAgDCxAmBl4oE3FINinjW7qheUruOumtjmgPPXTE/I9K/DkKZPOH6srFwZq+QDV/yBX+RJy/ygiclpwKUbfxL5Tu5RrNUavzvQ20eBxaMihHRTJ4p2yDeM9uTHUwRFKOX/TVLwFX5RK20fXeQDcB3im+deMRMSweALGfBbp/JdCj0Xxi3UX48xIMN6wSjNMEYlXuEXvBhXAJagOm+h7Sovj2fTTBaMXr0aSjMwP3fbdluKflMgybVEN3aFmA4sy347ZAoLstMJB1uPGA33JtRE3Xm4Nbbo9Yyou13NJ4VbuxeUnkqveOHouiK7EIzOO6NHh1dE/iQtc89VyFwIPfVK9YQgCJYBqGSnyPidpzqm5QnpmLCWFvqcFMfrm0qlgvvlZQUm8cvaxJrPLpRjy6wLByU9dxRSmKn6CtLFR3Rd5A/t56HS1/9224ovDKXHE/O3qQ/+zG8aWBfiKtPmjxwLR4d0Sn1i3enyVUSJ30srCJCPYcTk5zpHmb8xQ2Vl+AJXtp+WpPYdeKPa5ZUrjJMpoXhhqLbbqvbveMQlQU73sn3ZVN9lX34qr9fZMTCt07XhiBxANhEHtx7PhgpqRqyJN5bmB6ssSCI1O1nDmJ0rVOHdWlqYAkU59uc7zoXEAAOfWR4vq9Q5WqneE0Wq3Q0FJO6hdSz1ynobKxTm0U7dNMs5PYJCjk1KxYKX6WO9IMALcVOzAUyKdrRB5pgTmmuRiyppzTnRhAqo7btoitVVbrMna3xg3Bm2oup+fRvCvEnpZu5QYWiHxS0wEDNR0wkJBYqciaNJ5AUifSWOq/x1LX5OgUOk5Ity8PgO97LQshEng/L0SqvXsMPBwOpvcmBO+LWg2SiZDQMrs4Tl6FQInuz3xnIKeP5iovgLcLo9K4P5DEn8mRmTLEXqzt3hyaQ3qj0faDNPFNmjTmaz+S+icmc+pN7YVAMP6tjfNQrkcjIUzZ5fQL62uAfkH1Z4d+CThJJ4boN1TdsxLBopnY17f7yGaWOT9lP8i+YAb2TVZjYJDkK+bbuekxFp2QmwUomocevnppvQo94v9LcEpCnaOR5dgU/idjk/m9+G9oX71qUYbReBXl30s+Vf6dgXyi2f0WqlFG93szcPcP</diagram></mxfile>
BIN
misc/security.png
Normal file
Binary file not shown. (New image, 36 KiB)
1
misc/security.xml
Normal file
@@ -0,0 +1 @@
<mxfile userAgent="Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:53.0) Gecko/20100101 Firefox/53.0" version="6.5.8" editor="www.draw.io" type="device"><diagram name="Page-1">5Vxdd5s4EP01fmwOkgCbx9hp2j7sNrvpnnYfiVFsTjDyghwn++tXGMmAxileEB9O+9BjBhjM3GHmzjXKhCw2L58Sf7v+jQU0mmAreJmQmwnGU4eI/zPDa24gyM0NqyQMchMqDPfhv1QaLWndhQFNKwdyxiIebqvGJYtjuuQVm58kbF897JFF1atu/RUFhvulH0Hr9zDg69w6w25h/0zD1VpdGblevufBXz6tEraL5fUmmDwe/uW7N77yJW80XfsB25dM5OOELBLGeP5p87KgURZaFbb8vNs39h6/d0Jjfs4JOD/h2Y928tZvwyTlwnTP/YTLL8lfVWA4fRF+52u+iYQBiY8pT9gTXbCIJcISs1gcOX8Mo0gz+VG4isXmUnwzKuzzZ5rwUIT8Wu7YhEGQXWa+X4ec3m/9ZXbNvcivzCGL+b38Go7aztMGeWIb3rcMRXYV+lIyyTh8omxDefIqDpF7ySw/Q6asKxHaF/gjS9rWJewVkr5MudXRcRF28UFG/jQKBKDwVypipAe/FPUtC2N+uKIznzg3mYUmobhwFtoblvA1W7HYj+4KawcxQhgGyT0Vo5mBINkgSJ/9NB1hkDAiw0XJAVFaiyhdffk6wkDZ7oCBckGg2JbGh1uKs2b2drT0wvXAOGcbsYPGwXXWfDJbxJZPP4uSqK4ryiuZTYNKU4JhK4VFRSChkc/D52rbOhUW6e0uQ7pAwNOeZ1sLbMp2yZLKk8ptRPMjoNMc4aqj/HaBowNIxzs8C7cpwE2ckdLlLgm5uNPbMH5kvaLnDIYenmrPj9sQPuLUODIH3wzCNxVxFtdz/9llrGcexiEvtibkOiNwfpTS7KjpTVtsD085mQd+uqaBPE/slmRilm29hPyH+PzBurIcuf232LauCFH7S5XwxvpZpuQQVDKlyaPfMlNsy60AjK2mmYJrHJnLFA9kip8+ZfsP+WHdfe8+E856/kk/EOqsApOGECJS48gchGqcK2GYUm4Sw8vss7hpoT5GVDlyvM6wg6NhtdGyLQ9ZLAi4G2WF+kHMK+7qULK1gr4VBHTPkkAv6nrJt7b70iFGir1Kj/K4iC6vsWPPUGMHjgzmCxxiq/mS0jQVCfNGvvyvZOk1VxQdQFcWmlbowNRtRQfsMacc0XWNpikHHL2RcgIG/7V0mJxJWyYlFA306lSk5Rv5Jg94oq+mM66egDSqW31xSm16J9OmGTOrcWSwSEF5xMi43xGSA1FL0rTd6NQSODKIJNRvfmfJxodQvmPJGlfZoN2nZo2gEHMZorWDYJQ6UxkR1DsuRLXuN0xw2L8c2brXSGE4Ug+mW6vkHn6gdpqKIbpw7RDcVcc6JtpolGv11I1g3HAcQ+MGcGQQwBOKyBnaNU/E0XhROY4zvn2fGrfKqUZ1wrDK7TSWTXCNI4NJBWWTXOYejb6tiF7fU4jbVIHQpxDgyCB6UF/IZ4Xete3x9GK3aSnXxW3X7kzcPvHrfzdi5SAypVuVKV3itqros1EzhykyxByAoz6FylOvNbx7obI3XqANbNPG70nMahwZrFBQOBizUjkUSZjqM3VTkgAcGYQSihuXoZR5fQobBAobF6KU9RsmqCJcjlLWb6TguD6YUqaSe3h27plSyrzulDJS9ypB70qZeupGwHc9U0oZcGQQwPqf3dsoZflxFy6UkTZlwrBQ5pkSyoAjgzkFf7ovhLLbb1+/3XWfDGfVCnzubGyYCiPLlGAGPRmEESovZcXMCJAX2pqRZUo5Q1Z30hmpW4DRjXSWdYVDLzgcNcu64gVqaSrZRsotEDIlpkFPfapppH6VyftT03ojD/qqvebLjmZ1ngyWLSjCjFlPG4xEIFOCGvRkDky1TPHEy3+iSooiia2TPOLXeRVw5kqeVWoauKtXAW2oSY1U4LQ1noQ9G4SpuwXsGIRptAqnM2ScoPwzZolz0FBBouMvRTvwOT3WQJ2GywJZEHAzHLrgzIpB54wZ2a0Ys32iOaoHaQDGfHyd+rjQXWld7ZfMqwbaQb+E5Kc6s0mVzeDANsR6LNIy1fCJVDt3CUYXw5lWWWyvYaoRp85Tn8OZA8nbH39+WLCAts2YrtZTnVtuWg9Wem1pysXJTAPcsc8DvAmckPyNHM5z9ZbWo5UOgtvw+UWkzpNBOCFJ/ZKvzv7lJiqtPx8LV3l1lXpNp+VIJTaLv/mWo1b8XT3y8T8=</diagram></mxfile>
12
setup.py
@@ -29,7 +29,13 @@ setup(
     keywords=['docker', 'home-assistant', 'api'],
     zip_safe=False,
     platforms='any',
-    packages=['hassio', 'hassio.dock', 'hassio.api', 'hassio.addons'],
+    packages=[
+        'hassio',
+        'hassio.dock',
+        'hassio.api',
+        'hassio.addons',
+        'hassio.snapshots'
+    ],
     include_package_data=True,
     install_requires=[
         'async_timeout',
@@ -38,5 +44,9 @@ setup(
         'colorlog',
         'voluptuous',
         'gitpython',
+        'pyotp',
+        'pyqrcode',
+        'pytz',
+        'pyudev'
     ]
 )
11
version.json
@@ -1,7 +1,8 @@
 {
-    "hassio_tag": "0.13",
-    "homeassistant_tag": "0.43.1",
-    "resinos_version": "0.4",
-    "resinhup_version": "0.1",
-    "generic_hc_version": "0.1"
+    "hassio": "0.52",
+    "homeassistant": "0.50.2",
+    "resinos": "1.0",
+    "resinhup": "0.3",
+    "generic": "0.3",
+    "cluster": "0.1"
 }
@@ -1,7 +0,0 @@
-{
-    "hassio_tag": "0.13",
-    "homeassistant_tag": "0.43.1",
-    "resinos_version": "0.4",
-    "resinhup_version": "0.1",
-    "generic_hc_version": "0.1"
-}