Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-13 02:59:21 +00:00)
Compare commits
268 Commits
3 .gitmodules (vendored, new file)
@@ -0,0 +1,3 @@
[submodule "home-assistant-polymer"]
	path = home-assistant-polymer
	url = https://github.com/home-assistant/home-assistant-polymer
.travis.yml

@@ -2,7 +2,7 @@ sudo: false
matrix:
  fast_finish: true
  include:
    - python: "3.5"
    - python: "3.6"

cache:
  directories:
302 API.md
@@ -2,7 +2,7 @@

## HassIO REST API

Interface for HomeAssistant to controll things from supervisor.
Interface for HomeAssistant to control things from supervisor.

On error:
```json
@@ -22,29 +22,79 @@ On success

### HassIO

- `/supervisor/ping`
- GET `/supervisor/ping`

- `/supervisor/info`
- GET `/supervisor/info`

The addons from `addons` are only installed one.

```json
{
    "version": "INSTALL_VERSION",
    "current": "CURRENT_VERSION",
    "beta": "true|false",
    "last_version": "LAST_VERSION",
    "arch": "armhf|aarch64|i386|amd64",
    "beta_channel": "true|false",
    "timezone": "TIMEZONE",
    "addons": [
        {
            "name": "xy bla",
            "slug": "xy",
            "version": "CURRENT_VERSION",
            "installed": "none|INSTALL_VERSION",
            "dedicated": "bool",
            "description": "description"
            "description": "description",
            "arch": ["armhf", "aarch64", "i386", "amd64"],
            "repository": "12345678|null",
            "version": "LAST_VERSION",
            "installed": "INSTALL_VERSION",
            "detached": "bool",
            "build": "bool",
            "url": "null|url"
        }
    ],
    "addons_repositories": [
        "REPO_URL"
    ],
    "snapshots": [
        {
            "slug": "SLUG",
            "data": "ISO",
            "name": "Custom name"
        }
    ]
}
```
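
For illustration, a minimal Python sketch that reads this endpoint; the `http://hassio` base URL and the absence of authentication are assumptions, not something this diff specifies:

```python
# Hypothetical client sketch: fetch supervisor info and list its addons.
# Assumes the API answers on http://hassio without authentication.
import requests

resp = requests.get("http://hassio/supervisor/info", timeout=10)
resp.raise_for_status()

info = resp.json()
print(info["version"], info["arch"])
for addon in info["addons"]:
    print(addon["slug"], addon["installed"])
```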

- `/supervisor/update`
- GET `/supervisor/addons`

Get all available addons

```json
{
    "addons": [
        {
            "name": "xy bla",
            "slug": "xy",
            "description": "description",
            "arch": ["armhf", "aarch64", "i386", "amd64"],
            "repository": "core|local|REP_ID",
            "version": "LAST_VERSION",
            "installed": "none|INSTALL_VERSION",
            "detached": "bool",
            "build": "bool",
            "url": "null|url"
        }
    ],
    "repositories": [
        {
            "slug": "12345678",
            "name": "Repitory Name|unknown",
            "source": "URL_OF_REPOSITORY",
            "url": "WEBSITE|REPOSITORY",
            "maintainer": "BLA BLU <fla@dld.ch>|unknown"
        }
    ]
}
```

- POST `/supervisor/update`
Optional:
```json
{
@@ -52,40 +102,137 @@ Optional:
}
```

- `/supervisor/option`
- POST `/supervisor/options`
```json
{
    "beta": "true|false"
    "beta_channel": "true|false",
    "timezone": "TIMEZONE",
    "addons_repositories": [
        "REPO_URL"
    ]
}
```
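
A matching sketch for this endpoint, under the same assumptions (base URL and open access are illustrative):

```python
# Hypothetical sketch: switch to the beta channel and set the timezone.
import requests

payload = {"beta_channel": "true", "timezone": "UTC"}
resp = requests.post(
    "http://hassio/supervisor/options", json=payload, timeout=10)
resp.raise_for_status()
```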

- `/supervisor/reload`
- POST `/supervisor/reload`

Reload addons/version.

- `/supervisor/logs`
- GET `/supervisor/logs`

Output the raw docker log

### Host
### Security

- `/host/shutdown`

- `/host/reboot`

- `/host/info`
See HostControll info command.
- GET `/security/info`
```json
{
    "os": "",
    "version": "",
    "current": "",
    "level": "",
    "hostname": "",
    "initialize": "bool",
    "totp": "bool"
}
```

- `/host/update`
- POST `/security/options`
```json
{
    "password": "xy"
}
```

- POST `/security/totp`
```json
{
    "password": "xy"
}
```

Return QR-Code

- POST `/security/session`
```json
{
    "password": "xy",
    "totp": "null|123456"
}
```
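
A short sketch of the login flow against this endpoint (field names come from the JSON above; the base URL is again an assumption):

```python
# Hypothetical sketch: create a session with a password plus an optional
# TOTP code; send "null" for totp when TOTP is not initialized.
import requests

payload = {"password": "xy", "totp": "123456"}
resp = requests.post(
    "http://hassio/security/session", json=payload, timeout=10)
resp.raise_for_status()
```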

### Backup/Snapshot

- POST `/snapshots/new/full`
```json
{
    "name": "Optional"
}
```

- POST `/snapshots/new/partial`
```json
{
    "name": "Optional",
    "addons": ["ADDON_SLUG"],
    "folders": ["FOLDER_NAME"]
}
```
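
For example, a sketch that triggers a partial snapshot; the folder name and snapshot name are placeholders, not values from this diff:

```python
# Hypothetical sketch: snapshot one addon plus the "ssl" folder.
import requests

payload = {
    "name": "before-upgrade",   # optional custom name (illustrative)
    "addons": ["ADDON_SLUG"],   # placeholder slug
    "folders": ["ssl"],
}
requests.post(
    "http://hassio/snapshots/new/partial", json=payload, timeout=10)
```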

- POST `/snapshots/reload`

- GET `/snapshots/{slug}/info`
```json
{
    "slug": "SNAPSHOT ID",
    "type": "full|partial",
    "name": "custom snapshot name / description",
    "date": "ISO",
    "size": "SIZE_IN_MB",
    "homeassistant": {
        "version": "INSTALLED_HASS_VERSION",
        "devices": []
    },
    "addons": [
        {
            "slug": "ADDON_SLUG",
            "name": "NAME",
            "version": "INSTALLED_VERSION"
        }
    ],
    "repositories": ["URL"],
    "folders": ["NAME"]
}
```

- POST `/snapshots/{slug}/remove`

- POST `/snapshots/{slug}/restore/full`

- POST `/snapshots/{slug}/restore/partial`
```json
{
    "homeassistant": "bool",
    "addons": ["ADDON_SLUG"],
    "folders": ["FOLDER_NAME"]
}
```

### Host
- POST `/host/reload`

- POST `/host/shutdown`

- POST `/host/reboot`

- GET `/host/info`
See HostControl info command.
```json
{
    "type": "",
    "version": "",
    "last_version": "",
    "features": ["shutdown", "reboot", "update", "network_info", "network_control"],
    "hostname": "",
    "os": ""
}
```

- POST `/host/update`
Optional:
```json
{
@@ -95,9 +242,14 @@ Optional:

### Network

- `/network/info`
- GET `/network/info`
```json
{
    "hostname": ""
}
```

- `/network/options`
- POST `/network/options`
```json
{
    "hostname": "",
@@ -111,16 +263,17 @@ Optional:

### HomeAssistant

- `/homeassistant/info`
- GET `/homeassistant/info`

```json
{
    "version": "INSTALL_VERSION",
    "current": "CURRENT_VERSION"
    "last_version": "LAST_VERSION",
    "devices": []
}
```

- `/homeassistant/update`
- POST `/homeassistant/update`
Optional:
```json
{
@@ -128,36 +281,62 @@ Optional:
}
```

- `/homeassistant/logs`
- GET `/homeassistant/logs`

Output the raw docker log

### REST API addons
- POST `/homeassistant/restart`

- `/addons/{addon}/info`
- POST `/homeassistant/options`
```json
{
    "version": "VERSION",
    "current": "CURRENT_VERSION",
    "state": "started|stopped",
    "devices": [],
}
```

### REST API addons

- POST `/addons/reload`

- GET `/addons/{addon}/info`
```json
{
    "name": "xy bla",
    "description": "description",
    "auto_update": "bool",
    "url": "null|url of addon",
    "detached": "bool",
    "repository": "12345678|null",
    "version": "null|VERSION_INSTALLED",
    "last_version": "LAST_VERSION",
    "state": "none|started|stopped",
    "boot": "auto|manual",
    "build": "bool",
    "options": "{}",
    "network": "{}|null",
    "host_network": "bool"
}
```

- POST `/addons/{addon}/options`
```json
{
    "boot": "auto|manual",
    "auto_update": "bool",
    "network": {
        "CONTAINER": "port|[ip, port]"
    },
    "options": {},
}
```

- `/addons/{addon}/options`
```json
{
    "boot": "auto|manual",
    "options": {},
}
```
For reset custom network settings, set it `null`.
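
A sketch of that reset, using the same hypothetical base URL; posting `null` (Python `None`) for `network` drops the custom port mapping:

```python
# Hypothetical sketch: reset an addon's custom network settings.
import requests

requests.post(
    "http://hassio/addons/ADDON_SLUG/options",
    json={"network": None}, timeout=10)
```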

- `/addons/{addon}/start`
- POST `/addons/{addon}/start`

- `/addons/{addon}/stop`
- POST `/addons/{addon}/stop`

- `/addons/{addon}/install`
- POST `/addons/{addon}/install`
Optional:
```json
{
@@ -165,9 +344,9 @@ Optional:
}
```

- `/addons/{addon}/uninstall`
- POST `/addons/{addon}/uninstall`

- `/addons/{addon}/update`
- POST `/addons/{addon}/update`
Optional:
```json
{
@@ -175,24 +354,28 @@ Optional:
}
```

- `/addons/{addon}/logs`
- GET `/addons/{addon}/logs`

Output the raw docker log

## Host Controll
- POST `/addons/{addon}/restart`

## Host Control

Communicate over unix socket with a host daemon.

- commands
```
# info
-> {'os', 'version', 'current', 'level', 'hostname'}
-> {'type', 'version', 'last_version', 'features', 'hostname'}
# reboot
# shutdown
# host-update [v]

# hostname xy

# network info
# network hostname xy
-> {}
# network wlan ssd xy
# network wlan password xy
# network int ip xy
@@ -200,10 +383,13 @@ Communicate over unix socket with a host daemon.
# network int route xy
```
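
A minimal sketch of speaking this protocol from Python; the socket path is an assumption, the text above only says it is a unix socket:

```python
# Hypothetical sketch: send one command to the host daemon and print
# the reply. The socket path below is illustrative, not from the diff.
import socket

with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
    sock.connect("/var/run/hassio-hc.sock")  # assumed path
    sock.sendall(b"info\n")
    print(sock.recv(4096).decode())
```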

level:
- 1: power functions
- 2: host update
- 4: network functions
features:
- shutdown
- reboot
- update
- hostname
- network_info
- network_control

Answer:
```
3 MANIFEST.in (new file)
@@ -0,0 +1,3 @@
include LICENSE.md
graft hassio
recursive-exclude * *.py[co]
56 README.md
@@ -1,56 +1,14 @@

# HassIO
First private cloud solution for home automation.
### First private cloud solution for home automation.

It is a docker image (supervisor) they manage HomeAssistant docker and give a interface to controll itself over UI. It have a own eco system with addons to extend the functionality in a easy way.
Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.

[HassIO-Addons](https://github.com/pvizeli/hassio-addons) | [HassIO-Build](https://github.com/pvizeli/hassio-build)

**HassIO is at the moment on development and not ready to use productive!**
[HassIO-Addons](https://github.com/home-assistant/hassio-addons) | [HassIO-Build](https://github.com/home-assistant/hassio-build)

## Feature in progress
- Backup/Restore
- MQTT addon
- DHCP-Server addon
**HassIO is under active development and is not ready yet for production use.**

# HomeAssistant
## Installation

## SSL

All addons they can create SSL certs do that in same schema. So you can put follow lines to your `configuration.yaml`.
```yaml
http:
  ssl_certificate: /ssl/fullchain.pem
  ssl_key: /ssl/privkey.pem
```

# Hardware Image
The image is based on ResinOS and Yocto Linux. It comes with the HassIO supervisor pre-installed. This includes support to update the supervisor over the air. After flashing your host OS will not require any more maintenance! The image does not include Home Assistant, instead it will downloaded when the image boots up for the first time.

Download can be found here: https://drive.google.com/drive/folders/0B2o1Uz6l1wVNbFJnb2gwNXJja28?usp=sharing

After extracting the archive, flash it to a drive using [Etcher](https://etcher.io/).

## History
- **0.1**: First techpreview with dumy supervisor (ResinOS 2.0.0-RC5)
- **0.2**: Fix some bugs and update it to HassIO 0.2
- **0.3**: Update HostControll and feature for HassIO 0.3 (ResinOS 2.0.0 / need reflash)
- **0.4**: Update HostControll and bring resinos OTA (resinhub) back (ResinOS 2.0.0-rev3)

## Configuring the image
You can configure the WiFi network that the image should connect to after flashing using [`resin-device-toolbox`](https://resinos.io/docs/raspberrypi3/gettingstarted/#install-resin-device-toolbox).

## Developer access to ResinOS host
Create an `authorized_keys` file in the boot partition of your SD card with your public key. After a boot it, you can acces your device as root over ssh on port 22222.

## Troubleshooting

Read logoutput from supervisor:
```bash
journalctl -f -u resin-supervisor.service
docker logs homeassistant
```

## Install on a own System

We have a installer to install HassIO on own linux device without our hardware image:
https://github.com/pvizeli/hassio-build/tree/master/install
Installation instructions can be found at [https://home-assistant.io/hassio](https://home-assistant.io/hassio).
hassio/__main__.py

@@ -1,5 +1,6 @@
"""Main file for HassIO."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
import sys

@@ -17,16 +18,27 @@ if __name__ == "__main__":
        exit(1)

    loop = asyncio.get_event_loop()
    hassio = core.HassIO(loop)
    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
    loop.set_default_executor(executor)

    _LOGGER.info("Initialize Hassio setup")
    config = bootstrap.initialize_system_data()
    hassio = core.HassIO(loop, config)

    bootstrap.migrate_system_env(config)

    _LOGGER.info("Run Hassio setup")
    loop.run_until_complete(hassio.setup())

    _LOGGER.info("Start Hassio task")
    _LOGGER.info("Start Hassio")
    loop.call_soon_threadsafe(loop.create_task, hassio.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)

    _LOGGER.info("Run Hassio loop")
    loop.run_forever()

    _LOGGER.info("Cleanup system")
    executor.shutdown(wait=False)
    loop.close()

    _LOGGER.info("Close Hassio")
hassio/addons/__init__.py

@@ -1,164 +1,133 @@
"""Init file for HassIO addons."""
import asyncio
import logging
import os
import shutil

from .data import AddonsData
from .git import AddonsRepo
from ..const import STATE_STOPPED, STATE_STARTED
from ..dock.addon import DockerAddon
from .addon import Addon
from .repository import Repository
from .data import Data
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO

_LOGGER = logging.getLogger(__name__)

BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))


class AddonManager(AddonsData):

class AddonManager(object):
    """Manage addons inside HassIO."""

    def __init__(self, config, loop, dock):
        """Initialize docker base wrapper."""
        super().__init__(config)

        self.loop = loop
        self.config = config
        self.dock = dock
        self.repo = AddonsRepo(config, loop)
        self.dockers = {}
        self.data = Data(config)
        self.addons = {}
        self.repositories = {}

    async def prepare(self, arch):
    @property
    def list_addons(self):
        """Return a list of all addons."""
        return list(self.addons.values())

    @property
    def list_repositories(self):
        """Return list of addon repositories."""
        return list(self.repositories.values())

    def get(self, addon_slug):
        """Return a adddon from slug."""
        return self.addons.get(addon_slug)

    async def prepare(self):
        """Startup addon management."""
        self.arch = arch
        self.data.reload()

        # load addon repository
        if await self.repo.load():
            self.read_addons_repo()
        # init hassio built-in repositories
        repositories = \
            set(self.config.addons_repositories) | BUILTIN_REPOSITORIES

        # load installed addons
        for addon in self.list_installed:
            self.dockers[addon] = DockerAddon(
                self.config, self.loop, self.dock, self, addon)
            await self.dockers[addon].attach()
        # init custom repositories & load addons
        await self.load_repositories(repositories)

    async def reload(self):
        """Update addons from repo and reload list."""
        if not await self.repo.pull():
            return
        self.read_addons_repo()

        # remove stalled addons
        for addon in self.list_removed:
            _LOGGER.warning("Dedicated addon '%s' found!", addon)

    async def auto_boot(self, start_type):
        """Boot addons with mode auto."""
        boot_list = self.list_startup(start_type)
        tasks = []

        for addon in boot_list:
            tasks.append(self.loop.create_task(self.start(addon)))

        _LOGGER.info("Startup %s run %d addons", start_type, len(tasks))
        tasks = [repository.update() for repository in
                 self.repositories.values()]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    async def install(self, addon, version=None):
        """Install a addon."""
        if not self.exists_addon(addon):
            _LOGGER.error("Addon %s not exists for install", addon)
            return False
        # read data from repositories
        self.data.reload()

        if self.is_installed(addon):
            _LOGGER.error("Addon %s is already installed", addon)
            return False
        # update addons
        await self.load_addons()

        if not os.path.isdir(self.path_data(addon)):
            _LOGGER.info("Create Home-Assistant addon data folder %s",
                         self.path_data(addon))
            os.mkdir(self.path_data(addon))
    async def load_repositories(self, list_repositories):
        """Add a new custom repository."""
        new_rep = set(list_repositories)
        old_rep = set(self.repositories)

        addon_docker = DockerAddon(
            self.config, self.loop, self.dock, self, addon)
        # add new repository
        async def _add_repository(url):
            """Helper function to async add repository."""
            repository = Repository(self.config, self.loop, self.data, url)
            if not await repository.load():
                _LOGGER.error("Can't load from repository %s", url)
                return
            self.repositories[url] = repository

        version = version or self.get_version(addon)
        if not await addon_docker.install(version):
            return False
            # don't add built-in repository to config
            if url not in BUILTIN_REPOSITORIES:
                self.config.addons_repositories = url

        self.dockers[addon] = addon_docker
        self.set_addon_install(addon, version)
        return True
        tasks = [_add_repository(url) for url in new_rep - old_rep]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    async def uninstall(self, addon):
        """Remove a addon."""
        if not self.is_installed(addon):
            _LOGGER.error("Addon %s is already uninstalled", addon)
            return False
        # del new repository
        for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
            self.repositories.pop(url).remove()
            self.config.drop_addon_repository(url)

        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False
        # update data
        self.data.reload()
        await self.load_addons()

        if not await self.dockers[addon].remove():
            return False
    async def load_addons(self):
        """Update/add internal addon store."""
        all_addons = set(self.data.system) | set(self.data.cache)

        if os.path.isdir(self.path_data(addon)):
            _LOGGER.info("Remove Home-Assistant addon data folder %s",
                         self.path_data(addon))
            shutil.rmtree(self.path_data(addon))
        # calc diff
        add_addons = all_addons - set(self.addons)
        del_addons = set(self.addons) - all_addons

        self.dockers.pop(addon)
        self.set_addon_uninstall(addon)
        return True
        _LOGGER.info("Load addons: %d all - %d new - %d remove",
                     len(all_addons), len(add_addons), len(del_addons))

    async def state(self, addon):
        """Return running state of addon."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return
        # new addons
        tasks = []
        for addon_slug in add_addons:
            addon = Addon(
                self.config, self.loop, self.dock, self.data, addon_slug)

        if await self.dockers[addon].is_running():
            return STATE_STARTED
        return STATE_STOPPED
            tasks.append(addon.load())
            self.addons[addon_slug] = addon

    async def start(self, addon):
        """Set options and start addon."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

        if not self.write_addon_options(addon):
            _LOGGER.error("Can't write options for addon %s", addon)
            return False
        # remove
        for addon_slug in del_addons:
            self.addons.pop(addon_slug)

        return await self.dockers[addon].run()
    async def auto_boot(self, stage):
        """Boot addons with mode auto."""
        tasks = []
        for addon in self.addons.values():
            if addon.is_installed and addon.boot == BOOT_AUTO and \
                    addon.startup == stage:
                tasks.append(addon.start())

    async def stop(self, addon):
        """Stop addon."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        return await self.dockers[addon].stop()

    async def update(self, addon, version=None):
        """Update addon."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        version = version or self.get_version(addon)
        is_running = self.dockers[addon].is_running()

        # update
        if await self.dockers[addon].update(version):
            self.set_addon_update(addon, version)
            if is_running:
                await self.start(addon)
            return True
        return False

    async def logs(self, addon):
        """Return addons log output."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        return await self.dockers[addon].logs()
        _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
488 hassio/addons/addon.py (new file)
@@ -0,0 +1,488 @@
"""Init file for HassIO addons."""
from copy import deepcopy
import logging
import json
from pathlib import Path, PurePath
import re
import shutil
import tarfile
from tempfile import TemporaryDirectory

import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import (
    validate_options, SCHEMA_ADDON_SNAPSHOT, MAP_VOLUME)
from ..const import (
    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
    ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
    ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
    STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
    ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK)
from .util import check_installed
from ..dock.addon import DockerAddon
from ..tools import write_json_file, read_json_file

_LOGGER = logging.getLogger(__name__)

RE_VOLUME = re.compile(MAP_VOLUME)


class Addon(object):
    """Hold data for addon inside HassIO."""

    def __init__(self, config, loop, dock, data, slug):
        """Initialize data holder."""
        self.loop = loop
        self.config = config
        self.data = data
        self._id = slug

        self.addon_docker = DockerAddon(config, loop, dock, self)

    async def load(self):
        """Async initialize of object."""
        if self.is_installed:
            await self.addon_docker.attach()

    @property
    def slug(self):
        """Return slug/id of addon."""
        return self._id

    @property
    def _mesh(self):
        """Return addon data from system or cache."""
        return self.data.system.get(self._id, self.data.cache.get(self._id))

    @property
    def is_installed(self):
        """Return True if a addon is installed."""
        return self._id in self.data.system

    @property
    def is_detached(self):
        """Return True if addon is detached."""
        return self._id not in self.data.cache

    @property
    def version_installed(self):
        """Return installed version."""
        return self.data.user.get(self._id, {}).get(ATTR_VERSION)

    def _set_install(self, version):
        """Set addon as installed."""
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: version,
        }
        self.data.save()

    def _set_uninstall(self):
        """Set addon as uninstalled."""
        self.data.system.pop(self._id, None)
        self.data.user.pop(self._id, None)
        self.data.save()

    def _set_update(self, version):
        """Update version of addon."""
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id][ATTR_VERSION] = version
        self.data.save()

    def _restore_data(self, user, system):
        """Restore data to addon."""
        self.data.user[self._id] = deepcopy(user)
        self.data.system[self._id] = deepcopy(system)
        self.data.save()

    @property
    def options(self):
        """Return options with local changes."""
        if self.is_installed:
            return {
                **self.data.system[self._id][ATTR_OPTIONS],
                **self.data.user[self._id][ATTR_OPTIONS],
            }
        return self.data.cache[self._id][ATTR_OPTIONS]

    @options.setter
    def options(self, value):
        """Store user addon options."""
        self.data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
        self.data.save()

    @property
    def boot(self):
        """Return boot config with prio local settings."""
        if ATTR_BOOT in self.data.user.get(self._id, {}):
            return self.data.user[self._id][ATTR_BOOT]
        return self._mesh[ATTR_BOOT]

    @boot.setter
    def boot(self, value):
        """Store user boot options."""
        self.data.user[self._id][ATTR_BOOT] = value
        self.data.save()

    @property
    def auto_update(self):
        """Return if auto update is enable."""
        return self.data.user[self._id][ATTR_AUTO_UPDATE]

    @auto_update.setter
    def auto_update(self, value):
        """Set auto update."""
        self.data.user[self._id][ATTR_AUTO_UPDATE] = value
        self.data.save()

    @property
    def name(self):
        """Return name of addon."""
        return self._mesh[ATTR_NAME]

    @property
    def timeout(self):
        """Return timeout of addon for docker stop."""
        return self._mesh[ATTR_TIMEOUT]

    @property
    def description(self):
        """Return description of addon."""
        return self._mesh[ATTR_DESCRIPTON]

    @property
    def repository(self):
        """Return repository of addon."""
        return self._mesh[ATTR_REPOSITORY]

    @property
    def last_version(self):
        """Return version of addon."""
        if self._id in self.data.cache:
            return self.data.cache[self._id][ATTR_VERSION]
        return self.version_installed

    @property
    def startup(self):
        """Return startup type of addon."""
        return self._mesh.get(ATTR_STARTUP)

    @property
    def ports(self):
        """Return ports of addon."""
        if self.network_mode != 'bridge' or ATTR_PORTS not in self._mesh:
            return

        if not self.is_installed or \
                ATTR_NETWORK not in self.data.user[self._id]:
            return self._mesh[ATTR_PORTS]
        return self.data.user[self._id][ATTR_NETWORK]

    @ports.setter
    def ports(self, value):
        """Set custom ports of addon."""
        if value is None:
            self.data.user[self._id].pop(ATTR_NETWORK, None)
        else:
            new_ports = {}
            for container_port, host_port in value.items():
                if container_port in self._mesh.get(ATTR_PORTS, {}):
                    new_ports[container_port] = host_port

            self.data.user[self._id][ATTR_NETWORK] = new_ports

        self.data.save()

    @property
    def network_mode(self):
        """Return network mode of addon."""
        if self._mesh[ATTR_HOST_NETWORK]:
            return 'host'
        return 'bridge'

    @property
    def devices(self):
        """Return devices of addon."""
        return self._mesh.get(ATTR_DEVICES)

    @property
    def tmpfs(self):
        """Return tmpfs of addon."""
        return self._mesh.get(ATTR_TMPFS)

    @property
    def environment(self):
        """Return environment of addon."""
        return self._mesh.get(ATTR_ENVIRONMENT)

    @property
    def privileged(self):
        """Return list of privilege."""
        return self._mesh.get(ATTR_PRIVILEGED)

    @property
    def url(self):
        """Return url of addon."""
        return self._mesh.get(ATTR_URL)

    @property
    def supported_arch(self):
        """Return list of supported arch."""
        return self._mesh[ATTR_ARCH]

    @property
    def image(self):
        """Return image name of addon."""
        addon_data = self._mesh

        # Repository with dockerhub images
        if ATTR_IMAGE in addon_data:
            return addon_data[ATTR_IMAGE].format(arch=self.config.arch)

        # local build
        return "{}/{}-addon-{}".format(
            addon_data[ATTR_REPOSITORY], self.config.arch,
            addon_data[ATTR_SLUG])

    @property
    def need_build(self):
        """Return True if this addon need a local build."""
        return ATTR_IMAGE not in self._mesh

    @property
    def map_volumes(self):
        """Return a dict of {volume: policy} from addon."""
        volumes = {}
        for volume in self._mesh[ATTR_MAP]:
            result = RE_VOLUME.match(volume)
            volumes[result.group(1)] = result.group(2) or 'ro'

        return volumes

    @property
    def path_data(self):
        """Return addon data path inside supervisor."""
        return Path(self.config.path_addons_data, self._id)

    @property
    def path_extern_data(self):
        """Return addon data path external for docker."""
        return PurePath(self.config.path_extern_addons_data, self._id)

    @property
    def path_addon_options(self):
        """Return path to addons options."""
        return Path(self.path_data, "options.json")

    @property
    def path_addon_location(self):
        """Return path to this addon."""
        return Path(self._mesh[ATTR_LOCATON])

    def write_options(self):
        """Return True if addon options is written to data."""
        schema = self.schema
        options = self.options

        try:
            schema(options)
            return write_json_file(self.path_addon_options, options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s have wrong options -> %s", self._id,
                          humanize_error(options, ex))

        return False

    @property
    def schema(self):
        """Create a schema for addon options."""
        raw_schema = self._mesh[ATTR_SCHEMA]

        if isinstance(raw_schema, bool):
            return vol.Schema(dict)
        return vol.Schema(vol.All(dict, validate_options(raw_schema)))

    async def install(self, version=None):
        """Install a addon."""
        if self.config.arch not in self.supported_arch:
            _LOGGER.error(
                "Addon %s not supported on %s", self._id, self.config.arch)
            return False

        if self.is_installed:
            _LOGGER.error("Addon %s is already installed", self._id)
            return False

        if not self.path_data.is_dir():
            _LOGGER.info(
                "Create Home-Assistant addon data folder %s", self.path_data)
            self.path_data.mkdir()

        version = version or self.last_version
        if not await self.addon_docker.install(version):
            return False

        self._set_install(version)
        return True

    @check_installed
    async def uninstall(self):
        """Remove a addon."""
        if not await self.addon_docker.remove():
            return False

        if self.path_data.is_dir():
            _LOGGER.info(
                "Remove Home-Assistant addon data folder %s", self.path_data)
            shutil.rmtree(str(self.path_data))

        self._set_uninstall()
        return True

    async def state(self):
        """Return running state of addon."""
        if not self.is_installed:
            return STATE_NONE

        if await self.addon_docker.is_running():
            return STATE_STARTED
        return STATE_STOPPED

    @check_installed
    async def start(self):
        """Set options and start addon."""
        return await self.addon_docker.run()

    @check_installed
    async def stop(self):
        """Stop addon."""
        return await self.addon_docker.stop()

    @check_installed
    async def update(self, version=None):
        """Update addon."""
        version = version or self.last_version

        if version == self.version_installed:
            _LOGGER.warning(
                "Addon %s is already installed in %s", self._id, version)
            return True

        if not await self.addon_docker.update(version):
            return False

        self._set_update(version)
        return True

    @check_installed
    async def restart(self):
        """Restart addon."""
        return await self.addon_docker.restart()

    @check_installed
    async def logs(self):
        """Return addons log output."""
        return await self.addon_docker.logs()

    @check_installed
    async def snapshot(self, tar_file):
        """Snapshot a state of a addon."""
        with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
            # store local image
            if self.need_build and not await \
                    self.addon_docker.export_image(Path(temp, "image.tar")):
                return False

            data = {
                ATTR_USER: self.data.user.get(self._id, {}),
                ATTR_SYSTEM: self.data.system.get(self._id, {}),
                ATTR_VERSION: self.version_installed,
                ATTR_STATE: await self.state(),
            }

            # store local configs/state
            if not write_json_file(Path(temp, "addon.json"), data):
                _LOGGER.error("Can't write addon.json for %s", self._id)
                return False

            # write into tarfile
            def _create_tar():
                """Write tar inside loop."""
                with tarfile.open(tar_file, "w:gz",
                                  compresslevel=1) as snapshot:
                    snapshot.add(temp, arcname=".")
                    snapshot.add(self.path_data, arcname="data")

            try:
                await self.loop.run_in_executor(None, _create_tar)
            except tarfile.TarError as err:
                _LOGGER.error("Can't write tarfile %s -> %s", tar_file, err)
                return False

        return True

    async def restore(self, tar_file):
        """Restore a state of a addon."""
        with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
            # extract snapshot
            def _extract_tar():
                """Extract tar snapshot."""
                with tarfile.open(tar_file, "r:gz") as snapshot:
                    snapshot.extractall(path=Path(temp))

            try:
                await self.loop.run_in_executor(None, _extract_tar)
            except tarfile.TarError as err:
                _LOGGER.error("Can't read tarfile %s -> %s", tar_file, err)
                return False

            # read snapshot data
            try:
                data = read_json_file(Path(temp, "addon.json"))
            except (OSError, json.JSONDecodeError) as err:
                _LOGGER.error("Can't read addon.json -> %s", err)

            # validate
            try:
                data = SCHEMA_ADDON_SNAPSHOT(data)
            except vol.Invalid as err:
                _LOGGER.error("Can't validate %s, snapshot data -> %s",
                              self._id, humanize_error(data, err))
                return False

            # restore data / reload addon
            self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM])

            # check version / restore image
            version = data[ATTR_VERSION]
            if version != self.addon_docker.version:
                image_file = Path(temp, "image.tar")
                if image_file.is_file():
                    await self.addon_docker.import_image(image_file, version)
                else:
                    if await self.addon_docker.install(version):
                        await self.addon_docker.cleanup()
            else:
                await self.addon_docker.stop()

            # restore data
            def _restore_data():
                """Restore data."""
                if self.path_data.is_dir():
                    shutil.rmtree(str(self.path_data), ignore_errors=True)
                shutil.copytree(str(Path(temp, "data")), str(self.path_data))

            try:
                await self.loop.run_in_executor(None, _restore_data)
            except shutil.Error as err:
                _LOGGER.error("Can't restore origin data -> %s", err)
                return False

            # run addon
            if data[ATTR_STATE] == STATE_STARTED:
                return await self.start()

        return True
12 hassio/addons/built-in.json (new file)
@@ -0,0 +1,12 @@
{
    "local": {
        "name": "Local Add-Ons",
        "url": "https://home-assistant.io/hassio",
        "maintainer": "By our self"
    },
    "core": {
        "name": "Built-in Add-Ons",
        "url": "https://home-assistant.io/addons",
        "maintainer": "Home Assistant authors"
    }
}
@@ -1,242 +1,196 @@
|
||||
"""Init file for HassIO addons."""
|
||||
import copy
|
||||
import logging
|
||||
import glob
|
||||
import json
|
||||
from pathlib import Path
|
||||
import re
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from .validate import validate_options, SCHEMA_ADDON_CONFIG
|
||||
from .util import extract_hash_from_path
|
||||
from .validate import (
|
||||
SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG,
|
||||
MAP_VOLUME)
|
||||
from ..const import (
|
||||
FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON,
|
||||
ATTR_STARTUP, ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
|
||||
ATTR_PORTS, BOOT_AUTO, DOCKER_REPO, ATTR_INSTALLED, ATTR_SCHEMA,
|
||||
ATTR_IMAGE, ATTR_DEDICATED)
|
||||
from ..config import Config
|
||||
FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
|
||||
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
|
||||
from ..tools import read_json_file, write_json_file
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ADDONS_REPO_PATTERN = "{}/*/config.json"
|
||||
SYSTEM = "system"
|
||||
USER = "user"
|
||||
RE_VOLUME = re.compile(MAP_VOLUME)
|
||||
|
||||
|
||||
class AddonsData(Config):
|
||||
class Data(object):
|
||||
"""Hold data for addons inside HassIO."""
|
||||
|
||||
def __init__(self, config):
|
||||
"""Initialize data holder."""
|
||||
super().__init__(FILE_HASSIO_ADDONS)
|
||||
self._file = FILE_HASSIO_ADDONS
|
||||
self._data = {}
|
||||
self.config = config
|
||||
self._addons_data = self._data.get(SYSTEM, {})
|
||||
self._user_data = self._data.get(USER, {})
|
||||
self._current_data = {}
|
||||
self.arch = None
|
||||
self._cache = {}
|
||||
self._repositories = {}
|
||||
|
||||
# init or load data
|
||||
if self._file.is_file():
|
||||
try:
|
||||
self._data = read_json_file(self._file)
|
||||
except (OSError, json.JSONDecodeError):
|
||||
_LOGGER.warning("Can't read %s", self._file)
|
||||
self._data = {}
|
||||
|
||||
# validate
|
||||
try:
|
||||
self._data = SCHEMA_ADDON_FILE(self._data)
|
||||
except vol.Invalid as ex:
|
||||
_LOGGER.error("Can't parse addons.json -> %s",
|
||||
humanize_error(self._data, ex))
|
||||
|
||||
def save(self):
|
||||
"""Store data to config file."""
|
||||
self._data = {
|
||||
USER: self._user_data,
|
||||
SYSTEM: self._addons_data,
|
||||
}
|
||||
super().save()
|
||||
# validate
|
||||
try:
|
||||
self._data = SCHEMA_ADDON_FILE(self._data)
|
||||
except vol.Invalid as ex:
|
||||
_LOGGER.error("Can't parse addons data -> %s",
|
||||
humanize_error(self._data, ex))
|
||||
return False
|
||||
|
||||
def read_addons_repo(self):
|
||||
if not write_json_file(self._file, self._data):
|
||||
_LOGGER.error("Can't store config in %s", self._file)
|
||||
return False
|
||||
return True
|
||||
|
||||
@property
|
||||
def user(self):
|
||||
"""Return local addon user data."""
|
||||
return self._data[ATTR_USER]
|
||||
|
||||
@property
|
||||
def system(self):
|
||||
"""Return local addon data."""
|
||||
return self._data[ATTR_SYSTEM]
|
||||
|
||||
@property
|
||||
def cache(self):
|
||||
"""Return addon data from cache/repositories."""
|
||||
return self._cache
|
||||
|
||||
@property
|
||||
def repositories(self):
|
||||
"""Return addon data from repositories."""
|
||||
return self._repositories
|
||||
|
||||
def reload(self):
|
||||
"""Read data from addons repository."""
|
||||
self._current_data = {}
|
||||
self._cache = {}
|
||||
self._repositories = {}
|
||||
|
||||
self._read_addons_folder(self.config.path_addons_repo)
|
||||
self._read_addons_folder(self.config.path_addons_custom)
|
||||
# read core repository
|
||||
self._read_addons_folder(
|
||||
self.config.path_addons_core, REPOSITORY_CORE)
|
||||
|
||||
def _read_addons_folder(self, folder):
|
||||
# read local repository
|
||||
self._read_addons_folder(
|
||||
self.config.path_addons_local, REPOSITORY_LOCAL)
|
||||
|
||||
# add built-in repositories information
|
||||
self._set_builtin_repositories()
|
||||
|
||||
# read custom git repositories
|
||||
for repository_element in self.config.path_addons_git.iterdir():
|
||||
if repository_element.is_dir():
|
||||
self._read_git_repository(repository_element)
|
||||
|
||||
# update local data
|
||||
self._merge_config()
|
||||
|
||||
def _read_git_repository(self, path):
|
||||
"""Process a custom repository folder."""
|
||||
slug = extract_hash_from_path(path)
|
||||
|
||||
# exists repository json
|
||||
repository_file = Path(path, "repository.json")
|
||||
try:
|
||||
repository_info = SCHEMA_REPOSITORY_CONFIG(
|
||||
read_json_file(repository_file)
|
||||
)
|
||||
|
||||
except (OSError, json.JSONDecodeError):
|
||||
_LOGGER.warning("Can't read repository information from %s",
|
||||
repository_file)
|
||||
return
|
||||
|
||||
except vol.Invalid:
|
||||
_LOGGER.warning("Repository parse error %s", repository_file)
|
||||
return
|
||||
|
||||
# process data
|
||||
self._repositories[slug] = repository_info
|
||||
self._read_addons_folder(path, slug)
|
||||
|
||||
def _read_addons_folder(self, path, repository):
|
||||
"""Read data from addons folder."""
|
||||
pattern = ADDONS_REPO_PATTERN.format(folder)
|
||||
|
||||
for addon in glob.iglob(pattern):
|
||||
for addon in path.glob("**/config.json"):
|
||||
try:
|
||||
addon_config = read_json_file(addon)
|
||||
|
||||
# validate
|
||||
addon_config = SCHEMA_ADDON_CONFIG(addon_config)
|
||||
self._current_data[addon_config[ATTR_SLUG]] = addon_config
|
||||
|
||||
except (OSError, KeyError):
|
||||
# Generate slug
|
||||
addon_slug = "{}_{}".format(
|
||||
repository, addon_config[ATTR_SLUG])
|
||||
|
||||
# store
|
||||
addon_config[ATTR_REPOSITORY] = repository
|
||||
addon_config[ATTR_LOCATON] = str(addon.parent)
|
||||
self._cache[addon_slug] = addon_config
|
||||
|
||||
except OSError:
|
||||
_LOGGER.warning("Can't read %s", addon)
|
||||
|
||||
except vol.Invalid as ex:
|
||||
_LOGGER.warning("Can't read %s -> %s", addon,
|
||||
humanize_error(addon_config, ex))
|
||||
|
||||
@property
|
||||
def list_installed(self):
|
||||
"""Return a list of installed addons."""
|
||||
return set(self._addons_data.keys())
|
||||
|
||||
@property
|
||||
def list(self):
|
||||
"""Return a list of available addons."""
|
||||
data = []
|
||||
all_addons = {**self._addons_data, **self._current_data}
|
||||
dedicated = self.list_removed
|
||||
|
||||
for addon, values in all_addons.items():
|
||||
i_version = self._addons_data.get(addon, {}).get(ATTR_VERSION)
|
||||
|
||||
data.append({
|
||||
ATTR_NAME: values[ATTR_NAME],
|
||||
ATTR_SLUG: values[ATTR_SLUG],
|
||||
ATTR_DESCRIPTON: values[ATTR_DESCRIPTON],
|
||||
ATTR_VERSION: values[ATTR_VERSION],
|
||||
ATTR_INSTALLED: i_version,
|
||||
ATTR_DEDICATED: addon in dedicated,
|
||||
})
|
||||
|
||||
return data
|
||||
|
||||
def list_startup(self, start_type):
|
||||
"""Get list of installed addon with need start by type."""
|
||||
addon_list = set()
|
||||
for addon in self._addons_data.keys():
|
||||
if self.get_boot(addon) != BOOT_AUTO:
|
||||
continue
|
||||
|
||||
try:
|
||||
if self._addons_data[addon][ATTR_STARTUP] == start_type:
|
||||
addon_list.add(addon)
|
||||
except KeyError:
|
||||
_LOGGER.warning("Orphaned addon detect %s", addon)
|
||||
continue
|
||||
|
||||
return addon_list
|
||||
|
||||
@property
|
||||
def list_removed(self):
|
||||
"""Return local addons they not support from repo."""
|
||||
addon_list = set()
|
||||
for addon in self._addons_data.keys():
|
||||
if addon not in self._current_data:
|
||||
addon_list.add(addon)
|
||||
|
||||
return addon_list
|
||||
|
||||
def exists_addon(self, addon):
|
||||
"""Return True if a addon exists."""
|
||||
return addon in self._current_data or addon in self._addons_data
|
||||
|
||||
def is_installed(self, addon):
|
||||
"""Return True if a addon is installed."""
|
||||
return addon in self._addons_data
|
||||
|
||||
def version_installed(self, addon):
|
||||
"""Return installed version."""
|
||||
return self._addons_data[addon][ATTR_VERSION]
|
||||
|
||||
def set_addon_install(self, addon, version):
|
||||
"""Set addon as installed."""
|
||||
self._addons_data[addon] = self._current_data[addon]
|
||||
self._user_data[addon] = {
|
||||
ATTR_OPTIONS: {},
|
||||
}
|
||||
self.save()
|
||||
|
||||
def set_addon_uninstall(self, addon):
|
||||
"""Set addon as uninstalled."""
|
||||
self._addons_data.pop(addon, None)
|
||||
self._user_data.pop(addon, None)
|
||||
self.save()
|
||||
|
||||
def set_addon_update(self, addon, version):
|
||||
"""Update version of addon."""
|
||||
self._addons_data[addon] = self._current_data[addon]
|
||||
self.save()
|
||||
|
||||
def set_options(self, addon, options):
|
||||
"""Store user addon options."""
|
||||
self._user_data[addon][ATTR_OPTIONS] = options
|
||||
self.save()
|
||||
|
||||
def set_boot(self, addon, boot):
|
||||
"""Store user boot options."""
|
||||
self._user_data[addon][ATTR_BOOT] = boot
|
||||
self.save()
|
||||
|
||||
def get_options(self, addon):
|
||||
"""Return options with local changes."""
|
||||
return {
|
||||
**self._addons_data[addon][ATTR_OPTIONS],
|
||||
**self._user_data[addon][ATTR_OPTIONS],
|
||||
}
|
||||
|
||||
def get_boot(self, addon):
|
||||
"""Return boot config with prio local settings."""
|
||||
if ATTR_BOOT in self._user_data[addon]:
|
||||
return self._user_data[addon][ATTR_BOOT]
|
||||
|
||||
return self._addons_data[addon][ATTR_BOOT]
|
||||
|
||||
def get_name(self, addon):
|
||||
"""Return name of addon."""
|
||||
return self._addons_data[addon][ATTR_NAME]
|
||||
|
||||
def get_description(self, addon):
|
||||
"""Return description of addon."""
|
||||
return self._addons_data[addon][ATTR_DESCRIPTON]
|
||||
|
||||
def get_version(self, addon):
|
||||
"""Return version of addon."""
|
||||
if addon not in self._current_data:
|
||||
return self.version_installed(addon)
|
||||
return self._current_data[addon][ATTR_VERSION]
|
||||
|
||||
def get_ports(self, addon):
|
||||
"""Return ports of addon."""
|
||||
return self._addons_data[addon].get(ATTR_PORTS)
|
||||
|
||||
def get_image(self, addon):
|
||||
"""Return image name of addon."""
|
||||
addon_data = self._addons_data.get(addon, self._current_data[addon])
|
||||
|
||||
if ATTR_IMAGE not in addon_data:
|
||||
return "{}/{}-addon-{}".format(DOCKER_REPO, self.arch, addon)
|
||||
|
||||
return addon_data[ATTR_IMAGE]
|
||||
|
||||
def need_config(self, addon):
|
||||
"""Return True if config map is needed."""
|
||||
return self._addons_data[addon][ATTR_MAP_CONFIG]
|
||||
|
||||
def need_ssl(self, addon):
|
||||
"""Return True if ssl map is needed."""
|
||||
return self._addons_data[addon][ATTR_MAP_SSL]
|
||||
|
||||
def path_data(self, addon):
|
||||
"""Return addon data path inside supervisor."""
|
||||
return "{}/{}".format(self.config.path_addons_data, addon)
|
||||
|
||||
def path_data_docker(self, addon):
|
||||
"""Return addon data path external for docker."""
|
||||
return "{}/{}".format(self.config.path_addons_data_docker, addon)
|
||||
|
||||
def path_addon_options(self, addon):
|
||||
"""Return path to addons options."""
|
||||
return "{}/options.json".format(self.path_data(addon))
    def write_addon_options(self, addon):
        """Return True if addon options are written to data."""
        schema = self.get_schema(addon)
        options = self.get_options(addon)

        try:
            schema(options)
            return write_json_file(self.path_addon_options(addon), options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s has invalid options -> %s", addon,
                          humanize_error(options, ex))

        return False

    def _set_builtin_repositories(self):
        """Add local built-in repository into dataset."""
        try:
            builtin_file = Path(__file__).parent.joinpath('built-in.json')
            builtin_data = read_json_file(builtin_file)
        except (OSError, json.JSONDecodeError) as err:
            _LOGGER.warning("Can't read built-in.json -> %s", err)
            return

        # core repository
        self._repositories[REPOSITORY_CORE] = \
            builtin_data[REPOSITORY_CORE]

        # local repository
        self._repositories[REPOSITORY_LOCAL] = \
            builtin_data[REPOSITORY_LOCAL]

    def get_schema(self, addon):
        """Create a schema for addon options."""
        raw_schema = self._addons_data[addon][ATTR_SCHEMA]

        schema = vol.Schema(vol.All(dict, validate_options(raw_schema)))
        return schema
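get_schema turns the raw type map from an add-on's config into a callable voluptuous validator. A rough standalone sketch of the same idea, with a simplified stand-in for validate_options and hypothetical option names:

import voluptuous as vol

raw_schema = {'host': 'str', 'port': 'int'}   # as found under 'schema' in config.json

def _coerce(options):
    # Simplified stand-in for validate_options(raw_schema): coerce by type tag.
    casts = {'str': str, 'int': int}
    return {key: casts[raw_schema[key]](value) for key, value in options.items()}

schema = vol.Schema(vol.All(dict, _coerce))
print(schema({'host': 'example.local', 'port': '8123'}))  # {'host': 'example.local', 'port': 8123}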
    def _merge_config(self):
        """Update the local config when the cache has changed.

        The cached add-on needs to be at the same version as the local
        copy for the merge.
        """
        have_change = False

        for addon in set(self.system):
            # detached
            if addon not in self._cache:
                continue

            cache = self._cache[addon]
            data = self.system[addon]
            if data[ATTR_VERSION] == cache[ATTR_VERSION]:
                if data != cache:
                    self.system[addon] = copy.deepcopy(cache)
                    have_change = True

        if have_change:
            self.save()
@@ -1,38 +1,43 @@
 """Init file for HassIO addons git."""
 import asyncio
 import logging
-import os
+from pathlib import Path
+import shutil

 import git

+from .util import get_hash_from_repository
 from ..const import URL_HASSIO_ADDONS

 _LOGGER = logging.getLogger(__name__)


-class AddonsRepo(object):
+class GitRepo(object):
     """Manage addons git repo."""

-    def __init__(self, config, loop):
-        """Initialize docker base wrapper."""
+    def __init__(self, config, loop, path, url):
+        """Initialize git base wrapper."""
         self.config = config
         self.loop = loop
         self.repo = None
+        self.path = path
+        self.url = url
         self._lock = asyncio.Lock(loop=loop)

     async def load(self):
         """Init git addon repo."""
-        if not os.path.isdir(self.config.path_addons_repo):
+        if not self.path.is_dir():
             return await self.clone()

         async with self._lock:
             try:
-                _LOGGER.info("Load addons repository")
+                _LOGGER.info("Load addon %s repository", self.path)
                 self.repo = await self.loop.run_in_executor(
-                    None, git.Repo, self.config.path_addons_repo)
+                    None, git.Repo, str(self.path))

-            except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
-                _LOGGER.error("Can't load addons repo: %s.", err)
+            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
+                    git.GitCommandError) as err:
+                _LOGGER.error("Can't load %s repo: %s.", self.path, err)
                 return False

         return True
@@ -41,13 +46,13 @@ class AddonsRepo(object):
         """Clone git addon repo."""
         async with self._lock:
             try:
-                _LOGGER.info("Clone addons repository")
+                _LOGGER.info("Clone addon %s repository", self.url)
                 self.repo = await self.loop.run_in_executor(
-                    None, git.Repo.clone_from, URL_HASSIO_ADDONS,
-                    self.config.path_addons_repo)
+                    None, git.Repo.clone_from, self.url, str(self.path))

-            except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
-                _LOGGER.error("Can't clone addons repo: %s.", err)
+            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
+                    git.GitCommandError) as err:
+                _LOGGER.error("Can't clone %s repo: %s.", self.url, err)
                 return False

         return True
@@ -60,12 +65,43 @@ class AddonsRepo(object):

         async with self._lock:
             try:
-                _LOGGER.info("Pull addons repository")
+                _LOGGER.info("Pull addon %s repository", self.url)
                 await self.loop.run_in_executor(
                     None, self.repo.remotes.origin.pull)

-            except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
-                _LOGGER.error("Can't pull addons repo: %s.", err)
+            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
+                    git.exc.GitCommandError) as err:
+                _LOGGER.error("Can't pull %s repo: %s.", self.url, err)
                 return False

         return True
+
+
+class GitRepoHassIO(GitRepo):
+    """HassIO addons repository."""
+
+    def __init__(self, config, loop):
+        """Initialize git hassio addon repository."""
+        super().__init__(
+            config, loop, config.path_addons_core, URL_HASSIO_ADDONS)
+
+
+class GitRepoCustom(GitRepo):
+    """Custom addons repository."""
+
+    def __init__(self, config, loop, url):
+        """Initialize git custom addon repository."""
+        path = Path(config.path_addons_git, get_hash_from_repository(url))
+
+        super().__init__(config, loop, path, url)
+
+    def remove(self):
+        """Remove a custom addon repository."""
+        if self.path.is_dir():
+            _LOGGER.info("Remove custom addon repository %s", self.url)
+
+            def log_err(funct, path, _):
+                """Log error."""
+                _LOGGER.warning("Can't remove %s", path)
+
+            shutil.rmtree(str(self.path), onerror=log_err)
hassio/addons/repository.py (new file, 71 lines)
@@ -0,0 +1,71 @@
"""Represent a HassIO repository."""
|
||||
from .git import GitRepoHassIO, GitRepoCustom
|
||||
from .util import get_hash_from_repository
|
||||
from ..const import (
|
||||
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)
|
||||
|
||||
UNKNOWN = 'unknown'
|
||||
|
||||
|
||||
class Repository(object):
|
||||
"""Repository in HassIO."""
|
||||
|
||||
def __init__(self, config, loop, data, repository):
|
||||
"""Initialize repository object."""
|
||||
self.data = data
|
||||
self.source = None
|
||||
self.git = None
|
||||
|
||||
if repository == REPOSITORY_LOCAL:
|
||||
self._id = repository
|
||||
elif repository == REPOSITORY_CORE:
|
||||
self._id = repository
|
||||
self.git = GitRepoHassIO(config, loop)
|
||||
else:
|
||||
self._id = get_hash_from_repository(repository)
|
||||
self.git = GitRepoCustom(config, loop, repository)
|
||||
self.source = repository
|
||||
|
||||
@property
|
||||
def _mesh(self):
|
||||
"""Return data struct repository."""
|
||||
return self.data.repositories.get(self._id, {})
|
||||
|
||||
@property
|
||||
def slug(self):
|
||||
"""Return slug of repository."""
|
||||
return self._id
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return name of repository."""
|
||||
return self._mesh.get(ATTR_NAME, UNKNOWN)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
"""Return url of repository."""
|
||||
return self._mesh.get(ATTR_URL, self.source)
|
||||
|
||||
@property
|
||||
def maintainer(self):
|
||||
"""Return url of repository."""
|
||||
return self._mesh.get(ATTR_MAINTAINER, UNKNOWN)
|
||||
|
||||
async def load(self):
|
||||
"""Load addon repository."""
|
||||
if self.git:
|
||||
return await self.git.load()
|
||||
return True
|
||||
|
||||
async def update(self):
|
||||
"""Update addon repository."""
|
||||
if self.git:
|
||||
return await self.git.pull()
|
||||
return True
|
||||
|
||||
def remove(self):
|
||||
"""Remove addon repository."""
|
||||
if self._id in (REPOSITORY_CORE, REPOSITORY_LOCAL):
|
||||
raise RuntimeError("Can't remove built-in repositories!")
|
||||
|
||||
self.git.remove()
hassio/addons/util.py (new file, 35 lines)
@@ -0,0 +1,35 @@
"""Util addons functions."""
|
||||
import hashlib
|
||||
import logging
|
||||
import re
|
||||
|
||||
RE_SHA1 = re.compile(r"[a-f0-9]{8}")
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_hash_from_repository(name):
|
||||
"""Generate a hash from repository."""
|
||||
key = name.lower().encode()
|
||||
return hashlib.sha1(key).hexdigest()[:8]
|
||||
|
||||
|
||||
def extract_hash_from_path(path):
|
||||
"""Extract repo id from path."""
|
||||
repo_dir = path.parts[-1]
|
||||
|
||||
if not RE_SHA1.match(repo_dir):
|
||||
return get_hash_from_repository(repo_dir)
|
||||
return repo_dir
|
||||
|
||||
|
||||
def check_installed(method):
|
||||
"""Wrap function with check if addon is installed."""
|
||||
async def wrap_check(addon, *args, **kwargs):
|
||||
"""Return False if not installed or the function."""
|
||||
if not addon.is_installed:
|
||||
_LOGGER.error("Addon %s is not installed", addon.slug)
|
||||
return False
|
||||
return await method(addon, *args, **kwargs)
|
||||
|
||||
return wrap_check
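The repository slug used for checkout folders is just the first eight hex characters of a SHA-1 over the lower-cased URL, so the same URL always maps back to the same directory. For example, with a hypothetical URL:

import hashlib

url = "https://github.com/hassio-addons/repository"
slug = hashlib.sha1(url.lower().encode()).hexdigest()[:8]
print(slug)  # stable eight-character id; same URL always yields the same slug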
@@ -3,9 +3,17 @@ import voluptuous as vol

 from ..const import (
     ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
-    ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
-    ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER, STARTUP_BEFORE, BOOT_AUTO,
-    BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE)
+    ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER,
+    STARTUP_BEFORE, STARTUP_INITIALIZE, BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA,
+    ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER, ATTR_ARCH, ATTR_DEVICES,
+    ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64,
+    ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_USER, ATTR_STATE, ATTR_SYSTEM,
+    STATE_STARTED, STATE_STOPPED, ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT,
+    ATTR_NETWORK, ATTR_AUTO_UPDATE)
+from ..validate import NETWORK_PORT, DOCKER_PORTS
+
+
+MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$"

 V_STR = 'str'
 V_INT = 'int'
@@ -13,8 +21,18 @@ V_FLOAT = 'float'
 V_BOOL = 'bool'
 V_EMAIL = 'email'
 V_URL = 'url'
+V_PORT = 'port'

-ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL])
+ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL, V_PORT])
+
+ARCH_ALL = [
+    ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
+]
+
+PRIVILEGE_ALL = [
+    "NET_ADMIN"
+]

 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_CONFIG = vol.Schema({
@@ -22,20 +40,73 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
     vol.Required(ATTR_VERSION): vol.Coerce(str),
     vol.Required(ATTR_SLUG): vol.Coerce(str),
     vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
+    vol.Optional(ATTR_URL): vol.Url(),
+    vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
     vol.Required(ATTR_STARTUP):
-        vol.In([STARTUP_BEFORE, STARTUP_AFTER, STARTUP_ONCE]),
+        vol.In([STARTUP_BEFORE, STARTUP_AFTER, STARTUP_ONCE,
+                STARTUP_INITIALIZE]),
     vol.Required(ATTR_BOOT):
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
-    vol.Optional(ATTR_PORTS): dict,
-    vol.Optional(ATTR_MAP_CONFIG, default=False): vol.Boolean(),
-    vol.Optional(ATTR_MAP_SSL, default=False): vol.Boolean(),
+    vol.Optional(ATTR_PORTS): DOCKER_PORTS,
+    vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
+    vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
+    vol.Optional(ATTR_TMPFS):
+        vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
+    vol.Optional(ATTR_MAP, default=[]): [vol.Match(MAP_VOLUME)],
+    vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
+    vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGE_ALL)],
     vol.Required(ATTR_OPTIONS): dict,
-    vol.Required(ATTR_SCHEMA): {
+    vol.Required(ATTR_SCHEMA): vol.Any({
         vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
             vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
         ])
-    },
+    }, False),
     vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
+    vol.Optional(ATTR_TIMEOUT, default=10):
+        vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
 }, extra=vol.ALLOW_EXTRA)
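For orientation, a config fragment that would satisfy the extended schema might look like the following. The add-on and all values are purely illustrative, and the startup/boot strings assume the usual values behind the STARTUP_* and BOOT_* constants:

example_config = {
    'name': 'Example Addon',
    'version': '1.0.0',
    'slug': 'example',
    'description': 'Illustrative add-on config',
    'startup': 'before',            # assumed value of STARTUP_BEFORE
    'boot': 'auto',                 # assumed value of BOOT_AUTO
    'ports': {'8080/tcp': 8080},    # DOCKER_PORTS style mapping
    'map': ['config:rw', 'ssl'],    # each entry matches MAP_VOLUME
    'options': {'host': 'localhost'},
    'schema': {'host': 'str'},
}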
# pylint: disable=no-value-for-parameter
SCHEMA_REPOSITORY_CONFIG = vol.Schema({
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Optional(ATTR_URL): vol.Url(),
    vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
}, extra=vol.ALLOW_EXTRA)


# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema({
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    vol.Required(ATTR_OPTIONS): dict,
    vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
    vol.Optional(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
})


SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
    vol.Required(ATTR_LOCATON): vol.Coerce(str),
    vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
})


SCHEMA_ADDON_FILE = vol.Schema({
    vol.Optional(ATTR_USER, default={}): {
        vol.Coerce(str): SCHEMA_ADDON_USER,
    },
    vol.Optional(ATTR_SYSTEM, default={}): {
        vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
    }
})


SCHEMA_ADDON_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
    vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
    vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
    vol.Required(ATTR_VERSION): vol.Coerce(str),
})


@@ -54,10 +125,10 @@ def validate_options(raw_schema):
         try:
             if isinstance(typ, list):
                 # nested value
-                options[key] = _nested_validate(typ[0], value)
+                options[key] = _nested_validate(typ[0], value, key)
             else:
                 # normal value
-                options[key] = _single_validate(typ, value)
+                options[key] = _single_validate(typ, value, key)
         except (IndexError, KeyError):
             raise vol.Invalid(
                 "Type error for {}.".format(key)) from None

@@ -68,9 +139,13 @@ def validate_options(raw_schema):


 # pylint: disable=no-value-for-parameter
-def _single_validate(typ, value):
+def _single_validate(typ, value, key):
     """Validate a single element."""
     try:
+        # if required argument
+        if value is None:
+            raise vol.Invalid("Missing required option '{}'.".format(key))
+
         if typ == V_STR:
             return str(value)
         elif typ == V_INT:
@@ -83,14 +158,16 @@ def _single_validate(typ, value):
             return vol.Email()(value)
         elif typ == V_URL:
             return vol.Url()(value)
+        elif typ == V_PORT:
+            return NETWORK_PORT(value)

-        raise vol.Invalid("Fatal error for {}.".format(value))
+    except TypeError:
+        raise vol.Invalid("Fatal error for {} type {}.".format(key, typ))
     except ValueError:
         raise vol.Invalid(
-            "Type {} error for {}.".format(typ, value)) from None
+            "Type {} error for '{}' on {}.".format(typ, value, key)) from None


-def _nested_validate(typ, data_list):
+def _nested_validate(typ, data_list, key):
     """Validate nested items."""
     options = []

@@ -103,10 +180,10 @@ def _nested_validate(typ, data_list):
                     raise vol.Invalid(
                         "Unknown nested options {}.".format(c_key))

-                c_options[c_key] = _single_validate(typ[c_key], c_value)
+                c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
             options.append(c_options)
         # normal list
         else:
-            options.append(_single_validate(typ, element))
+            options.append(_single_validate(typ, element, key))

     return options
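The element validators dispatch on plain string type tags. A compact standalone sketch of the same dispatch, reduced to three tags and mirroring the error style of _single_validate above:

import voluptuous as vol

def single_validate(typ, value, key):
    """Coerce one option value by its type tag (sketch of _single_validate)."""
    casts = {'str': str, 'int': int, 'float': float}
    try:
        if value is None:
            raise vol.Invalid("Missing required option '{}'.".format(key))
        if typ in casts:
            return casts[typ](value)
        raise vol.Invalid("Fatal error for {} type {}.".format(key, typ))
    except ValueError:
        raise vol.Invalid(
            "Type {} error for '{}' on {}.".format(typ, value, key)) from None

print(single_validate('int', '42', 'port'))  # -> 42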
@@ -1,5 +1,6 @@
 """Init file for HassIO rest api."""
 import logging
+from pathlib import Path

 from aiohttp import web

@@ -8,6 +9,8 @@ from .homeassistant import APIHomeAssistant
 from .host import APIHost
 from .network import APINetwork
 from .supervisor import APISupervisor
+from .security import APISecurity
+from .snapshots import APISnapshots

 _LOGGER = logging.getLogger(__name__)

@@ -25,32 +28,38 @@ class RestAPI(object):
         self._handler = None
         self.server = None

-    def register_host(self, host_controll):
-        """Register hostcontroll function."""
-        api_host = APIHost(self.config, self.loop, host_controll)
+    def register_host(self, host_control):
+        """Register hostcontrol function."""
+        api_host = APIHost(self.config, self.loop, host_control)

         self.webapp.router.add_get('/host/info', api_host.info)
-        self.webapp.router.add_get('/host/reboot', api_host.reboot)
-        self.webapp.router.add_get('/host/shutdown', api_host.shutdown)
-        self.webapp.router.add_get('/host/update', api_host.update)
+        self.webapp.router.add_post('/host/reboot', api_host.reboot)
+        self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
+        self.webapp.router.add_post('/host/update', api_host.update)

-    def register_network(self, host_controll):
+    def register_network(self, host_control):
         """Register network function."""
-        api_net = APINetwork(self.config, self.loop, host_controll)
+        api_net = APINetwork(self.config, self.loop, host_control)

         self.webapp.router.add_get('/network/info', api_net.info)
-        self.webapp.router.add_get('/network/options', api_net.options)
+        self.webapp.router.add_post('/network/options', api_net.options)

-    def register_supervisor(self, supervisor, addons):
+    def register_supervisor(self, supervisor, snapshots, addons, host_control,
+                            websession):
         """Register supervisor function."""
         api_supervisor = APISupervisor(
-            self.config, self.loop, supervisor, addons)
+            self.config, self.loop, supervisor, snapshots, addons,
+            host_control, websession)

         self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
         self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
-        self.webapp.router.add_get('/supervisor/update', api_supervisor.update)
-        self.webapp.router.add_get('/supervisor/reload', api_supervisor.reload)
+        self.webapp.router.add_get(
+            '/supervisor/addons', api_supervisor.available_addons)
+        self.webapp.router.add_post(
+            '/supervisor/update', api_supervisor.update)
+        self.webapp.router.add_post(
+            '/supervisor/reload', api_supervisor.reload)
+        self.webapp.router.add_post(
+            '/supervisor/options', api_supervisor.options)
         self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)

@@ -59,7 +68,9 @@ class RestAPI(object):
         api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)

         self.webapp.router.add_get('/homeassistant/info', api_hass.info)
-        self.webapp.router.add_get('/homeassistant/update', api_hass.update)
+        self.webapp.router.add_post('/homeassistant/options', api_hass.options)
+        self.webapp.router.add_post('/homeassistant/update', api_hass.update)
+        self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
         self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)

     def register_addons(self, addons):
@@ -67,17 +78,58 @@ class RestAPI(object):
         api_addons = APIAddons(self.config, self.loop, addons)

         self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
-        self.webapp.router.add_get(
+        self.webapp.router.add_post(
             '/addons/{addon}/install', api_addons.install)
-        self.webapp.router.add_get(
+        self.webapp.router.add_post(
             '/addons/{addon}/uninstall', api_addons.uninstall)
-        self.webapp.router.add_get('/addons/{addon}/start', api_addons.start)
-        self.webapp.router.add_get('/addons/{addon}/stop', api_addons.stop)
-        self.webapp.router.add_get('/addons/{addon}/update', api_addons.update)
-        self.webapp.router.add_get(
+        self.webapp.router.add_post('/addons/{addon}/start', api_addons.start)
+        self.webapp.router.add_post('/addons/{addon}/stop', api_addons.stop)
+        self.webapp.router.add_post(
+            '/addons/{addon}/restart', api_addons.restart)
+        self.webapp.router.add_post(
+            '/addons/{addon}/update', api_addons.update)
+        self.webapp.router.add_post(
             '/addons/{addon}/options', api_addons.options)
         self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)

+    def register_security(self):
+        """Register security function."""
+        api_security = APISecurity(self.config, self.loop)
+
+        self.webapp.router.add_get('/security/info', api_security.info)
+        self.webapp.router.add_post('/security/options', api_security.options)
+        self.webapp.router.add_post('/security/totp', api_security.totp)
+        self.webapp.router.add_post('/security/session', api_security.session)
+
+    def register_snapshots(self, snapshots):
+        """Register snapshots function."""
+        api_snapshots = APISnapshots(self.config, self.loop, snapshots)
+
+        self.webapp.router.add_post(
+            '/snapshots/new/full', api_snapshots.snapshot_full)
+        self.webapp.router.add_post(
+            '/snapshots/new/partial', api_snapshots.snapshot_partial)
+
+        self.webapp.router.add_get(
+            '/snapshots/{snapshot}/info', api_snapshots.info)
+        self.webapp.router.add_post(
+            '/snapshots/{snapshot}/remove', api_snapshots.remove)
+        self.webapp.router.add_post(
+            '/snapshots/{snapshot}/restore/full', api_snapshots.restore_full)
+        self.webapp.router.add_post(
+            '/snapshots/{snapshot}/restore/partial',
+            api_snapshots.restore_partial)
+
+    def register_panel(self):
+        """Register panel for homeassistant."""
+        panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html')
+
+        def get_panel(request):
+            """Return file response with panel."""
+            return web.FileResponse(panel)
+
+        self.webapp.router.add_get('/panel', get_panel)
+
     async def start(self):
         """Run rest api webserver."""
         self._handler = self.webapp.make_handler(loop=self.loop)
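With the state-changing endpoints switched from GET to POST, a client has to change verbs accordingly. A hypothetical aiohttp client call against the supervisor API; the host name and payload are illustrative only:

import asyncio
import aiohttp

async def trigger_supervisor_update(version=None):
    # GET on this route now returns 405; state changes go through POST.
    payload = {'version': version} if version else {}
    async with aiohttp.ClientSession() as session:
        async with session.post(
                'http://hassio/supervisor/update', json=payload) as resp:
            return await resp.json()

asyncio.get_event_loop().run_until_complete(trigger_supervisor_update())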
@@ -7,8 +7,11 @@ from voluptuous.humanize import humanize_error

 from .util import api_process, api_process_raw, api_validate
 from ..const import (
-    ATTR_VERSION, ATTR_CURRENT, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
-    STATE_STOPPED, STATE_STARTED, BOOT_AUTO, BOOT_MANUAL)
+    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
+    ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
+    ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK,
+    BOOT_AUTO, BOOT_MANUAL)
+from ..validate import DOCKER_PORTS

 _LOGGER = logging.getLogger(__name__)

@@ -16,8 +19,11 @@ SCHEMA_VERSION = vol.Schema({
     vol.Optional(ATTR_VERSION): vol.Coerce(str),
 })

+# pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema({
-    vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL])
+    vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
+    vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
+    vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
 })


@@ -32,12 +38,11 @@ class APIAddons(object):

     def _extract_addon(self, request, check_installed=True):
         """Return addon, or raise an exception if it does not exist."""
-        addon = request.match_info.get('addon')
-
-        # check data
-        if not self.addons.exists_addon(addon):
+        addon = self.addons.get(request.match_info.get('addon'))
+        if not addon:
             raise RuntimeError("Addon does not exist")
-        if check_installed and not self.addons.is_installed(addon):
+
+        if check_installed and not addon.is_installed:
             raise RuntimeError("Addon is not installed")

         return addon

@@ -45,33 +50,44 @@ class APIAddons(object):
     @api_process
     async def info(self, request):
         """Return addon information."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon(request, check_installed=False)

-        info = {
-            ATTR_VERSION: self.addons.version_installed(addon),
-            ATTR_CURRENT: self.addons.get_version(addon),
-            ATTR_STATE: await self.addons.state(addon),
-            ATTR_BOOT: self.addons.get_boot(addon),
-            ATTR_OPTIONS: self.addons.get_options(addon),
+        return {
+            ATTR_NAME: addon.name,
+            ATTR_DESCRIPTON: addon.description,
+            ATTR_VERSION: addon.version_installed,
+            ATTR_AUTO_UPDATE: addon.auto_update,
+            ATTR_REPOSITORY: addon.repository,
+            ATTR_LAST_VERSION: addon.last_version,
+            ATTR_STATE: await addon.state(),
+            ATTR_BOOT: addon.boot,
+            ATTR_OPTIONS: addon.options,
+            ATTR_URL: addon.url,
+            ATTR_DETACHED: addon.is_detached,
+            ATTR_BUILD: addon.need_build,
+            ATTR_NETWORK: addon.ports,
+            ATTR_HOST_NETWORK: addon.network_mode == 'host',
         }
-        return info

     @api_process
     async def options(self, request):
         """Store user options for addon."""
         addon = self._extract_addon(request)
-        options_schema = self.addons.get_schema(addon)

         addon_schema = SCHEMA_OPTIONS.extend({
-            vol.Optional(ATTR_OPTIONS): options_schema,
+            vol.Optional(ATTR_OPTIONS): addon.schema,
         })

-        addon_config = await api_validate(addon_schema, request)
+        body = await api_validate(addon_schema, request)

-        if ATTR_OPTIONS in addon_config:
-            self.addons.set_options(addon, addon_config[ATTR_OPTIONS])
-        if ATTR_BOOT in addon_config:
-            self.addons.set_options(addon, addon_config[ATTR_BOOT])
+        if ATTR_OPTIONS in body:
+            addon.options = body[ATTR_OPTIONS]
+        if ATTR_BOOT in body:
+            addon.boot = body[ATTR_BOOT]
+        if ATTR_AUTO_UPDATE in body:
+            addon.auto_update = body[ATTR_AUTO_UPDATE]
+        if ATTR_NETWORK in body:
+            addon.ports = body[ATTR_NETWORK]

         return True

@@ -80,66 +96,55 @@ class APIAddons(object):
         """Install addon."""
         body = await api_validate(SCHEMA_VERSION, request)
         addon = self._extract_addon(request, check_installed=False)
-        version = body.get(
-            ATTR_VERSION, self.addons.get_version(addon))
+        version = body.get(ATTR_VERSION)

         return await asyncio.shield(
-            self.addons.install(addon, version), loop=self.loop)
+            addon.install(version=version), loop=self.loop)

     @api_process
     async def uninstall(self, request):
         """Uninstall addon."""
         addon = self._extract_addon(request)
-
-        return await asyncio.shield(
-            self.addons.uninstall(addon), loop=self.loop)
+        return await asyncio.shield(addon.uninstall(), loop=self.loop)

     @api_process
     async def start(self, request):
         """Start addon."""
         addon = self._extract_addon(request)

-        if await self.addons.state(addon) == STATE_STARTED:
-            raise RuntimeError("Addon is already running")
-
-        # validate options
+        # check options
+        options = addon.options
         try:
-            schema = self.addons.get_schema(addon)
-            options = self.addons.get_options(addon)
-            schema(options)
+            addon.schema(options)
         except vol.Invalid as ex:
             raise RuntimeError(humanize_error(options, ex)) from None

-        return await asyncio.shield(
-            self.addons.start(addon), loop=self.loop)
+        return await asyncio.shield(addon.start(), loop=self.loop)

     @api_process
     async def stop(self, request):
         """Stop addon."""
         addon = self._extract_addon(request)
-
-        if await self.addons.state(addon) == STATE_STOPPED:
-            raise RuntimeError("Addon is already stopped")
-
-        return await asyncio.shield(
-            self.addons.stop(addon), loop=self.loop)
+        return await asyncio.shield(addon.stop(), loop=self.loop)

     @api_process
     async def update(self, request):
         """Update addon."""
         body = await api_validate(SCHEMA_VERSION, request)
         addon = self._extract_addon(request)
-        version = body.get(
-            ATTR_VERSION, self.addons.get_version(addon))
-
-        if version == self.addons.version_installed(addon):
-            raise RuntimeError("Version is already in use")
+        version = body.get(ATTR_VERSION)

         return await asyncio.shield(
-            self.addons.update(addon, version), loop=self.loop)
+            addon.update(version=version), loop=self.loop)

+    @api_process
+    async def restart(self, request):
+        """Restart addon."""
+        addon = self._extract_addon(request)
+        return await asyncio.shield(addon.restart(), loop=self.loop)
+
     @api_process_raw
     def logs(self, request):
         """Return logs from addon."""
         addon = self._extract_addon(request)
-        return self.addons.logs(addon)
+        return addon.logs()
@@ -5,10 +5,16 @@ import logging
 import voluptuous as vol

 from .util import api_process, api_process_raw, api_validate
-from ..const import ATTR_VERSION, ATTR_CURRENT
+from ..const import ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES
+from ..validate import HASS_DEVICES

 _LOGGER = logging.getLogger(__name__)

+SCHEMA_OPTIONS = vol.Schema({
+    vol.Optional(ATTR_DEVICES): HASS_DEVICES,
+})
+
 SCHEMA_VERSION = vol.Schema({
     vol.Optional(ATTR_VERSION): vol.Coerce(str),
 })
@@ -26,18 +32,27 @@ class APIHomeAssistant(object):
     @api_process
     async def info(self, request):
         """Return host information."""
-        info = {
+        return {
             ATTR_VERSION: self.homeassistant.version,
-            ATTR_CURRENT: self.config.current_homeassistant,
+            ATTR_LAST_VERSION: self.config.last_homeassistant,
+            ATTR_DEVICES: self.config.homeassistant_devices,
         }

-        return info
+    @api_process
+    async def options(self, request):
+        """Set homeassistant options."""
+        body = await api_validate(SCHEMA_OPTIONS, request)
+
+        if ATTR_DEVICES in body:
+            self.config.homeassistant_devices = body[ATTR_DEVICES]
+
+        return True
+
     @api_process
     async def update(self, request):
-        """Update host OS."""
+        """Update homeassistant."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.config.current_homeassistant)
+        version = body.get(ATTR_VERSION, self.config.last_homeassistant)

         if self.homeassistant.in_progress:
             raise RuntimeError("Other task is in progress")
@@ -48,6 +63,15 @@ class APIHomeAssistant(object):
         return await asyncio.shield(
             self.homeassistant.update(version), loop=self.loop)

+    @api_process
+    async def restart(self, request):
+        """Restart homeassistant."""
+        if self.homeassistant.in_progress:
+            raise RuntimeError("Other task is in progress")
+
+        return await asyncio.shield(
+            self.homeassistant.restart(), loop=self.loop)
+
     @api_process_raw
     def logs(self, request):
         """Return homeassistant docker logs.
@@ -1,15 +1,16 @@
 """Init file for HassIO host rest api."""
+import asyncio
 import logging

 import voluptuous as vol

-from .util import api_process_hostcontroll, api_process, api_validate
-from ..const import ATTR_VERSION
+from .util import api_process_hostcontrol, api_process, api_validate
+from ..const import (
+    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
+    ATTR_OS)

 _LOGGER = logging.getLogger(__name__)

-UNKNOWN = 'unknown'
-
 SCHEMA_VERSION = vol.Schema({
     vol.Optional(ATTR_VERSION): vol.Coerce(str),
 })
@@ -18,44 +19,42 @@ SCHEMA_VERSION = vol.Schema({
 class APIHost(object):
     """Handle rest api for host functions."""

-    def __init__(self, config, loop, host_controll):
+    def __init__(self, config, loop, host_control):
         """Initialize host rest api part."""
         self.config = config
         self.loop = loop
-        self.host_controll = host_controll
+        self.host_control = host_control

     @api_process
     async def info(self, request):
         """Return host information."""
-        if not self.host_controll.active:
-            info = {
-                'os': UNKNOWN,
-                'version': UNKNOWN,
-                'current': UNKNOWN,
-                'level': 0,
-                'hostname': UNKNOWN,
-            }
-            return info
+        return {
+            ATTR_TYPE: self.host_control.type,
+            ATTR_VERSION: self.host_control.version,
+            ATTR_LAST_VERSION: self.host_control.last_version,
+            ATTR_FEATURES: self.host_control.features,
+            ATTR_HOSTNAME: self.host_control.hostname,
+            ATTR_OS: self.host_control.os_info,
+        }

-        return await self.host_controll.info()
-
-    @api_process_hostcontroll
+    @api_process_hostcontrol
     def reboot(self, request):
         """Reboot host."""
-        return self.host_controll.reboot()
+        return self.host_control.reboot()

-    @api_process_hostcontroll
+    @api_process_hostcontrol
     def shutdown(self, request):
         """Poweroff host."""
-        return self.host_controll.shutdown()
+        return self.host_control.shutdown()

-    @api_process_hostcontroll
+    @api_process_hostcontrol
     async def update(self, request):
         """Update host OS."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION)
+        version = body.get(ATTR_VERSION, self.host_control.last_version)

-        if version == self.host_controll.version:
+        if version == self.host_control.version:
             raise RuntimeError("Version is already in use")

-        return await self.host_controll.host_update(version=version)
+        return await asyncio.shield(
+            self.host_control.update(version=version), loop=self.loop)
@@ -1,26 +1,43 @@
 """Init file for HassIO network rest api."""
 import logging

-from .util import api_process_hostcontroll
+import voluptuous as vol
+
+from .util import api_process, api_process_hostcontrol, api_validate
+from ..const import ATTR_HOSTNAME

 _LOGGER = logging.getLogger(__name__)


+SCHEMA_OPTIONS = vol.Schema({
+    vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
+})
+
+
 class APINetwork(object):
     """Handle rest api for network functions."""

-    def __init__(self, config, loop, host_controll):
+    def __init__(self, config, loop, host_control):
         """Initialize network rest api part."""
         self.config = config
         self.loop = loop
-        self.host_controll = host_controll
+        self.host_control = host_control

-    @api_process_hostcontroll
-    def info(self, request):
+    @api_process
+    async def info(self, request):
         """Show network settings."""
-        pass
+        return {
+            ATTR_HOSTNAME: self.host_control.hostname,
+        }

-    @api_process_hostcontroll
-    def options(self, request):
+    @api_process_hostcontrol
+    async def options(self, request):
         """Edit network settings."""
-        pass
+        body = await api_validate(SCHEMA_OPTIONS, request)
+
+        # hostname
+        if ATTR_HOSTNAME in body:
+            if self.host_control.hostname != body[ATTR_HOSTNAME]:
+                await self.host_control.set_hostname(body[ATTR_HOSTNAME])
+
+        return True
hassio/api/security.py (new file, 102 lines)
@@ -0,0 +1,102 @@
"""Init file for HassIO security rest api."""
|
||||
from datetime import datetime, timedelta
|
||||
import io
|
||||
import logging
|
||||
import hashlib
|
||||
import os
|
||||
|
||||
from aiohttp import web
|
||||
import voluptuous as vol
|
||||
import pyotp
|
||||
import pyqrcode
|
||||
|
||||
from .util import api_process, api_validate, hash_password
|
||||
from ..const import ATTR_INITIALIZE, ATTR_PASSWORD, ATTR_TOTP, ATTR_SESSION
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_PASSWORD = vol.Schema({
|
||||
vol.Required(ATTR_PASSWORD): vol.Coerce(str),
|
||||
})
|
||||
|
||||
SCHEMA_SESSION = SCHEMA_PASSWORD.extend({
|
||||
vol.Optional(ATTR_TOTP, default=None): vol.Coerce(str),
|
||||
})
|
||||
|
||||
|
||||
class APISecurity(object):
|
||||
"""Handle rest api for security functions."""
|
||||
|
||||
def __init__(self, config, loop):
|
||||
"""Initialize security rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
|
||||
def _check_password(self, body):
|
||||
"""Check if password is valid and security is initialize."""
|
||||
if not self.config.security_initialize:
|
||||
raise RuntimeError("First set a password")
|
||||
|
||||
password = hash_password(body[ATTR_PASSWORD])
|
||||
if password != self.config.security_password:
|
||||
raise RuntimeError("Wrong password")
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return host information."""
|
||||
return {
|
||||
ATTR_INITIALIZE: self.config.security_initialize,
|
||||
ATTR_TOTP: self.config.security_totp is not None,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
"""Set options / password."""
|
||||
body = await api_validate(SCHEMA_PASSWORD, request)
|
||||
|
||||
if self.config.security_initialize:
|
||||
raise RuntimeError("Password is already set!")
|
||||
|
||||
self.config.security_password = hash_password(body[ATTR_PASSWORD])
|
||||
self.config.security_initialize = True
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def totp(self, request):
|
||||
"""Set and initialze TOTP."""
|
||||
body = await api_validate(SCHEMA_PASSWORD, request)
|
||||
self._check_password(body)
|
||||
|
||||
# generate TOTP
|
||||
totp_init_key = pyotp.random_base32()
|
||||
totp = pyotp.TOTP(totp_init_key)
|
||||
|
||||
# init qrcode
|
||||
buff = io.BytesIO()
|
||||
|
||||
qrcode = pyqrcode.create(totp.provisioning_uri("Hass.IO"))
|
||||
qrcode.svg(buff)
|
||||
|
||||
# finish
|
||||
self.config.security_totp = totp_init_key
|
||||
return web.Response(body=buff.getvalue(), content_type='image/svg+xml')
|
||||
|
||||
@api_process
|
||||
async def session(self, request):
|
||||
"""Set and initialze session."""
|
||||
body = await api_validate(SCHEMA_SESSION, request)
|
||||
self._check_password(body)
|
||||
|
||||
# check TOTP
|
||||
if self.config.security_totp:
|
||||
totp = pyotp.TOTP(self.config.security_totp)
|
||||
if body[ATTR_TOTP] != totp.now():
|
||||
raise RuntimeError("Invalid TOTP token!")
|
||||
|
||||
# create session
|
||||
valid_until = datetime.now() + timedelta(days=1)
|
||||
session = hashlib.sha256(os.urandom(54)).hexdigest()
|
||||
|
||||
# store session
|
||||
self.config.security_sessions = (session, valid_until)
|
||||
return {ATTR_SESSION: session}
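The session endpoint checks a time-based one-time password with pyotp before minting the session token. A minimal sketch of that verification step in isolation; in the code above, the secret would come from config.security_totp:

import pyotp

secret = pyotp.random_base32()      # what totp() stores in config.security_totp
totp = pyotp.TOTP(secret)

token = totp.now()                  # what an authenticator app would display
print(token == totp.now())          # True while the same 30-second window lasts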
hassio/api/snapshots.py (new file, 117 lines)
@@ -0,0 +1,117 @@
"""Init file for HassIO snapshot rest api."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from .util import api_process, api_validate
|
||||
from ..snapshots.validate import ALL_FOLDERS
|
||||
from ..const import (
|
||||
ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
|
||||
ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
|
||||
ATTR_DEVICES)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_RESTORE_PARTIAL = vol.Schema({
|
||||
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
|
||||
vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
|
||||
vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
|
||||
})
|
||||
|
||||
SCHEMA_SNAPSHOT_FULL = vol.Schema({
|
||||
vol.Optional(ATTR_NAME): vol.Coerce(str),
|
||||
})
|
||||
|
||||
SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
|
||||
vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
|
||||
vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
|
||||
})
|
||||
|
||||
|
||||
class APISnapshots(object):
|
||||
"""Handle rest api for snapshot functions."""
|
||||
|
||||
def __init__(self, config, loop, snapshots):
|
||||
"""Initialize network rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.snapshots = snapshots
|
||||
|
||||
def _extract_snapshot(self, request):
|
||||
"""Return addon and if not exists trow a exception."""
|
||||
snapshot = self.snapshots.get(request.match_info.get('snapshot'))
|
||||
if not snapshot:
|
||||
raise RuntimeError("Snapshot not exists")
|
||||
return snapshot
|
||||
|
||||
@staticmethod
|
||||
def _addons_list(snapshot):
|
||||
"""Generate a list with addons data."""
|
||||
data = []
|
||||
for addon_data in snapshot.addons:
|
||||
data.append({
|
||||
ATTR_SLUG: addon_data[ATTR_SLUG],
|
||||
ATTR_NAME: addon_data[ATTR_NAME],
|
||||
ATTR_VERSION: addon_data[ATTR_VERSION],
|
||||
})
|
||||
return data
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return snapshot info."""
|
||||
snapshot = self._extract_snapshot(request)
|
||||
|
||||
return {
|
||||
ATTR_SLUG: snapshot.slug,
|
||||
ATTR_TYPE: snapshot.sys_type,
|
||||
ATTR_NAME: snapshot.name,
|
||||
ATTR_DATE: snapshot.date,
|
||||
ATTR_SIZE: snapshot.size,
|
||||
ATTR_HOMEASSISTANT: {
|
||||
ATTR_VERSION: snapshot.homeassistant_version,
|
||||
ATTR_DEVICES: snapshot.homeassistant_devices,
|
||||
},
|
||||
ATTR_ADDONS: self._addons_list(snapshot),
|
||||
ATTR_REPOSITORIES: snapshot.repositories,
|
||||
ATTR_FOLDERS: snapshot.folders,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def snapshot_full(self, request):
|
||||
"""Full-Snapshot a snapshot."""
|
||||
body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
|
||||
return await asyncio.shield(
|
||||
self.snapshots.do_snapshot_full(**body), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def snapshot_partial(self, request):
|
||||
"""Partial-Snapshot a snapshot."""
|
||||
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
|
||||
return await asyncio.shield(
|
||||
self.snapshots.do_snapshot_partial(**body), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def restore_full(self, request):
|
||||
"""Full-Restore a snapshot."""
|
||||
snapshot = self._extract_snapshot(request)
|
||||
return await asyncio.shield(
|
||||
self.snapshots.do_restore_full(snapshot), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def restore_partial(self, request):
|
||||
"""Partial-Restore a snapshot."""
|
||||
snapshot = self._extract_snapshot(request)
|
||||
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
|
||||
|
||||
return await asyncio.shield(
|
||||
self.snapshots.do_restore_partial(snapshot, **body),
|
||||
loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def remove(self, request):
|
||||
"""Remove a snapshot."""
|
||||
snapshot = self._extract_snapshot(request)
|
||||
return self.snapshots.remove(snapshot)
|
@@ -6,13 +6,20 @@ import voluptuous as vol
|
||||
|
||||
from .util import api_process, api_process_raw, api_validate
|
||||
from ..const import (
|
||||
ATTR_ADDONS, ATTR_VERSION, ATTR_CURRENT, ATTR_BETA, HASSIO_VERSION)
|
||||
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL,
|
||||
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES,
|
||||
ATTR_REPOSITORY, ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED,
|
||||
ATTR_DETACHED, ATTR_SOURCE, ATTR_MAINTAINER, ATTR_URL, ATTR_ARCH,
|
||||
ATTR_BUILD, ATTR_TIMEZONE, ATTR_DATE, ATTR_SNAPSHOTS)
|
||||
from ..tools import validate_timezone
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_OPTIONS = vol.Schema({
|
||||
# pylint: disable=no-value-for-parameter
|
||||
vol.Optional(ATTR_BETA): vol.Boolean(),
|
||||
vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
|
||||
vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
|
||||
vol.Optional(ATTR_TIMEZONE): validate_timezone,
|
||||
})
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
@@ -23,12 +30,64 @@ SCHEMA_VERSION = vol.Schema({
|
||||
class APISupervisor(object):
|
||||
"""Handle rest api for supervisor functions."""
|
||||
|
||||
def __init__(self, config, loop, supervisor, addons):
|
||||
def __init__(self, config, loop, supervisor, snapshots, addons,
|
||||
host_control, websession):
|
||||
"""Initialize supervisor rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.supervisor = supervisor
|
||||
self.addons = addons
|
||||
self.snapshots = snapshots
|
||||
self.host_control = host_control
|
||||
self.websession = websession
|
||||
|
||||
def _addons_list(self, only_installed=False):
|
||||
"""Return a list of addons."""
|
||||
data = []
|
||||
for addon in self.addons.list_addons:
|
||||
if only_installed and not addon.is_installed:
|
||||
continue
|
||||
|
||||
data.append({
|
||||
ATTR_NAME: addon.name,
|
||||
ATTR_SLUG: addon.slug,
|
||||
ATTR_DESCRIPTON: addon.description,
|
||||
ATTR_VERSION: addon.last_version,
|
||||
ATTR_INSTALLED: addon.version_installed,
|
||||
ATTR_ARCH: addon.supported_arch,
|
||||
ATTR_DETACHED: addon.is_detached,
|
||||
ATTR_REPOSITORY: addon.repository,
|
||||
ATTR_BUILD: addon.need_build,
|
||||
ATTR_URL: addon.url,
|
||||
})
|
||||
|
||||
return data
|
||||
|
||||
def _repositories_list(self):
|
||||
"""Return a list of addons repositories."""
|
||||
data = []
|
||||
for repository in self.addons.list_repositories:
|
||||
data.append({
|
||||
ATTR_SLUG: repository.slug,
|
||||
ATTR_NAME: repository.name,
|
||||
ATTR_SOURCE: repository.source,
|
||||
ATTR_URL: repository.url,
|
||||
ATTR_MAINTAINER: repository.maintainer,
|
||||
})
|
||||
|
||||
return data
|
||||
|
||||
def _snapshots_list(self):
|
||||
"""Return a list of available snapshots."""
|
||||
data = []
|
||||
for snapshot in self.snapshots.list_snapshots:
|
||||
data.append({
|
||||
ATTR_SLUG: snapshot.slug,
|
||||
ATTR_NAME: snapshot.name,
|
||||
ATTR_DATE: snapshot.date,
|
||||
})
|
||||
|
||||
return data
|
||||
|
||||
@api_process
|
||||
async def ping(self, request):
|
||||
@@ -38,29 +97,47 @@ class APISupervisor(object):
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return host information."""
|
||||
info = {
|
||||
return {
|
||||
ATTR_VERSION: HASSIO_VERSION,
|
||||
ATTR_CURRENT: self.config.current_hassio,
|
||||
ATTR_BETA: self.config.upstream_beta,
|
||||
ATTR_ADDONS: self.addons.list,
|
||||
ATTR_LAST_VERSION: self.config.last_hassio,
|
||||
ATTR_BETA_CHANNEL: self.config.upstream_beta,
|
||||
ATTR_ARCH: self.config.arch,
|
||||
ATTR_TIMEZONE: self.config.timezone,
|
||||
ATTR_ADDONS: self._addons_list(only_installed=True),
|
||||
ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
|
||||
ATTR_SNAPSHOTS: self._snapshots_list(),
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def available_addons(self, request):
|
||||
"""Return information for all available addons."""
|
||||
return {
|
||||
ATTR_ADDONS: self._addons_list(),
|
||||
ATTR_REPOSITORIES: self._repositories_list(),
|
||||
}
|
||||
return info
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
"""Set supervisor options."""
|
||||
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||
|
||||
if ATTR_BETA in body:
|
||||
self.config.upstream_beta = body[ATTR_BETA]
|
||||
if ATTR_BETA_CHANNEL in body:
|
||||
self.config.upstream_beta = body[ATTR_BETA_CHANNEL]
|
||||
|
||||
return self.config.save()
|
||||
if ATTR_TIMEZONE in body:
|
||||
self.config.timezone = body[ATTR_TIMEZONE]
|
||||
|
||||
if ATTR_ADDONS_REPOSITORIES in body:
|
||||
new = set(body[ATTR_ADDONS_REPOSITORIES])
|
||||
await asyncio.shield(self.addons.load_repositories(new))
|
||||
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update supervisor OS."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.config.current_hassio)
|
||||
version = body.get(ATTR_VERSION, self.config.last_hassio)
|
||||
|
||||
if version == self.supervisor.version:
|
||||
raise RuntimeError("Version is already in use")
|
||||
@@ -71,7 +148,12 @@ class APISupervisor(object):
|
||||
@api_process
|
||||
async def reload(self, request):
|
||||
"""Reload addons, config ect."""
|
||||
tasks = [self.addons.reload(), self.config.fetch_update_infos()]
|
||||
tasks = [
|
||||
self.addons.reload(),
|
||||
self.snapshots.reload(),
|
||||
self.config.fetch_update_infos(self.websession),
|
||||
self.host_control.load()
|
||||
]
|
||||
results, _ = await asyncio.shield(
|
||||
asyncio.wait(tasks, loop=self.loop), loop=self.loop)
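A corresponding request body for POST /supervisor/options could combine all three settings. The payload is illustrative only and assumes the usual snake_case strings behind the ATTR_* constants:

options_body = {
    'beta_channel': False,                       # ATTR_BETA_CHANNEL (assumed key)
    'timezone': 'UTC',                           # must pass validate_timezone
    'addons_repositories': [                     # ATTR_ADDONS_REPOSITORIES (assumed key)
        'https://github.com/hassio-addons/repository',
    ],
}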
@@ -1,5 +1,6 @@
 """Init file for HassIO util for rest api."""
 import json
+import hashlib
 import logging

 from aiohttp import web

@@ -32,6 +33,8 @@ def api_process(method):

         if isinstance(answer, dict):
             return api_return_ok(data=answer)
+        if isinstance(answer, web.Response):
+            return answer
         elif answer:
             return api_return_ok()
         return api_return_error()

@@ -39,11 +42,11 @@ def api_process(method):
     return wrap_api


-def api_process_hostcontroll(method):
-    """Wrap HostControll calls to rest api."""
-    async def wrap_hostcontroll(api, *args, **kwargs):
+def api_process_hostcontrol(method):
+    """Wrap HostControl calls to rest api."""
+    async def wrap_hostcontrol(api, *args, **kwargs):
         """Return host information."""
-        if not api.host_controll.active:
+        if not api.host_control.active:
             raise HTTPServiceUnavailable()

         try:
@@ -59,7 +62,7 @@ def api_process_hostcontroll(method):
             return api_return_ok()
         return api_return_error()

-    return wrap_hostcontroll
+    return wrap_hostcontrol


 def api_process_raw(method):
@@ -78,10 +81,13 @@ def api_process_raw(method):

 def api_return_error(message=None):
     """Return an API error message."""
+    if message:
+        _LOGGER.error(message)
+
     return web.json_response({
         JSON_RESULT: RESULT_ERROR,
         JSON_MESSAGE: message,
-    })
+    }, status=400)


 def api_return_ok(data=None):
@@ -101,3 +107,9 @@ async def api_validate(schema, request):
         raise RuntimeError(humanize_error(data, ex)) from None

     return data
+
+
+def hash_password(password):
+    """Hash and salt our passwords."""
+    key = ")*()*SALT_HASSIO2123{}6554547485HSKA!!*JSLAfdasda$".format(password)
+    return hashlib.sha256(key.encode()).hexdigest()
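hash_password folds the password into a fixed application-wide salt string and hashes once with SHA-256, so the digest is fully deterministic. A quick check:

from hashlib import sha256

def hash_password(password):
    key = ")*()*SALT_HASSIO2123{}6554547485HSKA!!*JSLAfdasda$".format(password)
    return sha256(key.encode()).hexdigest()

# Deterministic: equal passwords always produce equal digests.
assert hash_password("secret") == hash_password("secret")
assert hash_password("secret") != hash_password("Secret")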
@@ -1,8 +1,8 @@
 """Bootstrap HassIO."""
 import logging
 import os
-import stat
 import signal
+from pathlib import Path

 from colorlog import ColoredFormatter

@@ -12,35 +12,67 @@ from .config import CoreConfig
 _LOGGER = logging.getLogger(__name__)


-def initialize_system_data(websession):
+def initialize_system_data():
     """Setup default config and create folders."""
-    config = CoreConfig(websession)
+    config = CoreConfig()

     # homeassistant config folder
-    if not os.path.isdir(config.path_config):
+    if not config.path_config.is_dir():
         _LOGGER.info(
             "Create Home-Assistant config folder %s", config.path_config)
-        os.mkdir(config.path_config)
+        config.path_config.mkdir()

-    # homeassistant ssl folder
-    if not os.path.isdir(config.path_ssl):
-        _LOGGER.info("Create Home-Assistant ssl folder %s", config.path_ssl)
-        os.mkdir(config.path_ssl)
+    # hassio ssl folder
+    if not config.path_ssl.is_dir():
+        _LOGGER.info("Create hassio ssl folder %s", config.path_ssl)
+        config.path_ssl.mkdir()

-    # homeassistant addon data folder
-    if not os.path.isdir(config.path_addons_data):
-        _LOGGER.info("Create Home-Assistant addon data folder %s",
-                     config.path_addons_data)
-        os.mkdir(config.path_addons_data)
+    # hassio addon data folder
+    if not config.path_addons_data.is_dir():
+        _LOGGER.info(
+            "Create hassio addon data folder %s", config.path_addons_data)
+        config.path_addons_data.mkdir(parents=True)

-    if not os.path.isdir(config.path_addons_custom):
-        _LOGGER.info("Create Home-Assistant addon custom folder %s",
-                     config.path_addons_custom)
-        os.mkdir(config.path_addons_custom)
+    if not config.path_addons_local.is_dir():
+        _LOGGER.info("Create hassio addon local repository folder %s",
+                     config.path_addons_local)
+        config.path_addons_local.mkdir(parents=True)
+
+    if not config.path_addons_git.is_dir():
+        _LOGGER.info("Create hassio addon git repositories folder %s",
+                     config.path_addons_git)
+        config.path_addons_git.mkdir(parents=True)
+
+    # hassio tmp folder
+    if not config.path_tmp.is_dir():
+        _LOGGER.info("Create hassio temp folder %s", config.path_tmp)
+        config.path_tmp.mkdir(parents=True)
+
+    # hassio backup folder
+    if not config.path_backup.is_dir():
+        _LOGGER.info("Create hassio backup folder %s", config.path_backup)
+        config.path_backup.mkdir()
+
+    # share folder
+    if not config.path_share.is_dir():
+        _LOGGER.info("Create hassio share folder %s", config.path_share)
+        config.path_share.mkdir()

     return config


+def migrate_system_env(config):
+    """Cleanup some stuff after update."""
+
+    # hass.io 0.37 -> 0.38
+    old_build = Path(config.path_hassio, "addons/build")
+    if old_build.is_dir():
+        try:
+            old_build.rmdir()
+        except OSError:
+            _LOGGER.warning("Can't cleanup old addons build dir.")
+
+
 def initialize_logging():
     """Setup the logging."""
     logging.basicConfig(level=logging.INFO)
@@ -76,8 +108,7 @@ def check_environment():
         _LOGGER.fatal("Can't find %s in env!", key)
         return False

-    mode = os.stat(SOCKET_DOCKER)[stat.ST_MODE]
-    if not stat.S_ISSOCK(mode):
+    if not SOCKET_DOCKER.is_socket():
         _LOGGER.fatal("Can't find docker socket!")
         return False
hassio/config.py (317 lines)
@@ -1,79 +1,117 @@
 """Bootstrap HassIO."""
+from datetime import datetime
 import logging
+import json
-import os
+from pathlib import Path, PurePath

-from .const import FILE_HASSIO_CONFIG, HASSIO_SHARE
+import voluptuous as vol
+from voluptuous.humanize import humanize_error
+
+from .const import FILE_HASSIO_CONFIG, HASSIO_DATA
 from .tools import (
-    fetch_current_versions, write_json_file, read_json_file)
+    fetch_last_versions, write_json_file, read_json_file, validate_timezone)
+from .validate import HASS_DEVICES

 _LOGGER = logging.getLogger(__name__)

-HOMEASSISTANT_CONFIG = "{}/homeassistant"
-HOMEASSISTANT_IMAGE = 'homeassistant_image'
-HOMEASSISTANT_CURRENT = 'homeassistant_current'
+DATETIME_FORMAT = "%Y%m%d %H:%M:%S"

-HASSIO_SSL = "{}/ssl"
-HASSIO_CURRENT = 'hassio_current'
-HASSIO_CLEANUP = 'hassio_cleanup'
+HOMEASSISTANT_CONFIG = PurePath("homeassistant")
+HOMEASSISTANT_LAST = 'homeassistant_last'
+HOMEASSISTANT_DEVICES = 'homeassistant_devices'

-ADDONS_REPO = "{}/addons"
-ADDONS_DATA = "{}/addons_data"
-ADDONS_CUSTOM = "{}/addons_custom"
+HASSIO_SSL = PurePath("ssl")
+HASSIO_LAST = 'hassio_last'
+
+ADDONS_CORE = PurePath("addons/core")
+ADDONS_LOCAL = PurePath("addons/local")
+ADDONS_GIT = PurePath("addons/git")
+ADDONS_DATA = PurePath("addons/data")
 ADDONS_CUSTOM_LIST = 'addons_custom_list'

+BACKUP_DATA = PurePath("backup")
+SHARE_DATA = PurePath("share")
+TMP_DATA = PurePath("tmp")
+
 UPSTREAM_BETA = 'upstream_beta'
 API_ENDPOINT = 'api_endpoint'
+TIMEZONE = 'timezone'
+
+SECURITY_INITIALIZE = 'security_initialize'
+SECURITY_TOTP = 'security_totp'
+SECURITY_PASSWORD = 'security_password'
+SECURITY_SESSIONS = 'security_sessions'


-class Config(object):
-    """Hold all config data."""
+# pylint: disable=no-value-for-parameter
+SCHEMA_CONFIG = vol.Schema({
+    vol.Optional(UPSTREAM_BETA, default=False): vol.Boolean(),
+    vol.Optional(API_ENDPOINT): vol.Coerce(str),
+    vol.Optional(TIMEZONE, default='UTC'): validate_timezone,
+    vol.Optional(HOMEASSISTANT_LAST): vol.Coerce(str),
+    vol.Optional(HOMEASSISTANT_DEVICES, default=[]): HASS_DEVICES,
+    vol.Optional(HASSIO_LAST): vol.Coerce(str),
+    vol.Optional(ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
+    vol.Optional(SECURITY_INITIALIZE, default=False): vol.Boolean(),
+    vol.Optional(SECURITY_TOTP): vol.Coerce(str),
+    vol.Optional(SECURITY_PASSWORD): vol.Coerce(str),
+    vol.Optional(SECURITY_SESSIONS, default={}):
+        {vol.Coerce(str): vol.Coerce(str)},
+}, extra=vol.REMOVE_EXTRA)

-    def __init__(self, config_file):
+
+class CoreConfig(object):
+    """Hold all core config data."""
+
+    def __init__(self):
         """Initialize config object."""
-        self._filename = config_file
+        self.arch = None
+        self._file = FILE_HASSIO_CONFIG
         self._data = {}

         # init or load data
-        if os.path.isfile(self._filename):
+        if self._file.is_file():
             try:
-                self._data = read_json_file(self._filename)
-            except OSError:
-                _LOGGER.warning("Can't read %s", self._filename)
+                self._data = read_json_file(self._file)
+            except (OSError, json.JSONDecodeError):
+                _LOGGER.warning("Can't read %s", self._file)
                 self._data = {}

+        # validate data
+        if not self._validate_config():
+            self._data = SCHEMA_CONFIG({})
+
+    def _validate_config(self):
+        """Validate config and return True or False."""
+        # validate data
+        try:
+            self._data = SCHEMA_CONFIG(self._data)
+        except vol.Invalid as ex:
+            _LOGGER.warning(
+                "Invalid config %s", humanize_error(self._data, ex))
+            return False
+
+        return True
+
     def save(self):
         """Store data to config file."""
-        if not write_json_file(self._filename, self._data):
-            _LOGGER.exception("Can't store config in %s", self._filename)
+        if not self._validate_config():
+            return False
+
+        if not write_json_file(self._file, self._data):
+            _LOGGER.error("Can't store config in %s", self._file)
+            return False
+        return True


-class CoreConfig(Config):
-    """Hold all core config data."""
|
||||
|
||||
def __init__(self, websession):
|
||||
"""Initialize config object."""
|
||||
self.websession = websession
|
||||
|
||||
super().__init__(FILE_HASSIO_CONFIG)
|
||||
|
||||
# init data
|
||||
if not self._data:
|
||||
self._data.update({
|
||||
HOMEASSISTANT_IMAGE: os.environ['HOMEASSISTANT_REPOSITORY'],
|
||||
UPSTREAM_BETA: False,
|
||||
})
|
||||
self.save()
|
||||
|
||||
async def fetch_update_infos(self):
|
||||
async def fetch_update_infos(self, websession):
|
||||
"""Read current versions from web."""
|
||||
current = await fetch_current_versions(
|
||||
self.websession, beta=self.upstream_beta)
|
||||
last = await fetch_last_versions(websession, beta=self.upstream_beta)
|
||||
|
||||
if current:
|
||||
if last:
|
||||
self._data.update({
|
||||
HOMEASSISTANT_CURRENT: current.get('homeassistant_tag'),
|
||||
HASSIO_CURRENT: current.get('hassio_tag'),
|
||||
HOMEASSISTANT_LAST: last.get('homeassistant'),
|
||||
HASSIO_LAST: last.get('hassio'),
|
||||
})
|
||||
self.save()
|
||||
return True
|
||||
@@ -93,83 +131,206 @@ class CoreConfig(Config):
     @property
     def upstream_beta(self):
         """Return True if we run in beta upstream."""
-        return self._data.get(UPSTREAM_BETA, False)
+        return self._data[UPSTREAM_BETA]

     @upstream_beta.setter
     def upstream_beta(self, value):
         """Set beta upstream mode."""
         self._data[UPSTREAM_BETA] = bool(value)
         self.save()

     @property
-    def hassio_cleanup(self):
-        """Return Version they need to cleanup."""
-        return self._data.get(HASSIO_CLEANUP)
+    def timezone(self):
+        """Return system timezone."""
+        return self._data[TIMEZONE]

-    @hassio_cleanup.setter
-    def hassio_cleanup(self, version):
-        """Set or remove cleanup flag."""
-        if version is None:
-            self._data.pop(HASSIO_CLEANUP, None)
-        else:
-            self._data[HASSIO_CLEANUP] = version
+    @timezone.setter
+    def timezone(self, value):
+        """Set system timezone."""
+        self._data[TIMEZONE] = value
         self.save()

+    @property
+    def homeassistant_devices(self):
+        """Return list of special device to map into homeassistant."""
+        return self._data[HOMEASSISTANT_DEVICES]
+
+    @homeassistant_devices.setter
+    def homeassistant_devices(self, value):
+        """Set list of special device."""
+        self._data[HOMEASSISTANT_DEVICES] = value
+        self.save()
+
     @property
     def homeassistant_image(self):
         """Return docker homeassistant repository."""
-        return self._data.get(HOMEASSISTANT_IMAGE)
+        return os.environ['HOMEASSISTANT_REPOSITORY']

     @property
-    def current_homeassistant(self):
+    def last_homeassistant(self):
         """Actual version of homeassistant."""
-        return self._data.get(HOMEASSISTANT_CURRENT)
+        return self._data.get(HOMEASSISTANT_LAST)

     @property
-    def current_hassio(self):
+    def last_hassio(self):
         """Actual version of hassio."""
-        return self._data.get(HASSIO_CURRENT)
+        return self._data.get(HASSIO_LAST)

     @property
-    def path_hassio_docker(self):
+    def path_hassio(self):
         """Return hassio data path."""
+        return HASSIO_DATA
+
+    @property
+    def path_extern_hassio(self):
+        """Return hassio data path extern for docker."""
-        return os.environ['SUPERVISOR_SHARE']
+        return PurePath(os.environ['SUPERVISOR_SHARE'])

     @property
-    def path_config_docker(self):
+    def path_extern_config(self):
         """Return config path extern for docker."""
-        return HOMEASSISTANT_CONFIG.format(self.path_hassio_docker)
+        return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))

     @property
     def path_config(self):
         """Return config path inside supervisor."""
-        return HOMEASSISTANT_CONFIG.format(HASSIO_SHARE)
+        return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)

     @property
-    def path_ssl_docker(self):
+    def path_extern_ssl(self):
         """Return SSL path extern for docker."""
-        return HASSIO_SSL.format(self.path_hassio_docker)
+        return str(PurePath(self.path_extern_hassio, HASSIO_SSL))

     @property
     def path_ssl(self):
         """Return SSL path inside supervisor."""
-        return HASSIO_SSL.format(HASSIO_SHARE)
+        return Path(HASSIO_DATA, HASSIO_SSL)

     @property
-    def path_addons_repo(self):
-        """Return git repo path for addons."""
-        return ADDONS_REPO.format(HASSIO_SHARE)
+    def path_addons_core(self):
+        """Return git path for core addons."""
+        return Path(HASSIO_DATA, ADDONS_CORE)

     @property
-    def path_addons_custom(self):
+    def path_addons_git(self):
+        """Return path for git addons."""
+        return Path(HASSIO_DATA, ADDONS_GIT)
+
+    @property
+    def path_addons_local(self):
         """Return path for customs addons."""
-        return ADDONS_CUSTOM.format(HASSIO_SHARE)
+        return Path(HASSIO_DATA, ADDONS_LOCAL)

+    @property
+    def path_extern_addons_local(self):
+        """Return path for customs addons."""
+        return PurePath(self.path_extern_hassio, ADDONS_LOCAL)
+
     @property
     def path_addons_data(self):
         """Return root addon data folder."""
-        return ADDONS_DATA.format(HASSIO_SHARE)
+        return Path(HASSIO_DATA, ADDONS_DATA)

     @property
-    def path_addons_data_docker(self):
+    def path_extern_addons_data(self):
         """Return root addon data folder extern for docker."""
-        return ADDONS_DATA.format(self.path_hassio_docker)
+        return PurePath(self.path_extern_hassio, ADDONS_DATA)

+    @property
+    def path_tmp(self):
+        """Return hass.io temp folder."""
+        return Path(HASSIO_DATA, TMP_DATA)
+
+    @property
+    def path_backup(self):
+        """Return root backup data folder."""
+        return Path(HASSIO_DATA, BACKUP_DATA)
+
+    @property
+    def path_extern_backup(self):
+        """Return root backup data folder extern for docker."""
+        return PurePath(self.path_extern_hassio, BACKUP_DATA)
+
+    @property
+    def path_share(self):
+        """Return root share data folder."""
+        return Path(HASSIO_DATA, SHARE_DATA)
+
+    @property
+    def path_extern_share(self):
+        """Return root share data folder extern for docker."""
+        return PurePath(self.path_extern_hassio, SHARE_DATA)
+
     @property
     def addons_repositories(self):
         """Return list of addons custom repositories."""
         return self._data[ADDONS_CUSTOM_LIST]

     @addons_repositories.setter
     def addons_repositories(self, repo):
         """Add a custom repository to list."""
         if repo in self._data[ADDONS_CUSTOM_LIST]:
             return

         self._data[ADDONS_CUSTOM_LIST].append(repo)
         self.save()

     def drop_addon_repository(self, repo):
         """Remove a custom repository from list."""
         if repo not in self._data[ADDONS_CUSTOM_LIST]:
             return

         self._data[ADDONS_CUSTOM_LIST].remove(repo)
         self.save()

+    @property
+    def security_initialize(self):
+        """Return is security was initialize."""
+        return self._data[SECURITY_INITIALIZE]
+
+    @security_initialize.setter
+    def security_initialize(self, value):
+        """Set is security initialize."""
+        self._data[SECURITY_INITIALIZE] = value
+        self.save()
+
+    @property
+    def security_totp(self):
+        """Return the TOTP key."""
+        return self._data.get(SECURITY_TOTP)
+
+    @security_totp.setter
+    def security_totp(self, value):
+        """Set the TOTP key."""
+        self._data[SECURITY_TOTP] = value
+        self.save()
+
+    @property
+    def security_password(self):
+        """Return the password key."""
+        return self._data.get(SECURITY_PASSWORD)
+
+    @security_password.setter
+    def security_password(self, value):
+        """Set the password key."""
+        self._data[SECURITY_PASSWORD] = value
+        self.save()
+
+    @property
+    def security_sessions(self):
+        """Return api sessions."""
+        return {session: datetime.strptime(until, DATETIME_FORMAT) for
+                session, until in self._data[SECURITY_SESSIONS].items()}
+
+    @security_sessions.setter
+    def security_sessions(self, value):
+        """Set the a new session."""
+        session, valid = value
+        if valid is None:
+            self._data[SECURITY_SESSIONS].pop(session, None)
+        else:
+            self._data[SECURITY_SESSIONS].update(
+                {session: valid.strftime(DATETIME_FORMAT)}
+            )
+
+        self.save()
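CoreConfig funnels every load and save through _validate_config(), so the JSON on disk is always coerced back to a known shape before use. A standalone sketch of that voluptuous pattern (the two-key schema here is a simplified stand-in for the real SCHEMA_CONFIG):

import voluptuous as vol
from voluptuous.humanize import humanize_error

SCHEMA = vol.Schema({
    vol.Optional('upstream_beta', default=False): vol.Boolean(),
    vol.Optional('timezone', default='UTC'): vol.Coerce(str),
}, extra=vol.REMOVE_EXTRA)

data = {'upstream_beta': '1', 'junk': 'dropped'}
try:
    data = SCHEMA(data)  # fills defaults, coerces types, drops unknown keys
except vol.Invalid as ex:
    print(humanize_error(data, ex))

print(data)  # {'upstream_beta': True, 'timezone': 'UTC'}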
hassio/const.py
@@ -1,28 +1,40 @@
 """Const file for HassIO."""
-HASSIO_VERSION = '0.13'
+from pathlib import Path

-URL_HASSIO_VERSION = \
-    'https://raw.githubusercontent.com/pvizeli/hassio/master/version.json'
-URL_HASSIO_VERSION_BETA = \
-    'https://raw.githubusercontent.com/pvizeli/hassio/master/version_beta.json'
+HASSIO_VERSION = '0.41'

-URL_HASSIO_ADDONS = 'https://github.com/pvizeli/hassio-addons'
+URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
+                      'hassio/master/version.json')
+URL_HASSIO_VERSION_BETA = ('https://raw.githubusercontent.com/home-assistant/'
+                           'hassio/dev/version.json')

-DOCKER_REPO = "pvizeli"
+URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'

-HASSIO_SHARE = "/data"
+HASSIO_DATA = Path("/data")

 RUN_UPDATE_INFO_TASKS = 28800
 RUN_UPDATE_SUPERVISOR_TASKS = 29100
+RUN_UPDATE_ADDONS_TASKS = 57600
 RUN_RELOAD_ADDONS_TASKS = 28800
+RUN_RELOAD_SNAPSHOTS_TASKS = 72000
+RUN_WATCHDOG_HOMEASSISTANT = 15
+RUN_CLEANUP_API_SESSIONS = 900

 RESTART_EXIT_CODE = 100

-FILE_HASSIO_ADDONS = "{}/addons.json".format(HASSIO_SHARE)
-FILE_HASSIO_CONFIG = "{}/config.json".format(HASSIO_SHARE)
+FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
+FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")

-SOCKET_DOCKER = "/var/run/docker.sock"
-SOCKET_HC = "/var/run/hassio-hc.sock"
+SOCKET_DOCKER = Path("/var/run/docker.sock")
+SOCKET_HC = Path("/var/run/hassio-hc.sock")

+LABEL_VERSION = 'io.hass.version'
+LABEL_ARCH = 'io.hass.arch'
+LABEL_TYPE = 'io.hass.type'
+
+META_ADDON = 'addon'
+META_SUPERVISOR = 'supervisor'
+META_HOMEASSISTANT = 'homeassistant'
+
 JSON_RESULT = 'result'
 JSON_DATA = 'data'
@@ -31,25 +43,59 @@ JSON_MESSAGE = 'message'
 RESULT_ERROR = 'error'
 RESULT_OK = 'ok'

 ATTR_DATE = 'date'
 ATTR_ARCH = 'arch'
 ATTR_HOSTNAME = 'hostname'
 ATTR_TIMEZONE = 'timezone'
 ATTR_OS = 'os'
 ATTR_TYPE = 'type'
 ATTR_SOURCE = 'source'
 ATTR_FEATURES = 'features'
 ATTR_ADDONS = 'addons'
 ATTR_VERSION = 'version'
-ATTR_CURRENT = 'current'
-ATTR_BETA = 'beta'
+ATTR_LAST_VERSION = 'last_version'
+ATTR_BETA_CHANNEL = 'beta_channel'
 ATTR_NAME = 'name'
 ATTR_SLUG = 'slug'
 ATTR_DESCRIPTON = 'description'
 ATTR_STARTUP = 'startup'
 ATTR_BOOT = 'boot'
 ATTR_PORTS = 'ports'
-ATTR_MAP_CONFIG = 'map_config'
-ATTR_MAP_SSL = 'map_ssl'
+ATTR_MAP = 'map'
 ATTR_OPTIONS = 'options'
 ATTR_INSTALLED = 'installed'
-ATTR_DEDICATED = 'dedicated'
+ATTR_DETACHED = 'detached'
 ATTR_STATE = 'state'
 ATTR_SCHEMA = 'schema'
 ATTR_IMAGE = 'image'
 ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
 ATTR_REPOSITORY = 'repository'
 ATTR_REPOSITORIES = 'repositories'
 ATTR_URL = 'url'
 ATTR_MAINTAINER = 'maintainer'
 ATTR_PASSWORD = 'password'
 ATTR_TOTP = 'totp'
 ATTR_INITIALIZE = 'initialize'
 ATTR_SESSION = 'session'
 ATTR_LOCATON = 'location'
 ATTR_BUILD = 'build'
 ATTR_DEVICES = 'devices'
 ATTR_ENVIRONMENT = 'environment'
 ATTR_HOST_NETWORK = 'host_network'
 ATTR_NETWORK = 'network'
 ATTR_TMPFS = 'tmpfs'
 ATTR_PRIVILEGED = 'privileged'
 ATTR_USER = 'user'
 ATTR_SYSTEM = 'system'
 ATTR_SNAPSHOTS = 'snapshots'
 ATTR_HOMEASSISTANT = 'homeassistant'
 ATTR_FOLDERS = 'folders'
 ATTR_SIZE = 'size'
 ATTR_TYPE = 'type'
 ATTR_TIMEOUT = 'timeout'
 ATTR_AUTO_UPDATE = 'auto_update'

 STARTUP_INITIALIZE = 'initialize'
 STARTUP_BEFORE = 'before'
 STARTUP_AFTER = 'after'
 STARTUP_ONCE = 'once'
@@ -59,3 +105,26 @@ BOOT_MANUAL = 'manual'

 STATE_STARTED = 'started'
 STATE_STOPPED = 'stopped'
+STATE_NONE = 'none'
+
+MAP_CONFIG = 'config'
+MAP_SSL = 'ssl'
+MAP_ADDONS = 'addons'
+MAP_BACKUP = 'backup'
+MAP_SHARE = 'share'
+
+ARCH_ARMHF = 'armhf'
+ARCH_AARCH64 = 'aarch64'
+ARCH_AMD64 = 'amd64'
+ARCH_I386 = 'i386'
+
+REPOSITORY_CORE = 'core'
+REPOSITORY_LOCAL = 'local'
+
+FOLDER_HOMEASSISTANT = 'homeassistant'
+FOLDER_SHARE = 'share'
+FOLDER_ADDONS = 'addons/local'
+FOLDER_SSL = 'ssl'
+
+SNAPSHOT_FULL = 'full'
+SNAPSHOT_PARTIAL = 'partial'
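A note on the Path/PurePath split introduced here and in config.py: locations under HASSIO_DATA are Path objects because they exist inside the supervisor container and get created or read directly, while the path_extern_* variants are PurePath because they name host-side directories that the supervisor only ever hands to Docker as bind-mount strings. Roughly (the host path below is a placeholder):

from pathlib import Path, PurePath

HASSIO_DATA = Path("/data")                # mounted inside the supervisor
extern = PurePath("/mnt/data/supervisor")  # host path, used only for binds

print(Path(HASSIO_DATA, "ssl"))            # /data/ssl -> can be created/read
print(str(PurePath(extern, "ssl")))        # bind source handed to Docker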
hassio/core.py
@@ -5,17 +5,22 @@ import logging
 import aiohttp
 import docker

-from . import bootstrap
 from .addons import AddonManager
 from .api import RestAPI
-from .host_controll import HostControll
+from .host_control import HostControl
 from .const import (
     SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
-    RUN_UPDATE_SUPERVISOR_TASKS, STARTUP_AFTER, STARTUP_BEFORE)
+    RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
+    RUN_CLEANUP_API_SESSIONS, STARTUP_AFTER, STARTUP_BEFORE,
+    STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS, RUN_UPDATE_ADDONS_TASKS)
 from .scheduler import Scheduler
 from .dock.homeassistant import DockerHomeAssistant
 from .dock.supervisor import DockerSupervisor
-from .tools import get_arch_from_image, get_local_ip
+from .snapshots import SnapshotsManager
+from .tasks import (
+    hassio_update, homeassistant_watchdog, homeassistant_setup,
+    api_sessions_cleanup, addons_update)
+from .tools import get_local_ip, fetch_timezone

 _LOGGER = logging.getLogger(__name__)

@@ -23,130 +28,143 @@ _LOGGER = logging.getLogger(__name__)
 class HassIO(object):
     """Main object of hassio."""

-    def __init__(self, loop):
+    def __init__(self, loop, config):
         """Initialize hassio object."""
         self.exit_code = 0
         self.loop = loop
-        self.websession = aiohttp.ClientSession(loop=self.loop)
-        self.config = bootstrap.initialize_system_data(self.websession)
-        self.scheduler = Scheduler(self.loop)
-        self.api = RestAPI(self.config, self.loop)
+        self.config = config
+        self.websession = aiohttp.ClientSession(loop=loop)
+        self.scheduler = Scheduler(loop)
+        self.api = RestAPI(config, loop)
         self.dock = docker.DockerClient(
-            base_url="unix:/{}".format(SOCKET_DOCKER), version='auto')
+            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')

         # init basic docker container
-        self.supervisor = DockerSupervisor(
-            self.config, self.loop, self.dock, self)
-        self.homeassistant = DockerHomeAssistant(
-            self.config, self.loop, self.dock)
+        self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)
+        self.homeassistant = DockerHomeAssistant(config, loop, self.dock)

-        # init HostControll
-        self.host_controll = HostControll(self.loop)
+        # init HostControl
+        self.host_control = HostControl(loop)

         # init addon system
-        self.addons = AddonManager(self.config, self.loop, self.dock)
+        self.addons = AddonManager(config, loop, self.dock)

+        # init snapshot system
+        self.snapshots = SnapshotsManager(
+            config, loop, self.scheduler, self.addons, self.homeassistant)
+
     async def setup(self):
         """Setup HassIO orchestration."""
         # supervisor
-        await self.supervisor.attach()
+        if not await self.supervisor.attach():
+            _LOGGER.fatal("Can't attach to supervisor docker container!")
         await self.supervisor.cleanup()

+        # set running arch
+        self.config.arch = self.supervisor.arch
+
         # set api endpoint
         self.config.api_endpoint = await get_local_ip(self.loop)

-        # hostcontroll
-        host_info = await self.host_controll.info()
-        if host_info:
-            self.host_controll.version = host_info.get('version')
-            _LOGGER.info(
-                "Connected to HostControll. OS: %s Version: %s Hostname: %s "
-                "Feature-lvl: %d", host_info.get('os'),
-                host_info.get('version'), host_info.get('hostname'),
-                host_info.get('level', 0))
+        # update timezone
+        if self.config.timezone == 'UTC':
+            self.config.timezone = await fetch_timezone(self.websession)

-        # rest api views
-        self.api.register_host(self.host_controll)
-        self.api.register_network(self.host_controll)
-        self.api.register_supervisor(self.supervisor, self.addons)
-        self.api.register_homeassistant(self.homeassistant)
-        self.api.register_addons(self.addons)
+        # hostcontrol
+        await self.host_control.load()

         # schedule update info tasks
         self.scheduler.register_task(
-            self.config.fetch_update_infos, RUN_UPDATE_INFO_TASKS,
+            self.host_control.load, RUN_UPDATE_INFO_TASKS)

+        # rest api views
+        self.api.register_host(self.host_control)
+        self.api.register_network(self.host_control)
+        self.api.register_supervisor(
+            self.supervisor, self.snapshots, self.addons, self.host_control,
+            self.websession)
+        self.api.register_homeassistant(self.homeassistant)
+        self.api.register_addons(self.addons)
+        self.api.register_security()
+        self.api.register_snapshots(self.snapshots)
+        self.api.register_panel()
+
+        # schedule api session cleanup
+        self.scheduler.register_task(
+            api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
+            now=True)
+
         # first start of supervisor?
         if not await self.homeassistant.exists():
             _LOGGER.info("No HomeAssistant docker found.")
-            await self._setup_homeassistant()
+            await homeassistant_setup(
+                self.config, self.loop, self.homeassistant, self.websession)
         else:
             await self.homeassistant.attach()

         # Load addons
-        arch = get_arch_from_image(self.supervisor.image)
-        await self.addons.prepare(arch)
+        await self.addons.prepare()

         # schedule addon update task
         self.scheduler.register_task(
             self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
+        self.scheduler.register_task(
+            addons_update(self.loop, self.addons), RUN_UPDATE_ADDONS_TASKS)

         # schedule self update task
         self.scheduler.register_task(
-            self._hassio_update, RUN_UPDATE_SUPERVISOR_TASKS)
+            hassio_update(self.config, self.supervisor, self.websession),
+            RUN_UPDATE_SUPERVISOR_TASKS)

+        # schedule snapshot update tasks
+        self.scheduler.register_task(
+            self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
+
+        # start addon mark as initialize
+        await self.addons.auto_boot(STARTUP_INITIALIZE)
+
     async def start(self):
         """Start HassIO orchestration."""
+        # on release channel, try update itself
+        # on beta channel, only read new versions
+        await asyncio.wait(
+            [hassio_update(self.config, self.supervisor, self.websession)()],
+            loop=self.loop
+        )
+
         # start api
         await self.api.start()
         _LOGGER.info("Start hassio api on %s", self.config.api_endpoint)

-        # HomeAssistant is already running / supervisor have only reboot
-        if await self.homeassistant.is_running():
-            _LOGGER.info("HassIO reboot detected")
-            return
+        try:
+            # HomeAssistant is already running / supervisor have only reboot
+            if await self.homeassistant.is_running():
+                _LOGGER.info("HassIO reboot detected")
+                return

-        # start addon mark as before
-        await self.addons.auto_boot(STARTUP_BEFORE)
+            # start addon mark as before
+            await self.addons.auto_boot(STARTUP_BEFORE)

-        # run HomeAssistant
-        await self.homeassistant.run()
+            # run HomeAssistant
+            await self.homeassistant.run()

-        # start addon mark as after
-        await self.addons.auto_boot(STARTUP_AFTER)
+            # start addon mark as after
+            await self.addons.auto_boot(STARTUP_AFTER)

+        finally:
+            # schedule homeassistant watchdog
+            self.scheduler.register_task(
+                homeassistant_watchdog(self.loop, self.homeassistant),
+                RUN_WATCHDOG_HOMEASSISTANT)
+
     async def stop(self, exit_code=0):
         """Stop a running orchestration."""
         # don't process scheduler anymore
-        self.scheduler.stop()
+        self.scheduler.suspend = True

-        # process stop task pararell
-        tasks = [self.websession.close(), self.api.stop()]
-        await asyncio.wait(tasks, loop=self.loop)
+        # process stop tasks
+        self.websession.close()
+        await self.api.stop()

         self.exit_code = exit_code
         self.loop.stop()

-    async def _setup_homeassistant(self):
-        """Install a homeassistant docker container."""
-        while True:
-            # read homeassistant tag and install it
-            if not self.config.current_homeassistant:
-                await self.config.fetch_update_infos()
-
-            tag = self.config.current_homeassistant
-            if tag and await self.homeassistant.install(tag):
-                break
-            _LOGGER.warning("Error on setup HomeAssistant. Retry in 60.")
-            await asyncio.sleep(60, loop=self.loop)
-
-        # store version
-        _LOGGER.info("HomeAssistant docker now installed.")
-
-    async def _hassio_update(self):
-        """Check and run update of supervisor hassio."""
-        if self.config.current_hassio == self.supervisor.version:
-            return
-
-        _LOGGER.info(
-            "Found new HassIO version %s.", self.config.current_hassio)
-        await self.supervisor.update(self.config.current_hassio)
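The scheduler registrations above mix bound coroutine methods (self.addons.reload) with factories from the new .tasks module: hassio_update(...) is called once with its dependencies and returns an async closure that the scheduler then invokes on every interval. A hedged sketch of that factory shape (illustrative only, not the actual tasks module):

def hassio_update(config, supervisor, websession):
    """Return an async task that updates the supervisor when needed."""
    async def _hassio_update():
        # compare the published version against the running one
        if config.last_hassio == supervisor.version:
            return
        await supervisor.update(config.last_hassio)
    return _hassio_update

# built once, then driven by the scheduler:
# scheduler.register_task(
#     hassio_update(config, supervisor, websession),
#     RUN_UPDATE_SUPERVISOR_TASKS)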
hassio/dock/__init__.py
@@ -5,7 +5,7 @@ import logging

 import docker

-from ..tools import get_version_from_env
+from ..const import LABEL_VERSION, LABEL_ARCH

 _LOGGER = logging.getLogger(__name__)

@@ -13,18 +13,19 @@ _LOGGER = logging.getLogger(__name__)
 class DockerBase(object):
     """Docker hassio wrapper."""

-    def __init__(self, config, loop, dock, image=None):
+    def __init__(self, config, loop, dock, image=None, timeout=30):
         """Initialize docker base wrapper."""
         self.config = config
         self.loop = loop
         self.dock = dock
         self.image = image
-        self.container = None
+        self.timeout = timeout
         self.version = None
+        self.arch = None
         self._lock = asyncio.Lock(loop=loop)

     @property
-    def docker_name(self):
+    def name(self):
         """Return name of docker container."""
         return None

@@ -33,6 +34,24 @@
         """Return True if a task is in progress."""
         return self._lock.locked()

+    def process_metadata(self, metadata, force=False):
+        """Read metadata and set it to object."""
+        # read image
+        if not self.image:
+            self.image = metadata['Config']['Image']
+
+        # read version
+        need_version = force or not self.version
+        if need_version and LABEL_VERSION in metadata['Config']['Labels']:
+            self.version = metadata['Config']['Labels'][LABEL_VERSION]
+        elif need_version:
+            _LOGGER.warning("Can't read version from %s", self.name)
+
+        # read arch
+        need_arch = force or not self.arch
+        if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
+            self.arch = metadata['Config']['Labels'][LABEL_ARCH]
+
     async def install(self, tag):
         """Pull docker image."""
         if self._lock.locked():
@@ -52,12 +71,12 @@
             image = self.dock.images.pull("{}:{}".format(self.image, tag))

             image.tag(self.image, tag='latest')
-            self.version = get_version_from_env(image.attrs['Config']['Env'])
-            _LOGGER.info("Tag image %s with version %s as latest",
-                         self.image, self.version)
+            self.process_metadata(image.attrs, force=True)
         except docker.errors.APIError as err:
             _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
             return False

+        _LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
         return True

     def exists(self):
@@ -73,8 +92,7 @@
         Need run inside executor.
         """
         try:
-            image = self.dock.images.get(self.image)
-            self.version = get_version_from_env(image.attrs['Config']['Env'])
+            self.dock.images.get(self.image)
         except docker.errors.DockerException:
             return False

@@ -92,17 +110,21 @@
         Need run inside executor.
         """
-        if not self.container:
-            try:
-                self.container = self.dock.containers.get(self.docker_name)
-                self.version = get_version_from_env(
-                    self.container.attrs['Config']['Env'])
-            except docker.errors.DockerException:
-                return False
-        else:
-            self.container.reload()
+        try:
+            container = self.dock.containers.get(self.name)
+            image = self.dock.images.get(self.image)
+        except docker.errors.DockerException:
+            return False

-        return self.container.status == 'running'
+        # container is not running
+        if container.status != 'running':
+            return False
+
+        # we run on a old image, stop and start it
+        if container.image.id != image.id:
+            return False
+
+        return True

     async def attach(self):
         """Attach to running docker container."""
@@ -119,17 +141,17 @@
         Need run inside executor.
         """
         try:
-            self.container = self.dock.containers.get(self.docker_name)
-            self.image = self.container.attrs['Config']['Image']
-            self.version = get_version_from_env(
-                self.container.attrs['Config']['Env'])
-            _LOGGER.info("Attach to image %s with version %s",
-                         self.image, self.version)
-        except (docker.errors.DockerException, KeyError):
-            _LOGGER.fatal(
-                "Can't attach to %s docker container!", self.docker_name)
+            if self.image:
+                obj_data = self.dock.images.get(self.image).attrs
+            else:
+                obj_data = self.dock.containers.get(self.name).attrs
+        except docker.errors.DockerException:
+            return False

+        self.process_metadata(obj_data)
+        _LOGGER.info(
+            "Attach to image %s with version %s", self.image, self.version)
+
         return True

     async def run(self):
@@ -163,23 +185,22 @@
         Need run inside executor.
         """
-        if not self.container:
+        try:
+            container = self.dock.containers.get(self.name)
+        except docker.errors.DockerException:
             return

-        _LOGGER.info("Stop %s docker application", self.image)
-
-        self.container.reload()
-        if self.container.status == 'running':
+        if container.status == 'running':
+            _LOGGER.info("Stop %s docker application", self.image)
             with suppress(docker.errors.DockerException):
-                self.container.stop()
+                container.stop(timeout=self.timeout)

         with suppress(docker.errors.DockerException):
-            self.container.remove(force=True)
-
-        self.container = None
+            _LOGGER.info("Clean %s docker application", self.image)
+            container.remove(force=True)

     async def remove(self):
-        """Remove docker container."""
+        """Remove docker images."""
         if self._lock.locked():
             _LOGGER.error("Can't excute remove while a task is in progress")
             return False
@@ -188,25 +209,32 @@
         return await self.loop.run_in_executor(None, self._remove)

     def _remove(self):
-        """remove docker container.
+        """remove docker images.

         Need run inside executor.
         """
-        if self._is_running():
-            self._stop()
+        # cleanup container
+        self._stop()

-        _LOGGER.info("Remove docker %s with latest and %s",
-                     self.image, self.version)
+        _LOGGER.info(
+            "Remove docker %s with latest and %s", self.image, self.version)

         try:
-            self.dock.images.remove(
-                image="{}:latest".format(self.image), force=True)
-            self.dock.images.remove(
-                image="{}:{}".format(self.image, self.version), force=True)
+            with suppress(docker.errors.ImageNotFound):
+                self.dock.images.remove(
+                    image="{}:latest".format(self.image), force=True)
+
+            with suppress(docker.errors.ImageNotFound):
+                self.dock.images.remove(
+                    image="{}:{}".format(self.image, self.version), force=True)
+
         except docker.errors.DockerException as err:
             _LOGGER.warning("Can't remove image %s -> %s", self.image, err)
             return False

+        # clean metadata
+        self.version = None
+        self.arch = None
         return True

     async def update(self, tag):
@@ -223,29 +251,27 @@
         Need run inside executor.
         """
-        old_image = "{}:{}".format(self.image, self.version)
+        was_running = self._is_running()

-        _LOGGER.info("Update docker %s with %s:%s",
-                     old_image, self.image, tag)
+        _LOGGER.info(
+            "Update docker %s with %s:%s", self.version, self.image, tag)

         # update docker image
-        if self._install(tag):
-            _LOGGER.info("Cleanup old %s docker", old_image)
-            self._stop()
-            try:
-                self.dock.images.remove(image=old_image, force=True)
-            except docker.errors.DockerException as err:
-                _LOGGER.warning(
-                    "Can't remove old image %s -> %s", old_image, err)
-            return True
+        if not self._install(tag):
+            return False

-        return False
+        # cleanup old stuff
+        if was_running:
+            self._run()
+        self._cleanup()

+        return True

     async def logs(self):
         """Return docker logs of container."""
         if self._lock.locked():
             _LOGGER.error("Can't excute logs while a task is in progress")
-            return False
+            return b""

         async with self._lock:
             return await self.loop.run_in_executor(None, self._logs)
@@ -255,10 +281,69 @@
         Need run inside executor.
         """
-        if not self.container:
-            return
+        try:
+            container = self.dock.containers.get(self.name)
+        except docker.errors.DockerException:
+            return b""

         try:
-            return self.container.logs(tail=100, stdout=True, stderr=True)
+            return container.logs(tail=100, stdout=True, stderr=True)
         except docker.errors.DockerException as err:
             _LOGGER.warning("Can't grap logs from %s -> %s", self.image, err)

+    async def restart(self):
+        """Restart docker container."""
+        if self._lock.locked():
+            _LOGGER.error("Can't excute restart while a task is in progress")
+            return False
+
+        async with self._lock:
+            return await self.loop.run_in_executor(None, self._restart)
+
+    def _restart(self):
+        """Restart docker container.
+
+        Need run inside executor.
+        """
+        try:
+            container = self.dock.containers.get(self.name)
+        except docker.errors.DockerException:
+            return False
+
+        _LOGGER.info("Restart %s", self.image)
+
+        try:
+            container.restart(timeout=self.timeout)
+        except docker.errors.DockerException as err:
+            _LOGGER.warning("Can't restart %s -> %s", self.image, err)
+            return False
+
+        return True
+
+    async def cleanup(self):
+        """Check if old version exists and cleanup."""
+        if self._lock.locked():
+            _LOGGER.error("Can't excute cleanup while a task is in progress")
+            return False
+
+        async with self._lock:
+            await self.loop.run_in_executor(None, self._cleanup)
+
+    def _cleanup(self):
+        """Check if old version exists and cleanup.
+
+        Need run inside executor.
+        """
+        try:
+            latest = self.dock.images.get(self.image)
+        except docker.errors.DockerException:
+            _LOGGER.warning("Can't find %s for cleanup", self.image)
+            return
+
+        for image in self.dock.images.list(name=self.image):
+            if latest.id == image.id:
+                continue
+
+            with suppress(docker.errors.DockerException):
+                _LOGGER.info("Cleanup docker images: %s", image.tags)
+                self.dock.images.remove(image.id, force=True)
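process_metadata() replaces the old get_version_from_env() helper: version and architecture now come from the io.hass.* labels baked into every image. Reading the same labels back with the docker SDK looks roughly like this (the image name is a placeholder):

import docker

client = docker.from_env()
image = client.images.get("homeassistant/amd64-base:latest")
labels = image.attrs['Config']['Labels'] or {}

print(labels.get('io.hass.version'),
      labels.get('io.hass.arch'),
      labels.get('io.hass.type'))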
hassio/dock/addon.py
@@ -1,30 +1,92 @@
 """Init file for HassIO addon docker object."""
 import logging
+from pathlib import Path
+import shutil

 import docker
+import requests

 from . import DockerBase
-from ..tools import get_version_from_env
+from .util import dockerfile_template
+from ..const import (
+    META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)

 _LOGGER = logging.getLogger(__name__)

 HASS_DOCKER_NAME = 'homeassistant'


 class DockerAddon(DockerBase):
     """Docker hassio wrapper for HomeAssistant."""

-    def __init__(self, config, loop, dock, addons_data, addon):
+    def __init__(self, config, loop, dock, addon):
         """Initialize docker homeassistant wrapper."""
         super().__init__(
-            config, loop, dock, image=addons_data.get_image(addon))
+            config, loop, dock, image=addon.image, timeout=addon.timeout)
         self.addon = addon
-        self.addons_data = addons_data

     @property
-    def docker_name(self):
+    def name(self):
         """Return name of docker container."""
-        return "addon_{}".format(self.addon)
+        return "addon_{}".format(self.addon.slug)

+    @property
+    def environment(self):
+        """Return environment for docker add-on."""
+        addon_env = self.addon.environment or {}
+
+        return {
+            **addon_env,
+            'TZ': self.config.timezone,
+        }
+
+    @property
+    def tmpfs(self):
+        """Return tmpfs for docker add-on."""
+        options = self.addon.tmpfs
+        if options:
+            return {"/tmpfs": "{}".format(options)}
+        return None
+
+    @property
+    def volumes(self):
+        """Generate volumes for mappings."""
+        volumes = {
+            str(self.addon.path_extern_data): {
+                'bind': '/data', 'mode': 'rw'
+            }}
+
+        addon_mapping = self.addon.map_volumes
+
+        if MAP_CONFIG in addon_mapping:
+            volumes.update({
+                str(self.config.path_extern_config): {
+                    'bind': '/config', 'mode': addon_mapping[MAP_CONFIG]
+                }})
+
+        if MAP_SSL in addon_mapping:
+            volumes.update({
+                str(self.config.path_extern_ssl): {
+                    'bind': '/ssl', 'mode': addon_mapping[MAP_SSL]
+                }})
+
+        if MAP_ADDONS in addon_mapping:
+            volumes.update({
+                str(self.config.path_extern_addons_local): {
+                    'bind': '/addons', 'mode': addon_mapping[MAP_ADDONS]
+                }})
+
+        if MAP_BACKUP in addon_mapping:
+            volumes.update({
+                str(self.config.path_extern_backup): {
+                    'bind': '/backup', 'mode': addon_mapping[MAP_BACKUP]
+                }})
+
+        if MAP_SHARE in addon_mapping:
+            volumes.update({
+                str(self.config.path_extern_share): {
+                    'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
+                }})
+
+        return volumes
+
     def _run(self):
         """Run docker image.
@@ -32,59 +94,159 @@ class DockerAddon(DockerBase):
         Need run inside executor.
         """
         if self._is_running():
-            return
+            return True

-        # cleanup old container
+        # cleanup
         self._stop()

-        # volumes
-        volumes = {
-            self.addons_data.path_data_docker(self.addon): {
-                'bind': '/data', 'mode': 'rw'
-            }}
-        if self.addons_data.need_config(self.addon):
-            volumes.update({
-                self.config.path_config_docker: {
-                    'bind': '/config', 'mode': 'rw'
-                }})
-        if self.addons_data.need_ssl(self.addon):
-            volumes.update({
-                self.config.path_ssl_docker: {
-                    'bind': '/ssl', 'mode': 'rw'
-                }})
+        # write config
+        if not self.addon.write_options():
+            return False

         try:
-            self.container = self.dock.containers.run(
+            self.dock.containers.run(
                 self.image,
-                name=self.docker_name,
+                name=self.name,
                 detach=True,
-                network_mode='bridge',
-                ports=self.addons_data.get_ports(self.addon),
-                volumes=volumes,
+                network_mode=self.addon.network_mode,
+                ports=self.addon.ports,
+                devices=self.addon.devices,
+                cap_add=self.addon.privileged,
+                environment=self.environment,
+                volumes=self.volumes,
+                tmpfs=self.tmpfs
             )

-            self.version = get_version_from_env(
-                self.container.attrs['Config']['Env'])
-
-            _LOGGER.info("Start docker addon %s with version %s",
-                         self.image, self.version)
-
         except docker.errors.DockerException as err:
             _LOGGER.error("Can't run %s -> %s", self.image, err)
             return False

+        _LOGGER.info(
+            "Start docker addon %s with version %s", self.image, self.version)
         return True

-    def _attach(self):
-        """Attach to running docker container.
+    def _install(self, tag):
+        """Pull docker image or build it.

         Need run inside executor.
         """
+        if self.addon.need_build:
+            return self._build(tag)
+
+        return super()._install(tag)
+
+    def _build(self, tag):
+        """Build a docker container.
+
+        Need run inside executor.
+        """
+        build_dir = Path(self.config.path_tmp, self.addon.slug)
+        try:
+            # prepare temporary addon build folder
+            try:
+                source = self.addon.path_addon_location
+                shutil.copytree(str(source), str(build_dir))
+            except shutil.Error as err:
+                _LOGGER.error("Can't copy %s to temporary build folder -> %s",
+                              source, err)
+                return False
+
+            # prepare Dockerfile
+            try:
+                dockerfile_template(
+                    Path(build_dir, 'Dockerfile'), self.config.arch,
+                    tag, META_ADDON)
+            except OSError as err:
+                _LOGGER.error("Can't prepare dockerfile -> %s", err)
+
+            # run docker build
+            try:
+                build_tag = "{}:{}".format(self.image, tag)
+
+                _LOGGER.info("Start build %s on %s", build_tag, build_dir)
+                image = self.dock.images.build(
+                    path=str(build_dir), tag=build_tag, pull=True)
+
+                image.tag(self.image, tag='latest')
+                self.process_metadata(image.attrs, force=True)
+
+            except (docker.errors.DockerException, TypeError) as err:
+                _LOGGER.error("Can't build %s -> %s", build_tag, err)
+                return False
+
+            _LOGGER.info("Build %s done", build_tag)
+            return True
+
+        finally:
+            shutil.rmtree(str(build_dir), ignore_errors=True)
+
+    async def export_image(self, path):
+        """Export current images into a tar file."""
+        if self._lock.locked():
+            _LOGGER.error("Can't excute export while a task is in progress")
+            return False
+
+        async with self._lock:
+            return await self.loop.run_in_executor(
+                None, self._export_image, path)
+
+    def _export_image(self, tar_file):
+        """Export current images into a tar file.
+
+        Need run inside executor.
+        """
         try:
-            self.container = self.dock.containers.get(self.docker_name)
-            self.version = get_version_from_env(
-                self.container.attrs['Config']['Env'])
-            _LOGGER.info("Attach to image %s with version %s",
-                         self.image, self.version)
-        except (docker.errors.DockerException, KeyError):
-            pass
+            image = self.dock.api.get_image(self.image)
+        except docker.errors.DockerException as err:
+            _LOGGER.error("Can't fetch image %s -> %s", self.image, err)
+            return False

+        try:
+            with tar_file.open("wb") as write_tar:
+                for chunk in image.stream():
+                    write_tar.write(chunk)
+        except (OSError, requests.exceptions.ReadTimeout) as err:
+            _LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
+            return False
+
+        _LOGGER.info("Export image %s to %s", self.image, tar_file)
+        return True
+
+    async def import_image(self, path, tag):
+        """Import a tar file as image."""
+        if self._lock.locked():
+            _LOGGER.error("Can't excute import while a task is in progress")
+            return False
+
+        async with self._lock:
+            return await self.loop.run_in_executor(
+                None, self._import_image, path, tag)
+
+    def _import_image(self, tar_file, tag):
+        """Import a tar file as image.
+
+        Need run inside executor.
+        """
+        try:
+            with tar_file.open("rb") as read_tar:
+                self.dock.api.load_image(read_tar)
+
+            image = self.dock.images.get(self.image)
+            image.tag(self.image, tag=tag)
+        except (docker.errors.DockerException, OSError) as err:
+            _LOGGER.error("Can't import image %s -> %s", self.image, err)
+            return False
+
+        _LOGGER.info("Import image %s and tag %s", tar_file, tag)
+        self.process_metadata(image.attrs, force=True)
+        self._cleanup()
+        return True
+
+    def _restart(self):
+        """Restart docker container.
+
+        Addons prepare some thing on start and that is normaly not repeatable.
+        Need run inside executor.
+        """
+        self._stop()
+        return self._run()
hassio/dock/homeassistant.py
@@ -4,7 +4,6 @@ import logging
 import docker

 from . import DockerBase
-from ..tools import get_version_from_env

 _LOGGER = logging.getLogger(__name__)

@@ -19,10 +18,22 @@ class DockerHomeAssistant(DockerBase):
         super().__init__(config, loop, dock, image=config.homeassistant_image)

     @property
-    def docker_name(self):
+    def name(self):
         """Return name of docker container."""
         return HASS_DOCKER_NAME

+    @property
+    def devices(self):
+        """Create list of special device to map into docker."""
+        if not self.config.homeassistant_devices:
+            return
+
+        devices = []
+        for device in self.config.homeassistant_devices:
+            devices.append("/dev/{0}:/dev/{0}:rwm".format(device))
+
+        return devices
+
     def _run(self):
         """Run docker image.

@@ -31,47 +42,34 @@ class DockerHomeAssistant(DockerBase):
         if self._is_running():
             return

-        # cleanup old container
+        # cleanup
         self._stop()

         try:
-            self.container = self.dock.containers.run(
+            self.dock.containers.run(
                 self.image,
-                name=self.docker_name,
+                name=self.name,
                 detach=True,
-                privileged=True,
+                devices=self.devices,
                 network_mode='host',
                 environment={
                     'HASSIO': self.config.api_endpoint,
+                    'TZ': self.config.timezone,
                 },
                 volumes={
-                    self.config.path_config_docker:
+                    str(self.config.path_extern_config):
                         {'bind': '/config', 'mode': 'rw'},
-                    self.config.path_ssl_docker:
-                        {'bind': '/ssl', 'mode': 'rw'},
+                    str(self.config.path_extern_ssl):
+                        {'bind': '/ssl', 'mode': 'ro'},
+                    str(self.config.path_extern_share):
+                        {'bind': '/share', 'mode': 'rw'},
                 })

-            self.version = get_version_from_env(
-                self.container.attrs['Config']['Env'])
-
-            _LOGGER.info("Start docker addon %s with version %s",
-                         self.image, self.version)
-
         except docker.errors.DockerException as err:
             _LOGGER.error("Can't run %s -> %s", self.image, err)
             return False

+        _LOGGER.info(
+            "Start homeassistant %s with version %s", self.image, self.version)
         return True

+    async def update(self, tag):
+        """Update homeassistant docker image."""
+        if self._lock.locked():
+            _LOGGER.error("Can't excute update while a task is in progress")
+            return False
+
+        async with self._lock:
+            if await self.loop.run_in_executor(None, self._update, tag):
+                await self.loop.run_in_executor(None, self._run)
+                return True
+
+        return False
hassio/dock/supervisor.py
@@ -2,8 +2,6 @@
 import logging
 import os

 import docker

 from . import DockerBase
 from ..const import RESTART_EXIT_CODE

@@ -13,14 +11,13 @@ _LOGGER = logging.getLogger(__name__)
 class DockerSupervisor(DockerBase):
     """Docker hassio wrapper for HomeAssistant."""

-    def __init__(self, config, loop, dock, hassio, image=None):
+    def __init__(self, config, loop, dock, stop_callback, image=None):
         """Initialize docker base wrapper."""
         super().__init__(config, loop, dock, image=image)

-        self.hassio = hassio
+        self.stop_callback = stop_callback

     @property
-    def docker_name(self):
+    def name(self):
         """Return name of docker container."""
         return os.environ['SUPERVISOR_NAME']

@@ -31,41 +28,14 @@ class DockerSupervisor(DockerBase):
             return False

         _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
-        old_version = self.version

         async with self._lock:
             if await self.loop.run_in_executor(None, self._install, tag):
-                self.config.hassio_cleanup = old_version
-                self.loop.create_task(self.hassio.stop(RESTART_EXIT_CODE))
+                self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
                 return True

         return False

-    async def cleanup(self):
-        """Check if old supervisor version exists and cleanup."""
-        if not self.config.hassio_cleanup:
-            return
-
-        async with self._lock:
-            if await self.loop.run_in_executor(None, self._cleanup):
-                self.config.hassio_cleanup = None
-
-    def _cleanup(self):
-        """Remove old image.
-
-        Need run inside executor.
-        """
-        old_image = "{}:{}".format(self.image, self.config.hassio_cleanup)
-
-        _LOGGER.info("Old supervisor docker found %s", old_image)
-        try:
-            self.dock.images.remove(image=old_image, force=True)
-        except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't remove old image %s -> %s", old_image, err)
-            return False
-
-        return True
-
     async def run(self):
         """Run docker image."""
         raise RuntimeError("Not support on supervisor docker container!")
@@ -81,3 +51,7 @@ class DockerSupervisor(DockerBase):
     async def remove(self):
         """Remove docker image."""
         raise RuntimeError("Not support on supervisor docker container!")
+
+    async def restart(self):
+        """Restart docker container."""
+        raise RuntimeError("Not support on supervisor docker container!")
hassio/dock/util.py (new file)
@@ -0,0 +1,40 @@
"""HassIO docker utilitys."""
import re

from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64


RESIN_BASE_IMAGE = {
    ARCH_ARMHF: "homeassistant/armhf-base:latest",
    ARCH_AARCH64: "homeassistant/aarch64-base:latest",
    ARCH_I386: "homeassistant/i386-base:latest",
    ARCH_AMD64: "homeassistant/amd64-base:latest",
}

TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")


def dockerfile_template(dockerfile, arch, version, meta_type):
    """Prepare a Hass.IO dockerfile."""
    buff = []
    resin_image = RESIN_BASE_IMAGE[arch]

    # read docker
    with dockerfile.open('r') as dock_input:
        for line in dock_input:
            line = TMPL_IMAGE.sub(resin_image, line)
            buff.append(line)

    # add metadata
    buff.append(create_metadata(version, arch, meta_type))

    # write docker
    with dockerfile.open('w') as dock_output:
        dock_output.writelines(buff)


def create_metadata(version, arch, meta_type):
    """Generate docker label layer for hassio."""
    return ('LABEL io.hass.version="{}" '
            'io.hass.arch="{}" '
            'io.hass.type="{}"').format(version, arch, meta_type)
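Given an add-on Dockerfile that starts with FROM %%BASE_IMAGE%%, DockerAddon._build() above calls dockerfile_template() on the copied build folder before running docker build. A usage sketch (paths and values are placeholders; assumes dockerfile_template from this file is in scope):

from pathlib import Path

dockerfile = Path("/data/tmp/example_addon/Dockerfile")
dockerfile_template(dockerfile, 'amd64', '1.0', 'addon')
# The file now begins with FROM homeassistant/amd64-base:latest and ends with:
# LABEL io.hass.version="1.0" io.hass.arch="amd64" io.hass.type="addon"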
124
hassio/host_control.py
Normal file
124
hassio/host_control.py
Normal file
@@ -0,0 +1,124 @@
"""Host control for HassIO."""
import asyncio
import json
import logging

import async_timeout

from .const import (
    SOCKET_HC, ATTR_LAST_VERSION, ATTR_VERSION, ATTR_TYPE, ATTR_FEATURES,
    ATTR_HOSTNAME, ATTR_OS)

_LOGGER = logging.getLogger(__name__)

TIMEOUT = 15
UNKNOWN = 'unknown'

FEATURES_SHUTDOWN = 'shutdown'
FEATURES_REBOOT = 'reboot'
FEATURES_UPDATE = 'update'
FEATURES_HOSTNAME = 'hostname'
FEATURES_NETWORK_INFO = 'network_info'
FEATURES_NETWORK_CONTROL = 'network_control'


class HostControl(object):
    """Client for host control."""

    def __init__(self, loop):
        """Initialize HostControl socket client."""
        self.loop = loop
        self.active = False
        self.version = UNKNOWN
        self.last_version = UNKNOWN
        self.type = UNKNOWN
        self.features = []
        self.hostname = UNKNOWN
        self.os_info = UNKNOWN

        if SOCKET_HC.is_socket():
            self.active = True

    async def _send_command(self, command):
        """Send command to host.

        Is a coroutine.
        """
        if not self.active:
            return

        reader, writer = await asyncio.open_unix_connection(
            str(SOCKET_HC), loop=self.loop)

        try:
            # send
            _LOGGER.info("Send '%s' to HostControl.", command)

            with async_timeout.timeout(TIMEOUT, loop=self.loop):
                writer.write("{}\n".format(command).encode())
                data = await reader.readline()

            response = data.decode().rstrip()
            _LOGGER.info("Receive from HostControl: %s.", response)

            if response == "OK":
                return True
            elif response == "ERROR":
                return False
            elif response == "WRONG":
                return None
            else:
                try:
                    return json.loads(response)
                except json.JSONDecodeError:
                    _LOGGER.warning("Json parse error from HostControl '%s'.",
                                    response)

        except asyncio.TimeoutError:
            _LOGGER.error("Timeout from HostControl!")

        finally:
            writer.close()

    async def load(self):
        """Load Info from host.

        Is a coroutine.
        """
        info = await self._send_command("info")
        if not info:
            return

        self.version = info.get(ATTR_VERSION, UNKNOWN)
        self.last_version = info.get(ATTR_LAST_VERSION, UNKNOWN)
        self.type = info.get(ATTR_TYPE, UNKNOWN)
        self.features = info.get(ATTR_FEATURES, [])
        self.hostname = info.get(ATTR_HOSTNAME, UNKNOWN)
        self.os_info = info.get(ATTR_OS, UNKNOWN)

    def reboot(self):
        """Reboot the host system.

        Return a coroutine.
        """
        return self._send_command("reboot")

    def shutdown(self):
        """Shutdown the host system.

        Return a coroutine.
        """
        return self._send_command("shutdown")

    def update(self, version=None):
        """Update the host system.

        Return a coroutine.
        """
        if version:
            return self._send_command("update {}".format(version))
        return self._send_command("update")

    def set_hostname(self, hostname):
        """Update hostname on host."""
        return self._send_command("hostname {}".format(hostname))
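For orientation, a minimal usage sketch of the new HostControl client; the surrounding wiring is hypothetical and assumes the host socket behind SOCKET_HC exists:

import asyncio

from hassio.host_control import FEATURES_REBOOT, HostControl


async def demo(loop):
    # active stays False (and every command is a no-op)
    # unless SOCKET_HC exists and is a socket
    host_control = HostControl(loop)

    # the "info" command fills version, hostname, features, ...
    await host_control.load()
    print(host_control.hostname, host_control.features)

    # the command methods return coroutines, so they must be awaited
    if FEATURES_REBOOT in host_control.features:
        await host_control.reboot()


loop = asyncio.get_event_loop()
loop.run_until_complete(demo(loop))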
@@ -1,102 +0,0 @@
"""Host controll for HassIO."""
import asyncio
import json
import logging
import os
import stat

import async_timeout

from .const import SOCKET_HC

_LOGGER = logging.getLogger(__name__)

TIMEOUT = 15

LEVEL_POWER = 1
LEVEL_UPDATE_HOST = 2
LEVEL_NETWORK = 4


class HostControll(object):
    """Client for host controll."""

    def __init__(self, loop):
        """Initialize HostControll socket client."""
        self.loop = loop
        self.active = False
        self.version = None

        mode = os.stat(SOCKET_HC)[stat.ST_MODE]
        if stat.S_ISSOCK(mode):
            self.active = True

    async def _send_command(self, command):
        """Send command to host.

        Is a coroutine.
        """
        if not self.active:
            return

        reader, writer = await asyncio.open_unix_connection(
            SOCKET_HC, loop=self.loop)

        try:
            # send
            _LOGGER.info("Send '%s' to HostControll.", command)

            with async_timeout.timeout(TIMEOUT, loop=self.loop):
                writer.write("{}\n".format(command).encode())
                data = await reader.readline()

            response = data.decode()
            _LOGGER.debug("Receive from HostControll: %s.", response)

            if response == "OK":
                return True
            elif response == "ERROR":
                return False
            elif response == "WRONG":
                return None
            else:
                try:
                    return json.loads(response)
                except json.JSONDecodeError:
                    _LOGGER.warning("Json parse error from HostControll.")

        except asyncio.TimeoutError:
            _LOGGER.error("Timeout from HostControll!")

        finally:
            writer.close()

    def info(self):
        """Return Info from host.

        Return a coroutine.
        """
        return self._send_command("info")

    def reboot(self):
        """Reboot the host system.

        Return a coroutine.
        """
        return self._send_command("reboot")

    def shutdown(self):
        """Shutdown the host system.

        Return a coroutine.
        """
        return self._send_command("shutdown")

    def host_update(self, version=None):
        """Update the host system.

        Return a coroutine.
        """
        if version:
            return self._send_command("host-update {}".format(version))
        return self._send_command("host-update")
160
hassio/panel/hassio-main.html
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/panel/hassio-main.html.gz
Normal file
Binary file not shown.
@@ -16,11 +16,7 @@ class Scheduler(object):
        """Initialize task schedule."""
        self.loop = loop
        self._data = {}
        self._stop = False

    def stop(self):
        """Stop to execute tasks in scheduler."""
        self._stop = True
        self.suspend = False

    def register_task(self, coro_callback, seconds, repeat=True,
                      now=False):
@@ -51,11 +47,8 @@ class Scheduler(object):
        """Run a scheduled task."""
        data = self._data.pop(idx)

        # stop execute tasks
        if self._stop:
            return

        self.loop.create_task(data[CALL]())
        if not self.suspend:
            self.loop.create_task(data[CALL]())

        if data[REPEAT]:
            task = self.loop.call_later(data[SEC], self._run_task, idx)
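The removed stop() flag is replaced by a public suspend attribute: scheduled callbacks are simply skipped while it is set. The intended contract, used by the snapshot manager below and shown here as a sketch inside a coroutine (scheduler being a Scheduler instance):

# long-running jobs pause periodic tasks and always resume them
scheduler.suspend = True
try:
    await run_long_snapshot_or_restore()   # hypothetical long operation
finally:
    scheduler.suspend = False              # always resume scheduled tasks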
313
hassio/snapshots/__init__.py
Normal file
@@ -0,0 +1,313 @@
"""Snapshot system control."""
import asyncio
from datetime import datetime
import logging
from pathlib import Path
import tarfile

from .snapshot import Snapshot
from .util import create_slug
from ..const import (
    ATTR_SLUG, FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)

_LOGGER = logging.getLogger(__name__)


class SnapshotsManager(object):
    """Manage snapshots."""

    def __init__(self, config, loop, sheduler, addons, homeassistant):
        """Initialize a snapshot manager."""
        self.config = config
        self.loop = loop
        self.sheduler = sheduler
        self.addons = addons
        self.homeassistant = homeassistant
        self.snapshots = {}
        self._lock = asyncio.Lock(loop=loop)

    @property
    def list_snapshots(self):
        """Return a list of all snapshot objects."""
        return set(self.snapshots.values())

    def get(self, slug):
        """Return snapshot object."""
        return self.snapshots.get(slug)

    def _create_snapshot(self, name, sys_type):
        """Initialize a new snapshot object from name."""
        date_str = str(datetime.utcnow())
        slug = create_slug(name, date_str)
        tar_file = Path(self.config.path_backup, "{}.tar".format(slug))

        # init object
        snapshot = Snapshot(self.config, self.loop, tar_file)
        snapshot.create(slug, name, date_str, sys_type)

        # set general data
        snapshot.homeassistant_version = self.homeassistant.version
        snapshot.homeassistant_devices = self.config.homeassistant_devices
        snapshot.repositories = self.config.addons_repositories

        return snapshot

    async def reload(self):
        """Load existing backups."""
        self.snapshots = {}

        async def _load_snapshot(tar_file):
            """Internal function to load snapshot."""
            snapshot = Snapshot(self.config, self.loop, tar_file)
            if await snapshot.load():
                self.snapshots[snapshot.slug] = snapshot

        tasks = [_load_snapshot(tar_file) for tar_file in
                 self.config.path_backup.glob("*.tar")]

        _LOGGER.info("Found %d snapshot files", len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    def remove(self, snapshot):
        """Remove a snapshot."""
        try:
            snapshot.tar_file.unlink()
            self.snapshots.pop(snapshot.slug, None)
        except OSError as err:
            _LOGGER.error("Can't remove snapshot %s -> %s", snapshot.slug, err)
            return False

        return True

    async def do_snapshot_full(self, name=""):
        """Create a full snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        snapshot = self._create_snapshot(name, SNAPSHOT_FULL)
        _LOGGER.info("Full-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # snapshot addons
                tasks = []
                for addon in self.addons.list_addons:
                    if not addon.is_installed:
                        continue
                    tasks.append(snapshot.import_addon(addon))

                if tasks:
                    _LOGGER.info("Full-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # snapshot folders
                _LOGGER.info("Full-Snapshot %s store folders", snapshot.slug)
                await snapshot.store_folders()

            _LOGGER.info("Full-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Full-Snapshot %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_snapshot_partial(self, name="", addons=None, folders=None):
        """Create a partial snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        addons = addons or []
        folders = folders or []
        snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL)

        _LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # snapshot addons
                tasks = []
                for slug in addons:
                    addon = self.addons.get(slug)
                    if addon.is_installed:
                        tasks.append(snapshot.import_addon(addon))

                if tasks:
                    _LOGGER.info("Partial-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # snapshot folders
                _LOGGER.info("Partial-Snapshot %s store folders %s",
                             snapshot.slug, folders)
                await snapshot.store_folders(folders)

            _LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Partial-Snapshot %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_restore_full(self, snapshot):
        """Restore a snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        if snapshot.sys_type != SNAPSHOT_FULL:
            _LOGGER.error(
                "Full-Restore %s is only a partial snapshot!", snapshot.slug)
            return False

        _LOGGER.info("Full-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # stop system
                tasks = []
                tasks.append(self.homeassistant.stop())

                for addon in self.addons.list_addons:
                    if addon.is_installed:
                        tasks.append(addon.stop())

                await asyncio.wait(tasks, loop=self.loop)

                # restore folders
                _LOGGER.info("Full-Restore %s restore folders", snapshot.slug)
                await snapshot.restore_folders()

                # start homeassistant restore
                self.config.homeassistant_devices = \
                    snapshot.homeassistant_devices
                task_hass = self.loop.create_task(
                    self.homeassistant.update(snapshot.homeassistant_version))

                # restore repositories
                await self.addons.load_repositories(snapshot.repositories)

                # restore addons
                tasks = []
                actual_addons = \
                    set(addon.slug for addon in self.addons.list_addons
                        if addon.is_installed)
                restore_addons = \
                    set(data[ATTR_SLUG] for data in snapshot.addons)
                remove_addons = actual_addons - restore_addons

                _LOGGER.info("Full-Restore %s restore addons %s, remove %s",
                             snapshot.slug, restore_addons, remove_addons)

                for slug in remove_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(addon.uninstall())
                    else:
                        _LOGGER.warning("Can't remove addon %s", slug)

                for slug in restore_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Full-Restore %s restore addons tasks %d",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # finish homeassistant task
                _LOGGER.info("Full-Restore %s wait until homeassistant ready",
                             snapshot.slug)
                await task_hass
                await self.homeassistant.run()

            _LOGGER.info("Full-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Full-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_restore_partial(self, snapshot, homeassistant=False,
                                 addons=None, folders=None):
        """Restore a snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        addons = addons or []
        folders = folders or []

        _LOGGER.info("Partial-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                tasks = []

                if FOLDER_HOMEASSISTANT in folders:
                    await self.homeassistant.stop()

                if folders:
                    _LOGGER.info("Partial-Restore %s restore folders %s",
                                 snapshot.slug, folders)
                    await snapshot.restore_folders(folders)

                if homeassistant:
                    self.config.homeassistant_devices = \
                        snapshot.homeassistant_devices
                    tasks.append(self.homeassistant.update(
                        snapshot.homeassistant_version))

                for slug in addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Partial-Restore %s run %d tasks",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # make sure homeassistant runs again
                await self.homeassistant.run()

            _LOGGER.info("Partial-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Partial-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()
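A hedged sketch of how this manager is meant to be driven from a coroutine; object construction is elided, and config, loop, scheduler, addons and homeassistant are assumed to come from the supervisor core:

manager = SnapshotsManager(config, loop, scheduler, addons, homeassistant)

# pick up *.tar files already sitting in the backup folder
await manager.reload()

# both calls return True on success, False on error or if another
# snapshot/restore process already holds the internal lock
if await manager.do_snapshot_full(name="nightly"):
    snapshot = next(iter(manager.list_snapshots))
    await manager.do_restore_full(snapshot)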
271
hassio/snapshots/snapshot.py
Normal file
@@ -0,0 +1,271 @@
"""Represent a snapshot file."""
import asyncio
import json
import logging
from pathlib import Path
import tarfile
from tempfile import TemporaryDirectory

import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
from .util import remove_folder
from ..const import (
    ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
    ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_DEVICES)
from ..tools import write_json_file

_LOGGER = logging.getLogger(__name__)


class Snapshot(object):
    """A single hassio snapshot."""

    def __init__(self, config, loop, tar_file):
        """Initialize a snapshot."""
        self.loop = loop
        self.config = config
        self.tar_file = tar_file
        self._data = {}
        self._tmp = None

    @property
    def slug(self):
        """Return snapshot slug."""
        return self._data.get(ATTR_SLUG)

    @property
    def sys_type(self):
        """Return snapshot type."""
        return self._data.get(ATTR_TYPE)

    @property
    def name(self):
        """Return snapshot name."""
        return self._data[ATTR_NAME]

    @property
    def date(self):
        """Return snapshot date."""
        return self._data[ATTR_DATE]

    @property
    def addons(self):
        """Return snapshot addons."""
        return self._data[ATTR_ADDONS]

    @property
    def folders(self):
        """Return list of saved folders."""
        return self._data[ATTR_FOLDERS]

    @property
    def repositories(self):
        """Return snapshot repositories."""
        return self._data[ATTR_REPOSITORIES]

    @repositories.setter
    def repositories(self, value):
        """Set snapshot repositories."""
        self._data[ATTR_REPOSITORIES] = value

    @property
    def homeassistant_version(self):
        """Return snapshot homeassistant version."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION)

    @homeassistant_version.setter
    def homeassistant_version(self, value):
        """Set snapshot homeassistant version."""
        self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] = value

    @property
    def homeassistant_devices(self):
        """Return snapshot homeassistant devices."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_DEVICES)

    @homeassistant_devices.setter
    def homeassistant_devices(self, value):
        """Set snapshot homeassistant devices."""
        self._data[ATTR_HOMEASSISTANT][ATTR_DEVICES] = value

    @property
    def size(self):
        """Return snapshot size."""
        if not self.tar_file.is_file():
            return 0
        return self.tar_file.stat().st_size / 1048576  # calc mbyte

    def create(self, slug, name, date, sys_type):
        """Initialize a new snapshot."""
        # init metadata
        self._data[ATTR_SLUG] = slug
        self._data[ATTR_NAME] = name
        self._data[ATTR_DATE] = date
        self._data[ATTR_TYPE] = sys_type

        # init other constructs
        self._data[ATTR_HOMEASSISTANT] = {}
        self._data[ATTR_ADDONS] = []
        self._data[ATTR_REPOSITORIES] = []
        self._data[ATTR_FOLDERS] = []

    async def load(self):
        """Read snapshot.json from tar file."""
        if not self.tar_file.is_file():
            _LOGGER.error("No tarfile %s", self.tar_file)
            return False

        def _load_file():
            """Read snapshot.json."""
            with tarfile.open(self.tar_file, "r:") as snapshot:
                json_file = snapshot.extractfile("./snapshot.json")
                return json_file.read()

        # read snapshot.json
        try:
            raw = await self.loop.run_in_executor(None, _load_file)
        except (tarfile.TarError, KeyError) as err:
            _LOGGER.error(
                "Can't read snapshot tarfile %s -> %s", self.tar_file, err)
            return False

        # parse data
        try:
            raw_dict = json.loads(raw)
        except json.JSONDecodeError as err:
            _LOGGER.error("Can't read data for %s -> %s", self.tar_file, err)
            return False

        # validate
        try:
            self._data = SCHEMA_SNAPSHOT(raw_dict)
        except vol.Invalid as err:
            _LOGGER.error("Can't validate data for %s -> %s", self.tar_file,
                          humanize_error(raw_dict, err))
            return False

        return True

    async def __aenter__(self):
        """Async context to open a snapshot."""
        self._tmp = TemporaryDirectory(dir=str(self.config.path_tmp))

        # create a snapshot
        if not self.tar_file.is_file():
            return self

        # extract an existing snapshot
        def _extract_snapshot():
            """Extract a snapshot."""
            with tarfile.open(self.tar_file, "r:") as tar:
                tar.extractall(path=self._tmp.name)

        await self.loop.run_in_executor(None, _extract_snapshot)

    async def __aexit__(self, exception_type, exception_value, traceback):
        """Async context to close a snapshot."""
        # existing snapshot or exception during build
        if self.tar_file.is_file() or exception_type is not None:
            return self._tmp.cleanup()

        # validate data
        try:
            self._data = SCHEMA_SNAPSHOT(self._data)
        except vol.Invalid as err:
            _LOGGER.error("Invalid data for %s -> %s", self.tar_file,
                          humanize_error(self._data, err))
            raise ValueError("Invalid config") from None

        # new snapshot, build it
        def _create_snapshot():
            """Create a new snapshot."""
            with tarfile.open(self.tar_file, "w:") as tar:
                tar.add(self._tmp.name, arcname=".")

        if write_json_file(Path(self._tmp.name, "snapshot.json"), self._data):
            await self.loop.run_in_executor(None, _create_snapshot)
        else:
            _LOGGER.error("Can't write snapshot.json")

        self._tmp.cleanup()
        self._tmp = None

    async def import_addon(self, addon):
        """Add an addon into the snapshot."""
        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))

        if not await addon.snapshot(snapshot_file):
            _LOGGER.error("Can't make snapshot from %s", addon.slug)
            return False

        # store to config
        self._data[ATTR_ADDONS].append({
            ATTR_SLUG: addon.slug,
            ATTR_NAME: addon.name,
            ATTR_VERSION: addon.version_installed,
        })

        return True

    async def export_addon(self, addon):
        """Restore an addon from the snapshot."""
        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))

        if not await addon.restore(snapshot_file):
            _LOGGER.error("Can't restore snapshot for %s", addon.slug)
            return False

        return True

    async def store_folders(self, folder_list=None):
        """Backup hassio data folders into the snapshot."""
        folder_list = folder_list or ALL_FOLDERS

        def _folder_save(name):
            """Internal function to snapshot a folder."""
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)

            try:
                with tarfile.open(snapshot_tar, "w:gz",
                                  compresslevel=1) as tar_file:
                    tar_file.add(origin_dir, arcname=".")

                self._data[ATTR_FOLDERS].append(name)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't snapshot folder %s -> %s", name, err)

        # run tasks
        tasks = [self.loop.run_in_executor(None, _folder_save, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    async def restore_folders(self, folder_list=None):
        """Restore hassio data folders from the snapshot."""
        folder_list = folder_list or ALL_FOLDERS

        def _folder_restore(name):
            """Internal function to restore a folder."""
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)

            # clean old stuff
            if origin_dir.is_dir():
                remove_folder(origin_dir)

            try:
                with tarfile.open(snapshot_tar, "r:gz") as tar_file:
                    tar_file.extractall(path=origin_dir)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't restore folder %s -> %s", name, err)

        # run tasks
        tasks = [self.loop.run_in_executor(None, _folder_restore, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
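The async context manager protocol above is what SnapshotsManager relies on; a minimal sketch of the intended lifecycle, with config and loop assumed to come from the supervisor core:

from datetime import datetime
from pathlib import Path

snapshot = Snapshot(config, loop, Path(config.path_backup, "abcd1234.tar"))
snapshot.create("abcd1234", "my backup", str(datetime.utcnow()), SNAPSHOT_FULL)

async with snapshot:
    # an existing tar is extracted into a tmp dir; a new one starts empty
    await snapshot.store_folders()
# on clean exit of a new snapshot: metadata is validated,
# snapshot.json is written and the tar file is built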
21
hassio/snapshots/util.py
Normal file
@@ -0,0 +1,21 @@
"""Util functions for snapshots."""
import hashlib
import shutil


def create_slug(name, date_str):
    """Generate a unique slug from name and date."""
    key = "{} - {}".format(date_str, name).lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def remove_folder(folder):
    """Remove folder data but not the folder itself."""
    for obj in folder.iterdir():
        try:
            if obj.is_dir():
                shutil.rmtree(str(obj), ignore_errors=True)
            else:
                obj.unlink()
        except (OSError, shutil.Error):
            pass
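create_slug is deterministic for identical inputs; a small demonstration (the value in the comment is illustrative, not computed):

from datetime import datetime

from hassio.snapshots.util import create_slug

date_str = str(datetime.utcnow())
slug = create_slug("my backup", date_str)
# slug is the first 8 hex characters of
# sha1("<date_str> - my backup"), e.g. something like 'a1b2c3d4'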
31
hassio/snapshots/validate.py
Normal file
@@ -0,0 +1,31 @@
"""Validate snapshot and restore data."""

import voluptuous as vol

from ..const import (
    ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
    ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_DEVICES,
    FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
    SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
from ..validate import HASS_DEVICES

ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]

# pylint: disable=no-value-for-parameter
SCHEMA_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Required(ATTR_DATE): vol.Coerce(str),
    vol.Required(ATTR_HOMEASSISTANT): vol.Schema({
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
    }),
    vol.Optional(ATTR_FOLDERS, default=[]): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS, default=[]): [vol.Schema({
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
    })],
    vol.Optional(ATTR_REPOSITORIES, default=[]): [vol.Url()],
}, extra=vol.ALLOW_EXTRA)
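A sketch of a minimal document that SCHEMA_SNAPSHOT accepts, assuming the ATTR_* constants resolve to the lowercase key names shown here:

example = {
    'slug': 'a1b2c3d4',
    'type': 'full',                # SNAPSHOT_FULL
    'name': 'my backup',
    'date': '2017-07-01 12:00:00',
    'homeassistant': {'version': '0.48.1'},
}

validated = SCHEMA_SNAPSHOT(example)
# optional keys are filled with their defaults
assert validated['folders'] == []
assert validated['addons'] == []
assert validated['repositories'] == []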
85
hassio/tasks.py
Normal file
@@ -0,0 +1,85 @@
"""Multiple tasks."""
import asyncio
from datetime import datetime
import logging

_LOGGER = logging.getLogger(__name__)


def api_sessions_cleanup(config):
    """Create scheduler task for cleanup of api sessions."""
    async def _api_sessions_cleanup():
        """Cleanup old api sessions."""
        now = datetime.now()
        for session, until_valid in config.security_sessions.items():
            if now >= until_valid:
                config.security_sessions = (session, None)

    return _api_sessions_cleanup


def addons_update(loop, addons):
    """Create scheduler task for auto update of addons."""
    async def _addons_update():
        """Check if an update is available for an addon and update it."""
        tasks = []
        for addon in addons.list_addons:
            if not addon.is_installed or not addon.auto_update:
                continue

            if addon.version_installed != addon.version:
                tasks.append(addon.update())

        if tasks:
            _LOGGER.info("Addon auto update process %d tasks", len(tasks))
            await asyncio.wait(tasks, loop=loop)

    return _addons_update


def hassio_update(config, supervisor, websession):
    """Create scheduler task for update of supervisor hassio."""
    async def _hassio_update():
        """Check and run update of supervisor hassio."""
        await config.fetch_update_infos(websession)
        if config.last_hassio == supervisor.version:
            return

        # don't perform an update on beta/dev channel
        if config.upstream_beta:
            _LOGGER.warning("Ignore Hass.IO update on beta upstream!")
            return

        _LOGGER.info("Found new HassIO version %s.", config.last_hassio)
        await supervisor.update(config.last_hassio)

    return _hassio_update


def homeassistant_watchdog(loop, homeassistant):
    """Create scheduler task for monitoring the running state."""
    async def _homeassistant_watchdog():
        """Check the running state and restart Home Assistant if it is down."""
        if homeassistant.in_progress or await homeassistant.is_running():
            return

        loop.create_task(homeassistant.run())

    return _homeassistant_watchdog


async def homeassistant_setup(config, loop, homeassistant, websession):
    """Install a homeassistant docker container."""
    while True:
        # read homeassistant tag and install it
        if not config.last_homeassistant:
            await config.fetch_update_infos(websession)

        tag = config.last_homeassistant
        if tag and await homeassistant.install(tag):
            break
        _LOGGER.warning("Error on setup of HomeAssistant. Retry in 60 seconds.")
        await asyncio.sleep(60, loop=loop)

    # store version
    _LOGGER.info("HomeAssistant docker now installed.")
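Each factory returns a coroutine function that plugs into Scheduler.register_task(coro_callback, seconds, repeat=True, now=False); a wiring sketch with illustrative intervals:

# run the watchdog every 15 seconds, starting immediately
scheduler.register_task(
    homeassistant_watchdog(loop, homeassistant), 15, now=True)

# look for add-on updates every 10 minutes
scheduler.register_task(addons_update(loop, addons), 600)

# one-shot task: check for a supervisor update once, an hour from now
scheduler.register_task(
    hassio_update(config, supervisor, websession), 3600, repeat=False)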
@@ -1,22 +1,23 @@
"""Tools file for HassIO."""
import asyncio
from contextlib import suppress
import json
import logging
import re
import socket

import aiohttp
import async_timeout
import pytz
import voluptuous as vol

from .const import URL_HASSIO_VERSION, URL_HASSIO_VERSION_BETA

_LOGGER = logging.getLogger(__name__)

_RE_VERSION = re.compile(r"VERSION=(.*)")
_IMAGE_ARCH = re.compile(r".*/([a-z0-9]*)-hassio-supervisor")
FREEGEOIP_URL = "https://freegeoip.io/json/"


async def fetch_current_versions(websession, beta=False):
async def fetch_last_versions(websession, beta=False):
    """Fetch current versions from github.

    Is a coroutine.
@@ -34,24 +35,6 @@ async def fetch_current_versions(websession, beta=False):
        _LOGGER.warning("Can't parse versions from %s! %s", url, err)


def get_arch_from_image(image):
    """Return arch from hassio image name."""
    found = _IMAGE_ARCH.match(image)
    if found:
        return found.group(1)


def get_version_from_env(env_list):
    """Extract Version from ENV list."""
    for env in env_list:
        found = _RE_VERSION.match(env)
        if found:
            return found.group(1)

    _LOGGER.error("Can't find VERSION in env")
    return None


def get_local_ip(loop):
    """Retrieve local IP address.

@@ -77,9 +60,10 @@ def get_local_ip(loop):
def write_json_file(jsonfile, data):
    """Write a json file."""
    try:
        with open(jsonfile, 'w') as conf_file:
            conf_file.write(json.dumps(data))
    except OSError:
        json_str = json.dumps(data, indent=2)
        with jsonfile.open('w') as conf_file:
            conf_file.write(json_str)
    except (OSError, json.JSONDecodeError):
        return False

    return True
@@ -87,5 +71,30 @@ def write_json_file(jsonfile, data):

def read_json_file(jsonfile):
    """Read a json file and return a dict."""
    with open(jsonfile, 'r') as cfile:
    with jsonfile.open('r') as cfile:
        return json.loads(cfile.read())


def validate_timezone(timezone):
    """Validate voluptuous timezone."""
    try:
        pytz.timezone(timezone)
    except pytz.exceptions.UnknownTimeZoneError:
        raise vol.Invalid(
            "Invalid time zone passed in. Valid options can be found here: "
            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
            from None

    return timezone


async def fetch_timezone(websession):
    """Read timezone from freegeoip."""
    data = {}
    with suppress(aiohttp.ClientError, asyncio.TimeoutError,
                  json.JSONDecodeError, KeyError):
        with async_timeout.timeout(10, loop=websession.loop):
            async with websession.get(FREEGEOIP_URL) as request:
                data = await request.json()

    return data.get('time_zone', 'UTC')
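After this change, write_json_file and read_json_file expect pathlib.Path objects rather than plain string paths; a small round-trip sketch (the path is illustrative):

from pathlib import Path

from hassio.tools import read_json_file, write_json_file

config_file = Path("/tmp/example.json")

# write_json_file returns False instead of raising on I/O errors
if write_json_file(config_file, {'hello': 'world'}):
    assert read_json_file(config_file) == {'hello': 'world'}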
32
hassio/validate.py
Normal file
@@ -0,0 +1,32 @@
"""Validate functions."""
import voluptuous as vol

NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
HASS_DEVICES = [vol.Match(r"^[^/]*$")]


def convert_to_docker_ports(data):
    """Convert data into docker port list."""
    # dynamic ports
    if data is None:
        return

    # single port
    if isinstance(data, int):
        return NETWORK_PORT(data)

    # port list
    if isinstance(data, list) and len(data) > 2:
        return vol.Schema([NETWORK_PORT])(data)

    # ip port mapping
    if isinstance(data, list) and len(data) == 2:
        return (vol.Coerce(str)(data[0]), NETWORK_PORT(data[1]))

    raise vol.Invalid("Can't validate docker host settings")


DOCKER_PORTS = vol.Schema({
    vol.All(vol.Coerce(str), vol.Match(r"^\d+(?:/tcp|/udp)?$")):
        convert_to_docker_ports,
})
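A few illustrative inputs for DOCKER_PORTS and what they normalize to, following the branches of convert_to_docker_ports above:

# container port -> host port (single int)
DOCKER_PORTS({'8123/tcp': 8123})              # {'8123/tcp': 8123}

# container port -> (host ip, host port) two-element list
DOCKER_PORTS({'53/udp': ['127.0.0.1', 53]})   # {'53/udp': ('127.0.0.1', 53)}

# None requests a dynamically assigned host port
DOCKER_PORTS({'80/tcp': None})                # {'80/tcp': None}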
1
home-assistant-polymer
Submodule
Submodule home-assistant-polymer added at d2a56655d0
BIN
misc/hassio.png
Normal file
Binary file not shown.
After: 42 KiB
1
misc/hassio.xml
Normal file
@@ -0,0 +1 @@
<mxfile userAgent="Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36" version="6.5.6" editor="www.draw.io" type="device"><diagram name="Page-1">5Vptc6M2EP41/ng3gHj9mPiSy820c5n6Q3sfsVBsNTJyhYid/voKkABZkOBY+KYtmYnR6pVn99ld1l6A5e74laX77a80Q2ThOdlxAb4sPC8OY/G/Erw2At9xG8GG4awR9QQr/DeSQkdKS5yhQhvIKSUc73UhpHmOINdkKWP0oA97okTfdZ9ukCFYwZSY0t9xxrdS6oZJ1/GA8GYrt469sOlYp/B5w2iZy/0WHniqr6Z7l6q15IMW2zSjh54I3C3AklHKm7vdcYlIBa2CrZl3P9LbnpuhnE+Z4DUTXlJSInXikIipt09UrCAOyF8lKOFfJVUdn4paZTdigNjtKD5ERw206DtIYKrenLJdSrrJ4m5TfX5fqX3E2Zqtmg4JS7urd9hijlb7FFbtg7A2MWjLd0S03Oo0mJAlJZTVowXYKIRQyAvO6DPq9Tj1Jc+/kutLvF4Q4+g4CqHbKkbYO6I7xNmrGKImJKCZIm09SKRuD53l+Arobc9oQjkulca6aZfuFCZupM6G9QcM/X3LcaW31WvB0e5CNGGG1vF6CE0QggRkrb7sAhhNBNCzAKBvAPiFwmfELkUOokCQ/trI+SZy3hBywAJyoYHcw9JArXaFqJpRUe9MLscQDXN5HQd+4NjB0A8DHcPQxDBwTAgDCxAmBl4oE3FINinjW7qheUruOumtjmgPPXTE/I9K/DkKZPOH6srFwZq+QDV/yBX+RJy/ygiclpwKUbfxL5Tu5RrNUavzvQ20eBxaMihHRTJ4p2yDeM9uTHUwRFKOX/TVLwFX5RK20fXeQDcB3im+deMRMSweALGfBbp/JdCj0Xxi3UX48xIMN6wSjNMEYlXuEXvBhXAJagOm+h7Sovj2fTTBaMXr0aSjMwP3fbdluKflMgybVEN3aFmA4sy347ZAoLstMJB1uPGA33JtRE3Xm4Nbbo9Yyou13NJ4VbuxeUnkqveOHouiK7EIzOO6NHh1dE/iQtc89VyFwIPfVK9YQgCJYBqGSnyPidpzqm5QnpmLCWFvqcFMfrm0qlgvvlZQUm8cvaxJrPLpRjy6wLByU9dxRSmKn6CtLFR3Rd5A/t56HS1/9224ovDKXHE/O3qQ/+zG8aWBfiKtPmjxwLR4d0Sn1i3enyVUSJ30srCJCPYcTk5zpHmb8xQ2Vl+AJXtp+WpPYdeKPa5ZUrjJMpoXhhqLbbqvbveMQlQU73sn3ZVN9lX34qr9fZMTCt07XhiBxANhEHtx7PhgpqRqyJN5bmB6ssSCI1O1nDmJ0rVOHdWlqYAkU59uc7zoXEAAOfWR4vq9Q5WqneE0Wq3Q0FJO6hdSz1ynobKxTm0U7dNMs5PYJCjk1KxYKX6WO9IMALcVOzAUyKdrRB5pgTmmuRiyppzTnRhAqo7btoitVVbrMna3xg3Bm2oup+fRvCvEnpZu5QYWiHxS0wEDNR0wkJBYqciaNJ5AUifSWOq/x1LX5OgUOk5Ity8PgO97LQshEng/L0SqvXsMPBwOpvcmBO+LWg2SiZDQMrs4Tl6FQInuz3xnIKeP5iovgLcLo9K4P5DEn8mRmTLEXqzt3hyaQ3qj0faDNPFNmjTmaz+S+icmc+pN7YVAMP6tjfNQrkcjIUzZ5fQL62uAfkH1Z4d+CThJJ4boN1TdsxLBopnY17f7yGaWOT9lP8i+YAb2TVZjYJDkK+bbuekxFp2QmwUomocevnppvQo94v9LcEpCnaOR5dgU/idjk/m9+G9oX71qUYbReBXl30s+Vf6dgXyi2f0WqlFG93szcPcP</diagram></mxfile>
BIN
misc/security.png
Normal file
Binary file not shown.
After: 36 KiB
1
misc/security.xml
Normal file
@@ -0,0 +1 @@
<mxfile userAgent="Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:53.0) Gecko/20100101 Firefox/53.0" version="6.5.8" editor="www.draw.io" type="device"><diagram name="Page-1">5Vxdd5s4EP01fmwOkgCbx9hp2j7sNrvpnnYfiVFsTjDyghwn++tXGMmAxileEB9O+9BjBhjM3GHmzjXKhCw2L58Sf7v+jQU0mmAreJmQmwnGU4eI/zPDa24gyM0NqyQMchMqDPfhv1QaLWndhQFNKwdyxiIebqvGJYtjuuQVm58kbF897JFF1atu/RUFhvulH0Hr9zDg69w6w25h/0zD1VpdGblevufBXz6tEraL5fUmmDwe/uW7N77yJW80XfsB25dM5OOELBLGeP5p87KgURZaFbb8vNs39h6/d0Jjfs4JOD/h2Y928tZvwyTlwnTP/YTLL8lfVWA4fRF+52u+iYQBiY8pT9gTXbCIJcISs1gcOX8Mo0gz+VG4isXmUnwzKuzzZ5rwUIT8Wu7YhEGQXWa+X4ec3m/9ZXbNvcivzCGL+b38Go7aztMGeWIb3rcMRXYV+lIyyTh8omxDefIqDpF7ySw/Q6asKxHaF/gjS9rWJewVkr5MudXRcRF28UFG/jQKBKDwVypipAe/FPUtC2N+uKIznzg3mYUmobhwFtoblvA1W7HYj+4KawcxQhgGyT0Vo5mBINkgSJ/9NB1hkDAiw0XJAVFaiyhdffk6wkDZ7oCBckGg2JbGh1uKs2b2drT0wvXAOGcbsYPGwXXWfDJbxJZPP4uSqK4ryiuZTYNKU4JhK4VFRSChkc/D52rbOhUW6e0uQ7pAwNOeZ1sLbMp2yZLKk8ptRPMjoNMc4aqj/HaBowNIxzs8C7cpwE2ckdLlLgm5uNPbMH5kvaLnDIYenmrPj9sQPuLUODIH3wzCNxVxFtdz/9llrGcexiEvtibkOiNwfpTS7KjpTVtsD085mQd+uqaBPE/slmRilm29hPyH+PzBurIcuf232LauCFH7S5XwxvpZpuQQVDKlyaPfMlNsy60AjK2mmYJrHJnLFA9kip8+ZfsP+WHdfe8+E856/kk/EOqsApOGECJS48gchGqcK2GYUm4Sw8vss7hpoT5GVDlyvM6wg6NhtdGyLQ9ZLAi4G2WF+kHMK+7qULK1gr4VBHTPkkAv6nrJt7b70iFGir1Kj/K4iC6vsWPPUGMHjgzmCxxiq/mS0jQVCfNGvvyvZOk1VxQdQFcWmlbowNRtRQfsMacc0XWNpikHHL2RcgIG/7V0mJxJWyYlFA306lSk5Rv5Jg94oq+mM66egDSqW31xSm16J9OmGTOrcWSwSEF5xMi43xGSA1FL0rTd6NQSODKIJNRvfmfJxodQvmPJGlfZoN2nZo2gEHMZorWDYJQ6UxkR1DsuRLXuN0xw2L8c2brXSGE4Ug+mW6vkHn6gdpqKIbpw7RDcVcc6JtpolGv11I1g3HAcQ+MGcGQQwBOKyBnaNU/E0XhROY4zvn2fGrfKqUZ1wrDK7TSWTXCNI4NJBWWTXOYejb6tiF7fU4jbVIHQpxDgyCB6UF/IZ4Xete3x9GK3aSnXxW3X7kzcPvHrfzdi5SAypVuVKV3itqros1EzhykyxByAoz6FylOvNbx7obI3XqANbNPG70nMahwZrFBQOBizUjkUSZjqM3VTkgAcGYQSihuXoZR5fQobBAobF6KU9RsmqCJcjlLWb6TguD6YUqaSe3h27plSyrzulDJS9ypB70qZeupGwHc9U0oZcGQQwPqf3dsoZflxFy6UkTZlwrBQ5pkSyoAjgzkFf7ovhLLbb1+/3XWfDGfVCnzubGyYCiPLlGAGPRmEESovZcXMCJAX2pqRZUo5Q1Z30hmpW4DRjXSWdYVDLzgcNcu64gVqaSrZRsotEDIlpkFPfapppH6VyftT03ojD/qqvebLjmZ1ngyWLSjCjFlPG4xEIFOCGvRkDky1TPHEy3+iSooiia2TPOLXeRVw5kqeVWoauKtXAW2oSY1U4LQ1noQ9G4SpuwXsGIRptAqnM2ScoPwzZolz0FBBouMvRTvwOT3WQJ2GywJZEHAzHLrgzIpB54wZ2a0Ys32iOaoHaQDGfHyd+rjQXWld7ZfMqwbaQb+E5Kc6s0mVzeDANsR6LNIy1fCJVDt3CUYXw5lWWWyvYaoRp85Tn8OZA8nbH39+WLCAts2YrtZTnVtuWg9Wem1pysXJTAPcsc8DvAmckPyNHM5z9ZbWo5UOgtvw+UWkzpNBOCFJ/ZKvzv7lJiqtPx8LV3l1lXpNp+VIJTaLv/mWo1b8XT3y8T8=</diagram></mxfile>
11
setup.py
@@ -29,7 +29,13 @@ setup(
    keywords=['docker', 'home-assistant', 'api'],
    zip_safe=False,
    platforms='any',
    packages=['hassio', 'hassio.dock', 'hassio.api', 'hassio.addons'],
    packages=[
        'hassio',
        'hassio.dock',
        'hassio.api',
        'hassio.addons',
        'hassio.snapshots'
    ],
    include_package_data=True,
    install_requires=[
        'async_timeout',
@@ -38,5 +44,8 @@ setup(
        'colorlog',
        'voluptuous',
        'gitpython',
        'pyotp',
        'pyqrcode',
        'pytz'
    ]
)
10
version.json
@@ -1,7 +1,7 @@
{
    "hassio_tag": "0.12",
    "homeassistant_tag": "0.43",
    "resinos_version": "0.4",
    "resinhup_version": "0.1",
    "generic_hc_version": "0.1"
    "hassio": "0.41",
    "homeassistant": "0.48.1",
    "resinos": "0.8",
    "resinhup": "0.1",
    "generic": "0.3"
}
@@ -1,7 +0,0 @@
{
    "hassio_tag": "0.13",
    "homeassistant_tag": "0.43",
    "resinos_version": "0.4",
    "resinhup_version": "0.1",
    "generic_hc_version": "0.1"
}