Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-11 18:19:21 +00:00
Compare commits
416 Commits
`.github/release-drafter.yml` (vendored, new file, 4 additions)

````diff
@@ -0,0 +1,4 @@
+template: |
+  ## What's Changed
+
+  $CHANGES
````
`.travis.yml` (12 changes)

````diff
@@ -1,12 +1,6 @@
-sudo: false
-matrix:
-  fast_finish: true
-  include:
-    - python: "3.6"
-
-cache:
-  directories:
-    - $HOME/.cache/pip
+sudo: true
+dist: xenial
 install: pip install -U tox
 language: python
+python: 3.7
 script: tox
````
`API.md` (169 changes)

````diff
@@ -36,7 +36,7 @@ The addons from `addons` are only installed one.
     "version": "INSTALL_VERSION",
     "last_version": "LAST_VERSION",
     "arch": "armhf|aarch64|i386|amd64",
-    "beta_channel": "true|false",
+    "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
     "wait_boot": "int",
     "addons": [
````
````diff
@@ -72,7 +72,7 @@ Optional:
 
 ```json
 {
-    "beta_channel": "true|false",
+    "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
     "wait_boot": "int",
     "addons_repositories": [
````
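For clients, the switch from the boolean `beta_channel` to the string `channel` changes the shape of the options payload. A minimal sketch of posting the new key from inside an add-on — the `http://hassio` base URL, the `HASSIO_TOKEN` variable, and the `X-HASSIO-KEY` header are assumptions about the add-on environment, not part of this diff:

```python
# Hypothetical client call: move the Supervisor to the beta channel.
import os
import requests  # third-party HTTP client, used here for brevity

SUPERVISOR = "http://hassio"            # assumed internal API host
TOKEN = os.environ["HASSIO_TOKEN"]      # assumed token injected into add-ons

response = requests.post(
    f"{SUPERVISOR}/supervisor/options",
    headers={"X-HASSIO-KEY": TOKEN},    # assumed auth header
    json={"channel": "beta"},           # replaces {"beta_channel": "true"}
    timeout=10,
)
response.raise_for_status()
```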
````diff
@@ -217,22 +217,22 @@ return:
 ### Host
 
-- POST `/host/reload`
-
 - POST `/host/shutdown`
 
 - POST `/host/reboot`
 
 - GET `/host/info`
 
 ```json
 {
-    "type": "",
-    "version": "",
-    "last_version": "",
-    "features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"],
-    "hostname": "",
-    "os": "",
-    "audio": {
-        "input": "0,0",
-        "output": "0,0"
-    }
+    "hostname": "hostname|null",
+    "features": ["shutdown", "reboot", "hostname", "services", "hassos"],
+    "operating_system": "HassOS XY|Ubuntu 16.4|null",
+    "kernel": "4.15.7|null",
+    "chassis": "specific|null",
+    "deployment": "stable|beta|dev|null",
+    "cpe": "xy|null",
 }
 ```
````
````diff
@@ -240,22 +240,67 @@ return:
 
 ```json
 {
-    "audio_input": "0,0",
-    "audio_output": "0,0"
     "hostname": "",
 }
 ```
 
-- POST `/host/update`
+- POST `/host/reload`
 
-Optional:
+#### Services
+
+- GET `/host/services`
 ```json
 {
-    "version": "VERSION"
+    "services": [
+        {
+            "name": "xy.service",
+            "description": "XY ...",
+            "state": "active|"
+        }
+    ]
 }
 ```
 
-- GET `/host/hardware`
+- POST `/host/service/{unit}/stop`
+
+- POST `/host/service/{unit}/start`
+
+- POST `/host/service/{unit}/reload`
+
+### HassOS
+
+- GET `/hassos/info`
+```json
+{
+    "version": "2.3",
+    "version_cli": "7",
+    "version_latest": "2.4",
+    "version_cli_latest": "8",
+    "board": "ova|rpi"
+}
+```
+
+- POST `/hassos/update`
+```json
+{
+    "version": "optional"
+}
+```
+
+- POST `/hassos/update/cli`
+```json
+{
+    "version": "optional"
+}
+```
+
+- POST `/hassos/config/sync`
+
+Load host configs from a USB stick.
+
+### Hardware
+
+- GET `/hardware/info`
+```json
+{
+    "serial": ["/dev/xy"],
````
````diff
@@ -274,23 +319,18 @@ Optional:
 }
 ```
 
-- POST `/host/reload`
-
-### Network
-
-- GET `/network/info`
+- GET `/hardware/audio`
 ```json
 {
-    "hostname": ""
-}
-```
-
-- POST `/network/options`
-
-```json
-{
-    "hostname": "",
+    "audio": {
+        "input": {
+            "0,0": "Mic"
+        },
+        "output": {
+            "1,0": "Jack",
+            "1,1": "HDMI"
+        }
+    }
 }
 ```
````
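Reading the reworked endpoints side by side helps: `/host/info` now reports OS-level facts, and the audio device map has moved under `/hardware/audio`. A sketch under the same assumptions as the earlier example — the base URL, token header, and the usual `result`/`data` response envelope are not guaranteed by this diff:

```python
# Hypothetical reads against the new host/hardware endpoints.
import os
import requests

session = requests.Session()
session.headers["X-HASSIO-KEY"] = os.environ["HASSIO_TOKEN"]  # assumed

host = session.get("http://hassio/host/info", timeout=10).json()
print(host["data"]["operating_system"])  # e.g. "HassOS 2.3"
print(host["data"]["features"])          # e.g. ["shutdown", "reboot", ...]

audio = session.get("http://hassio/hardware/audio", timeout=10).json()
print(audio["data"]["audio"]["output"])  # e.g. {"1,0": "Jack", "1,1": "HDMI"}
```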
````diff
@@ -302,6 +342,7 @@ Optional:
 {
     "version": "INSTALL_VERSION",
     "last_version": "LAST_VERSION",
+    "machine": "Image machine type",
     "image": "str",
     "custom": "bool -> if custom image",
     "boot": "bool",
@@ -340,6 +381,7 @@ Output is the raw Docker log.
     "port": "port for access hass",
     "ssl": "bool",
     "password": "",
+    "refresh_token": "",
     "watchdog": "bool",
     "startup_time": 600
 }
````
````diff
@@ -410,6 +452,7 @@ Get all available addons.
 ```json
 {
     "name": "xy bla",
     "slug": "xdssd_xybla",
     "description": "description",
+    "long_description": "null|markdown",
     "auto_update": "bool",
@@ -427,6 +470,7 @@ Get all available addons.
     "host_ipc": "bool",
     "host_dbus": "bool",
     "privileged": ["NET_ADMIN", "SYS_ADMIN"],
+    "apparmor": "disable|default|profile",
     "devices": ["/dev/xy"],
     "auto_uart": "bool",
     "icon": "bool",
@@ -434,9 +478,14 @@ Get all available addons.
     "changelog": "bool",
     "hassio_api": "bool",
     "homeassistant_api": "bool",
+    "full_access": "bool",
+    "protected": "bool",
+    "rating": "1-6",
     "stdin": "bool",
     "webui": "null|http(s)://[HOST]:port/xy/zx",
     "gpio": "bool",
+    "devicetree": "bool",
+    "docker_api": "bool",
     "audio": "bool",
     "audio_input": "null|0,0",
     "audio_output": "null|0,0",
@@ -461,6 +510,7 @@ Get all available addons.
         "CONTAINER": "port|[ip, port]"
     },
     "options": {},
+    "protected": "bool",
     "audio_output": "null|0,0",
     "audio_input": "null|0,0"
 }
````
````diff
@@ -564,17 +614,9 @@ return:
 }
 ```
 
-- GET `/services/xy`
-```json
-{
-    "available": "bool",
-    "xy": {}
-}
-```
-
 #### MQTT
 
-This service perform a auto discovery to Home-Assistant.
+This service performs an auto discovery to Home-Assistant.
 
 - GET `/services/mqtt`
 ```json
@@ -602,46 +644,3 @@ This service perform a auto discovery to Home-Assistant.
 ```
 
 - DEL `/services/mqtt`
-
-## Host Control
-
-Communicate over UNIX socket with a host daemon.
-
-- commands
-
-```
-# info
--> {'type', 'version', 'last_version', 'features', 'hostname'}
-# reboot
-# shutdown
-# host-update [v]
-
-# hostname xy
-
-# network info
--> {}
-# network wlan ssd xy
-# network wlan password xy
-# network int ip xy
-# network int netmask xy
-# network int route xy
-```
-
-Features:
-
-- shutdown
-- reboot
-- update
-- hostname
-- network_info
-- network_control
-
-Answer:
-```
-{}|OK|ERROR|WRONG
-```
-
-- {}: json
-- OK: call was successfully
-- ERROR: error on call
-- WRONG: not supported
````
`Dockerfile` (28 changes)

````diff
@@ -1,24 +1,22 @@
 ARG BUILD_FROM
 FROM $BUILD_FROM
 
 # Add env
 ENV LANG C.UTF-8
 
-# Setup base
+# Install base
 RUN apk add --no-cache \
     python3 \
-    git \
-    socat \
-    libstdc++ \
-    && apk add --no-cache --virtual .build-dependencies \
     git \
     socat \
     glib \
     libstdc++ \
     eudev-libs
 
+# Install requirements
+COPY requirements.txt /usr/src/
+RUN apk add --no-cache --virtual .build-dependencies \
     make \
     python3-dev \
     g++ \
-    && pip3 install --no-cache-dir \
-    uvloop==0.9.1 \
-    cchardet==2.1.1 \
-    pycryptodome==3.4.11 \
-    && apk del .build-dependencies
+    && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
+    && apk del .build-dependencies \
+    && rm -f /usr/src/requirements.txt
 
 # Install HassIO
 COPY . /usr/src/hassio
````
````diff
@@ -4,8 +4,7 @@ from concurrent.futures import ThreadPoolExecutor
 import logging
 import sys
 
-import hassio.bootstrap as bootstrap
-import hassio.core as core
+from hassio import bootstrap
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -34,14 +33,13 @@ if __name__ == "__main__":
 
     _LOGGER.info("Initialize Hassio setup")
     coresys = bootstrap.initialize_coresys(loop)
-    hassio = core.HassIO(coresys)
 
     bootstrap.migrate_system_env(coresys)
 
     _LOGGER.info("Setup HassIO")
-    loop.run_until_complete(hassio.setup())
+    loop.run_until_complete(coresys.core.setup())
 
-    loop.call_soon_threadsafe(loop.create_task, hassio.start())
+    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
     loop.call_soon_threadsafe(bootstrap.reg_signal, loop)
 
     try:
@@ -49,7 +47,7 @@ if __name__ == "__main__":
         loop.run_forever()
     finally:
         _LOGGER.info("Stopping HassIO")
-        loop.run_until_complete(hassio.stop())
+        loop.run_until_complete(coresys.core.stop())
         executor.shutdown(wait=False)
         loop.close()
````
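The start-up flow above hands everything to `coresys.core` instead of a standalone `HassIO` object. A toy, self-contained version of the same loop pattern — the `Core`/`CoreSys` classes below are stand-ins, not the real hassio objects:

```python
import asyncio

class Core:
    """Stand-in for coresys.core with the same lifecycle hooks."""
    async def setup(self):
        print("setup")
    async def start(self):
        print("start")
    async def stop(self):
        print("stop")

class CoreSys:
    """Stand-in container object holding the core."""
    def __init__(self):
        self.core = Core()

loop = asyncio.get_event_loop()
coresys = CoreSys()

# Same shape as __main__: blocking setup, then schedule start on the loop.
loop.run_until_complete(coresys.core.setup())
loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
loop.call_later(0.1, loop.stop)  # stand-in for the real signal handler

try:
    loop.run_forever()
finally:
    loop.run_until_complete(coresys.core.stop())
    loop.close()
```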
````diff
@@ -5,7 +5,7 @@ import logging
 from .addon import Addon
 from .repository import Repository
 from .data import AddonsData
-from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
+from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
 from ..coresys import CoreSysAttributes
 
 _LOGGER = logging.getLogger(__name__)
@@ -40,11 +40,11 @@ class AddonManager(CoreSysAttributes):
         return list(self.repositories_obj.values())
 
     def get(self, addon_slug):
-        """Return a add-on from slug."""
+        """Return an add-on from slug."""
         return self.addons_obj.get(addon_slug)
 
     def from_uuid(self, uuid):
-        """Return a add-on from uuid."""
+        """Return an add-on from uuid."""
         for addon in self.list_addons:
             if addon.is_installed and uuid == addon.uuid:
                 return addon
@@ -56,7 +56,7 @@ class AddonManager(CoreSysAttributes):
 
         # init hassio built-in repositories
         repositories = \
-            set(self._config.addons_repositories) | BUILTIN_REPOSITORIES
+            set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES
 
         # init custom repositories & load addons
         await self.load_repositories(repositories)
@@ -66,7 +66,7 @@ class AddonManager(CoreSysAttributes):
         tasks = [repository.update() for repository in
                  self.repositories_obj.values()]
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
 
         # read data from repositories
         self.data.reload()
@@ -90,16 +90,16 @@ class AddonManager(CoreSysAttributes):
 
             # don't add built-in repository to config
             if url not in BUILTIN_REPOSITORIES:
-                self._config.add_addon_repository(url)
+                self.sys_config.add_addon_repository(url)
 
         tasks = [_add_repository(url) for url in new_rep - old_rep]
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
 
         # del new repository
         for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
             self.repositories_obj.pop(url).remove()
-            self._config.drop_addon_repository(url)
+            self.sys_config.drop_addon_repository(url)
 
         # update data
         self.data.reload()
@@ -125,13 +125,13 @@ class AddonManager(CoreSysAttributes):
             self.addons_obj[addon_slug] = addon
 
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
 
         # remove
         for addon_slug in del_addons:
             self.addons_obj.pop(addon_slug)
 
-    async def auto_boot(self, stage):
+    async def boot(self, stage):
         """Boot addons with mode auto."""
         tasks = []
         for addon in self.addons_obj.values():
@@ -141,5 +141,18 @@ class AddonManager(CoreSysAttributes):
 
         _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
-            await asyncio.sleep(self._config.wait_boot, loop=self._loop)
+            await asyncio.wait(tasks)
+            await asyncio.sleep(self.sys_config.wait_boot)
+
+    async def shutdown(self, stage):
+        """Shutdown addons."""
+        tasks = []
+        for addon in self.addons_obj.values():
+            if addon.is_installed and \
+                    await addon.state() == STATE_STARTED and \
+                    addon.startup == stage:
+                tasks.append(addon.stop())
+
+        _LOGGER.info("Shutdown %s stop %d addons", stage, len(tasks))
+        if tasks:
+            await asyncio.wait(tasks)
````
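The new `boot(stage)`/`shutdown(stage)` pair gives add-ons a staged lifecycle: everything in one stage is awaited together, then the manager sleeps `wait_boot` seconds before the next stage proceeds. A self-contained sketch of that pattern — stage names and timings here are illustrative, not the real constants:

```python
import asyncio

async def fake_addon(name):
    """Stand-in for addon.start() / addon.stop()."""
    await asyncio.sleep(0.05)
    print(f"{name} done")

async def run_stage(stage, addons, wait_boot):
    """Await one stage's tasks together, then let them settle."""
    tasks = [asyncio.ensure_future(fake_addon(f"{stage}/{n}")) for n in addons]
    if tasks:
        await asyncio.wait(tasks)
        await asyncio.sleep(wait_boot)

async def main():
    for stage in ("initialize", "system", "services", "application"):
        await run_stage(stage, ["a", "b"], wait_boot=0.01)

asyncio.get_event_loop().run_until_complete(main())
```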
````diff
@@ -1,4 +1,5 @@
 """Init file for HassIO addons."""
+from contextlib import suppress
 from copy import deepcopy
 import logging
 import json
@@ -13,7 +14,7 @@ from voluptuous.humanize import humanize_error
 
 from .validate import (
     validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME, RE_SERVICE)
-from .utils import check_installed
+from .utils import check_installed, remove_data
 from ..const import (
     ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
     ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
@@ -23,10 +24,15 @@ from ..const import (
     ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
     ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
     ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
-    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES)
+    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES,
+    ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API, ATTR_FULL_ACCESS,
+    ATTR_PROTECTED,
+    SECURITY_PROFILE, SECURITY_DISABLE, SECURITY_DEFAULT)
 from ..coresys import CoreSysAttributes
 from ..docker.addon import DockerAddon
 from ..utils.json import write_json_file, read_json_file
+from ..utils.apparmor import adjust_profile
+from ..exceptions import HostAppArmorError
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -63,11 +69,11 @@ class Addon(CoreSysAttributes):
     @property
     def _data(self):
         """Return addons data storage."""
-        return self._addons.data
+        return self.sys_addons.data
 
     @property
     def is_installed(self):
-        """Return True if a addon is installed."""
+        """Return True if an addon is installed."""
         return self._id in self._data.system
 
     @property
@@ -161,7 +167,7 @@ class Addon(CoreSysAttributes):
 
     @property
     def uuid(self):
-        """Return a API token for this add-on."""
+        """Return an API token for this add-on."""
        if self.is_installed:
            return self._data.user[self._id][ATTR_UUID]
        return None
@@ -196,6 +202,18 @@ class Addon(CoreSysAttributes):
             return self._data.cache[self._id][ATTR_VERSION]
         return self.version_installed
 
+    @property
+    def protected(self):
+        """Return if addon is in protected mode."""
+        if self.is_installed:
+            return self._data.user[self._id][ATTR_PROTECTED]
+        return True
+
+    @protected.setter
+    def protected(self, value):
+        """Set addon in protected mode."""
+        self._data.user[self._id][ATTR_PROTECTED] = value
+
     @property
     def startup(self):
         """Return startup type of addon."""
@@ -316,11 +334,25 @@ class Addon(CoreSysAttributes):
         """Return list of privilege."""
         return self._mesh.get(ATTR_PRIVILEGED)
 
+    @property
+    def apparmor(self):
+        """Return True if apparmor is enabled."""
+        if not self._mesh.get(ATTR_APPARMOR):
+            return SECURITY_DISABLE
+        elif self.sys_host.apparmor.exists(self.slug):
+            return SECURITY_PROFILE
+        return SECURITY_DEFAULT
+
     @property
     def legacy(self):
         """Return if the add-on don't support hass labels."""
         return self._mesh.get(ATTR_LEGACY)
 
+    @property
+    def access_docker_api(self):
+        """Return if the add-on need read-only docker API access."""
+        return self._mesh.get(ATTR_DOCKER_API)
+
     @property
     def access_hassio_api(self):
         """Return True if the add-on access to hassio api."""
@@ -341,6 +373,16 @@ class Addon(CoreSysAttributes):
         """Return True if the add-on access to gpio interface."""
         return self._mesh[ATTR_GPIO]
 
+    @property
+    def with_full_access(self):
+        """Return True if the add-on want full access to hardware."""
+        return self._mesh[ATTR_FULL_ACCESS]
+
+    @property
+    def with_devicetree(self):
+        """Return True if the add-on read access to devicetree."""
+        return self._mesh[ATTR_DEVICETREE]
+
     @property
     def with_audio(self):
         """Return True if the add-on access to audio."""
@@ -352,15 +394,14 @@ class Addon(CoreSysAttributes):
         if not self.with_audio:
             return None
 
-        setting = self._config.audio_output
         if self.is_installed and \
                 ATTR_AUDIO_OUTPUT in self._data.user[self._id]:
-            setting = self._data.user[self._id][ATTR_AUDIO_OUTPUT]
-        return setting
+            return self._data.user[self._id][ATTR_AUDIO_OUTPUT]
+        return self.sys_host.alsa.default.output
 
     @audio_output.setter
     def audio_output(self, value):
-        """Set/remove custom audio output settings."""
+        """Set/reset audio output settings."""
         if value is None:
             self._data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
         else:
@@ -372,14 +413,13 @@ class Addon(CoreSysAttributes):
         if not self.with_audio:
             return None
 
-        setting = self._config.audio_input
         if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]:
-            setting = self._data.user[self._id][ATTR_AUDIO_INPUT]
-        return setting
+            return self._data.user[self._id][ATTR_AUDIO_INPUT]
+        return self.sys_host.alsa.default.input
 
     @audio_input.setter
     def audio_input(self, value):
-        """Set/remove custom audio input settings."""
+        """Set/reset audio input settings."""
         if value is None:
             self._data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
         else:
@@ -392,7 +432,7 @@ class Addon(CoreSysAttributes):
 
     @property
     def with_icon(self):
-        """Return True if a icon exists."""
+        """Return True if an icon exists."""
         return self.path_icon.exists()
 
     @property
@@ -417,11 +457,11 @@ class Addon(CoreSysAttributes):
 
         # Repository with dockerhub images
         if ATTR_IMAGE in addon_data:
-            return addon_data[ATTR_IMAGE].format(arch=self._arch)
+            return addon_data[ATTR_IMAGE].format(arch=self.sys_arch)
 
         # local build
         return "{}/{}-addon-{}".format(
-            addon_data[ATTR_REPOSITORY], self._arch,
+            addon_data[ATTR_REPOSITORY], self.sys_arch,
             addon_data[ATTR_SLUG])
 
     @property
@@ -442,12 +482,12 @@ class Addon(CoreSysAttributes):
     @property
     def path_data(self):
         """Return addon data path inside supervisor."""
-        return Path(self._config.path_addons_data, self._id)
+        return Path(self.sys_config.path_addons_data, self._id)
 
     @property
     def path_extern_data(self):
         """Return addon data path external for docker."""
-        return PurePath(self._config.path_extern_addons_data, self._id)
+        return PurePath(self.sys_config.path_extern_addons_data, self._id)
 
     @property
     def path_options(self):
@@ -474,9 +514,24 @@ class Addon(CoreSysAttributes):
         """Return path to addon changelog."""
         return Path(self.path_location, 'CHANGELOG.md')
 
+    @property
+    def path_apparmor(self):
+        """Return path to custom AppArmor profile."""
+        return Path(self.path_location, 'apparmor.txt')
+
+    @property
+    def path_asound(self):
+        """Return path to asound config."""
+        return Path(self.sys_config.path_tmp, f"{self.slug}_asound")
+
+    @property
+    def path_extern_asound(self):
+        """Return path to asound config for docker."""
+        return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound")
+
     def save_data(self):
         """Save data of addon."""
-        self._addons.data.save_data()
+        self.sys_addons.data.save_data()
 
     def write_options(self):
         """Return True if addon options is written to data."""
@@ -496,6 +551,41 @@ class Addon(CoreSysAttributes):
 
         return False
 
+    def write_asound(self):
+        """Write asound config to file and return True on success."""
+        asound_config = self.sys_host.alsa.asound(
+            alsa_input=self.audio_input, alsa_output=self.audio_output)
+
+        try:
+            with self.path_asound.open('w') as config_file:
+                config_file.write(asound_config)
+        except OSError as err:
+            _LOGGER.error("Addon %s can't write asound: %s", self._id, err)
+            return False
+
+        return True
+
+    async def _install_apparmor(self):
+        """Install or Update AppArmor profile for Add-on."""
+        exists_local = self.sys_host.apparmor.exists(self.slug)
+        exists_addon = self.path_apparmor.exists()
+
+        # Nothing to do
+        if not exists_local and not exists_addon:
+            return
+
+        # Need removed
+        if exists_local and not exists_addon:
+            await self.sys_host.apparmor.remove_profile(self.slug)
+            return
+
+        # Need install/update
+        with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
+            profile_file = Path(tmp_folder, 'apparmor.txt')
+
+            adjust_profile(self.slug, self.path_apparmor, profile_file)
+            await self.sys_host.apparmor.load_profile(self.slug, profile_file)
+
     @property
     def schema(self):
         """Create a schema for addon options."""
@@ -536,10 +626,10 @@ class Addon(CoreSysAttributes):
         return True
 
     async def install(self):
-        """Install a addon."""
-        if self._arch not in self.supported_arch:
+        """Install an addon."""
+        if self.sys_arch not in self.supported_arch:
             _LOGGER.error(
-                "Addon %s not supported on %s", self._id, self._arch)
+                "Addon %s not supported on %s", self._id, self.sys_arch)
             return False
 
         if self.is_installed:
@@ -551,6 +641,9 @@ class Addon(CoreSysAttributes):
                 "Create Home-Assistant addon data folder %s", self.path_data)
             self.path_data.mkdir()
 
+        # Setup/Fix AppArmor profile
+        await self._install_apparmor()
+
         if not await self.instance.install(self.last_version):
             return False
 
@@ -559,14 +652,24 @@ class Addon(CoreSysAttributes):
 
     @check_installed
     async def uninstall(self):
-        """Remove a addon."""
+        """Remove an addon."""
         if not await self.instance.remove():
             return False
 
         if self.path_data.is_dir():
             _LOGGER.info(
                 "Remove Home-Assistant addon data folder %s", self.path_data)
-            shutil.rmtree(str(self.path_data))
+            await remove_data(self.path_data)
+
+        # Cleanup audio settings
+        if self.path_asound.exists():
+            with suppress(OSError):
+                self.path_asound.unlink()
+
+        # Cleanup apparmor profile
+        if self.sys_host.apparmor.exists(self.slug):
+            with suppress(HostAppArmorError):
+                await self.sys_host.apparmor.remove_profile(self.slug)
 
         self._set_uninstall()
         return True
@@ -583,9 +686,14 @@ class Addon(CoreSysAttributes):
     @check_installed
     async def start(self):
         """Set options and start addon."""
+        # Options
         if not self.write_options():
             return False
 
+        # Sound
+        if self.with_audio and not self.write_asound():
+            return False
+
         return await self.instance.run()
 
     @check_installed
@@ -609,6 +717,9 @@ class Addon(CoreSysAttributes):
             return False
         self._set_update(self.last_version)
 
+        # Setup/Fix AppArmor profile
+        await self._install_apparmor()
+
         # restore state
         if last_state == STATE_STARTED:
             await self.start()
@@ -671,11 +782,11 @@ class Addon(CoreSysAttributes):
 
     @check_installed
     async def snapshot(self, tar_file):
-        """Snapshot a state of a addon."""
-        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
+        """Snapshot state of an addon."""
+        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
             # store local image
             if self.need_build and not await \
-                    self.instance.export_image(Path(temp, "image.tar")):
+                    self.instance.export_image(Path(temp, 'image.tar')):
                 return False
 
             data = {
@@ -687,11 +798,20 @@ class Addon(CoreSysAttributes):
 
             # store local configs/state
             try:
-                write_json_file(Path(temp, "addon.json"), data)
+                write_json_file(Path(temp, 'addon.json'), data)
             except (OSError, json.JSONDecodeError) as err:
                 _LOGGER.error("Can't save meta for %s: %s", self._id, err)
                 return False
 
+            # Store AppArmor Profile
+            if self.sys_host.apparmor.exists(self.slug):
+                profile = Path(temp, 'apparmor.txt')
+                try:
+                    self.sys_host.apparmor.backup_profile(self.slug, profile)
+                except HostAppArmorError:
+                    _LOGGER.error("Can't backup AppArmor profile")
+                    return False
+
             # write into tarfile
             def _write_tarfile():
                 """Write tar inside loop."""
@@ -701,7 +821,7 @@ class Addon(CoreSysAttributes):
 
             try:
                 _LOGGER.info("Build snapshot for addon %s", self._id)
-                await self._loop.run_in_executor(None, _write_tarfile)
+                await self.sys_run_in_executor(_write_tarfile)
             except (tarfile.TarError, OSError) as err:
                 _LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
                 return False
@@ -710,8 +830,8 @@ class Addon(CoreSysAttributes):
         return True
 
     async def restore(self, tar_file):
-        """Restore a state of a addon."""
-        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
+        """Restore state of an addon."""
+        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
             # extract snapshot
             def _extract_tarfile():
                 """Extract tar snapshot."""
@@ -719,14 +839,14 @@ class Addon(CoreSysAttributes):
                 snapshot.extractall(path=Path(temp))
 
             try:
-                await self._loop.run_in_executor(None, _extract_tarfile)
+                await self.sys_run_in_executor(_extract_tarfile)
             except tarfile.TarError as err:
                 _LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
                 return False
 
             # read snapshot data
             try:
-                data = read_json_file(Path(temp, "addon.json"))
+                data = read_json_file(Path(temp, 'addon.json'))
             except (OSError, json.JSONDecodeError) as err:
                 _LOGGER.error("Can't read addon.json: %s", err)
 
@@ -747,7 +867,7 @@ class Addon(CoreSysAttributes):
             if not await self.instance.exists():
                 _LOGGER.info("Restore image for addon %s", self._id)
 
-                image_file = Path(temp, "image.tar")
+                image_file = Path(temp, 'image.tar')
                 if image_file.is_file():
                     await self.instance.import_image(image_file, version)
                 else:
@@ -759,17 +879,27 @@ class Addon(CoreSysAttributes):
             # restore data
             def _restore_data():
                 """Restore data."""
-                if self.path_data.is_dir():
-                    shutil.rmtree(str(self.path_data), ignore_errors=True)
                 shutil.copytree(str(Path(temp, "data")), str(self.path_data))
 
+            _LOGGER.info("Restore data for addon %s", self._id)
+            if self.path_data.is_dir():
+                await remove_data(self.path_data)
             try:
-                _LOGGER.info("Restore data for addon %s", self._id)
-                await self._loop.run_in_executor(None, _restore_data)
+                await self.sys_run_in_executor(_restore_data)
             except shutil.Error as err:
                 _LOGGER.error("Can't restore origin data: %s", err)
                 return False
 
+            # Restore AppArmor
+            profile_file = Path(temp, 'apparmor.txt')
+            if profile_file.exists():
+                try:
+                    await self.sys_host.apparmor.load_profile(
+                        self.slug, profile_file)
+                except HostAppArmorError:
+                    _LOGGER.error("Can't restore AppArmor profile")
+                    return False
+
             # run addon
             if data[ATTR_STATE] == STATE_STARTED:
                 return await self.start()
````
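The uninstall cleanup above leans on `contextlib.suppress` so an already-missing asound file or AppArmor profile does not abort the removal. The same idiom in isolation, with a hypothetical path:

```python
from contextlib import suppress
from pathlib import Path

leftover = Path("/tmp/example_asound")  # hypothetical leftover config file
with suppress(OSError):
    leftover.unlink()  # FileNotFoundError is an OSError, so a missing
                       # file is silently ignored instead of raising
```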
````diff
@@ -25,13 +25,13 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
     @property
     def addon(self):
         """Return addon of build data."""
-        return self._addons.get(self._id)
+        return self.sys_addons.get(self._id)
 
     @property
     def base_image(self):
         """Base images for this addon."""
         return self._data[ATTR_BUILD_FROM].get(
-            self._arch, BASE_IMAGE[self._arch])
+            self.sys_arch, BASE_IMAGE[self.sys_arch])
 
     @property
     def squash(self):
@@ -53,15 +53,15 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
             'squash': self.squash,
             'labels': {
                 'io.hass.version': version,
-                'io.hass.arch': self._arch,
+                'io.hass.arch': self.sys_arch,
                 'io.hass.type': META_ADDON,
-                'io.hass.name': self.addon.name,
-                'io.hass.description': self.addon.description,
+                'io.hass.name': self._fix_label('name'),
+                'io.hass.description': self._fix_label('description'),
             },
             'buildargs': {
                 'BUILD_FROM': self.base_image,
                 'BUILD_VERSION': version,
-                'BUILD_ARCH': self._arch,
+                'BUILD_ARCH': self.sys_arch,
                 **self.additional_args,
             }
         }
@@ -70,3 +70,8 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
             args['labels']['io.hass.url'] = self.addon.url
 
         return args
+
+    def _fix_label(self, label_name):
+        """Remove characters they are not supported."""
+        label = getattr(self.addon, label_name, "")
+        return label.replace("'", "")
````
````diff
@@ -1,12 +1,12 @@
 {
     "local": {
-        "name": "Local Add-Ons",
+        "name": "Local add-ons",
         "url": "https://home-assistant.io/hassio",
         "maintainer": "you"
     },
     "core": {
-        "name": "Built-in Add-Ons",
+        "name": "Official add-ons",
         "url": "https://home-assistant.io/addons",
-        "maintainer": "Home Assistant authors"
+        "maintainer": "Home Assistant"
     }
 }
````
````diff
@@ -1,5 +1,4 @@
 """Init file for HassIO addons."""
-import copy
 import logging
 import json
 from pathlib import Path
@@ -11,7 +10,7 @@ from .utils import extract_hash_from_path
 from .validate import (
     SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG)
 from ..const import (
-    FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
+    FILE_HASSIO_ADDONS, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
 from ..coresys import CoreSysAttributes
 from ..utils.json import JsonConfig, read_json_file
@@ -56,23 +55,20 @@ class AddonsData(JsonConfig, CoreSysAttributes):
 
         # read core repository
         self._read_addons_folder(
-            self._config.path_addons_core, REPOSITORY_CORE)
+            self.sys_config.path_addons_core, REPOSITORY_CORE)
 
         # read local repository
         self._read_addons_folder(
-            self._config.path_addons_local, REPOSITORY_LOCAL)
+            self.sys_config.path_addons_local, REPOSITORY_LOCAL)
 
         # add built-in repositories information
         self._set_builtin_repositories()
 
         # read custom git repositories
-        for repository_element in self._config.path_addons_git.iterdir():
+        for repository_element in self.sys_config.path_addons_git.iterdir():
             if repository_element.is_dir():
                 self._read_git_repository(repository_element)
 
-        # update local data
-        self._merge_config()
-
     def _read_git_repository(self, path):
         """Process a custom repository folder."""
         slug = extract_hash_from_path(path)
@@ -84,7 +80,7 @@ class AddonsData(JsonConfig, CoreSysAttributes):
                 read_json_file(repository_file)
             )
 
-        except (OSError, json.JSONDecodeError):
+        except (OSError, json.JSONDecodeError, UnicodeDecodeError):
             _LOGGER.warning("Can't read repository information from %s",
                             repository_file)
             return
@@ -138,25 +134,3 @@ class AddonsData(JsonConfig, CoreSysAttributes):
         # local repository
         self._repositories[REPOSITORY_LOCAL] = \
             builtin_data[REPOSITORY_LOCAL]
-
-    def _merge_config(self):
-        """Update local config if they have update.
-
-        It need to be the same version as the local version is for merge.
-        """
-        have_change = False
-
-        for addon in set(self.system):
-            # detached
-            if addon not in self._cache:
-                continue
-
-            cache = self._cache[addon]
-            data = self.system[addon]
-            if data[ATTR_VERSION] == cache[ATTR_VERSION]:
-                if data != cache:
-                    self.system[addon] = copy.deepcopy(cache)
-                    have_change = True
-
-        if have_change:
-            self.save_data()
````
````diff
@@ -8,8 +8,9 @@ import shutil
 import git
 
 from .utils import get_hash_from_repository
-from ..const import URL_HASSIO_ADDONS
+from ..const import URL_HASSIO_ADDONS, ATTR_URL, ATTR_BRANCH
 from ..coresys import CoreSysAttributes
+from ..validate import RE_REPOSITORY
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -22,9 +23,20 @@ class GitRepo(CoreSysAttributes):
         self.coresys = coresys
         self.repo = None
         self.path = path
-        self.url = url
         self.lock = asyncio.Lock(loop=coresys.loop)
 
+        self._data = RE_REPOSITORY.match(url).groupdict()
+
+    @property
+    def url(self):
+        """Return repository URL."""
+        return self._data[ATTR_URL]
+
+    @property
+    def branch(self):
+        """Return repository branch."""
+        return self._data[ATTR_BRANCH]
+
     async def load(self):
         """Init git addon repo."""
         if not self.path.is_dir():
@@ -33,12 +45,13 @@ class GitRepo(CoreSysAttributes):
         async with self.lock:
             try:
                 _LOGGER.info("Load addon %s repository", self.path)
-                self.repo = await self._loop.run_in_executor(
-                    None, git.Repo, str(self.path))
+                self.repo = await self.sys_run_in_executor(
+                    git.Repo, str(self.path))
 
             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                     git.GitCommandError) as err:
                 _LOGGER.error("Can't load %s repo: %s.", self.path, err)
+                self._remove()
                 return False
 
         return True
@@ -46,16 +59,27 @@ class GitRepo(CoreSysAttributes):
     async def clone(self):
         """Clone git addon repo."""
         async with self.lock:
+            git_args = {
+                attribute: value
+                for attribute, value in (
+                    ('recursive', True),
+                    ('branch', self.branch),
+                    ('depth', 1),
+                    ('shallow-submodules', True)
+                ) if value is not None
+            }
+
             try:
                 _LOGGER.info("Clone addon %s repository", self.url)
-                self.repo = await self._loop.run_in_executor(
-                    None, ft.partial(
-                        git.Repo.clone_from, self.url, str(self.path),
-                        recursive=True))
+                self.repo = await self.sys_run_in_executor(ft.partial(
+                    git.Repo.clone_from, self.url, str(self.path),
+                    **git_args
+                ))
 
             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                     git.GitCommandError) as err:
                 _LOGGER.error("Can't clone %s repo: %s.", self.url, err)
+                self._remove()
                 return False
 
         return True
@@ -67,18 +91,43 @@ class GitRepo(CoreSysAttributes):
             return False
 
         async with self.lock:
+            _LOGGER.info("Update addon %s repository", self.url)
+            branch = self.repo.active_branch.name
+
             try:
-                _LOGGER.info("Pull addon %s repository", self.url)
-                await self._loop.run_in_executor(
-                    None, self.repo.remotes.origin.pull)
+                # Download data
+                await self.sys_run_in_executor(ft.partial(
+                    self.repo.remotes.origin.fetch, **{
+                        'update-shallow': True,
+                        'depth': 1,
+                    }))
+
+                # Jump on top of that
+                await self.sys_run_in_executor(ft.partial(
+                    self.repo.git.reset, f"origin/{branch}", hard=True))
+
+                # Cleanup old data
+                await self.sys_run_in_executor(ft.partial(
+                    self.repo.git.clean, "-xdf"))
 
             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                     git.GitCommandError) as err:
-                _LOGGER.error("Can't pull %s repo: %s.", self.url, err)
+                _LOGGER.error("Can't update %s repo: %s.", self.url, err)
                 return False
 
         return True
 
+    def _remove(self):
+        """Remove a repository."""
+        if not self.path.is_dir():
+            return
+
+        def log_err(funct, path, _):
+            """Log error."""
+            _LOGGER.warning("Can't remove %s", path)
+
+        shutil.rmtree(str(self.path), onerror=log_err)
+
 
 class GitRepoHassIO(GitRepo):
     """HassIO addons repository."""
@@ -101,12 +150,6 @@ class GitRepoCustom(GitRepo):
         super().__init__(coresys, path, url)
 
     def remove(self):
-        """Remove a custom addon."""
-        if self.path.is_dir():
-            _LOGGER.info("Remove custom addon repository %s", self.url)
-
-            def log_err(funct, path, _):
-                """Log error."""
-                _LOGGER.warning("Can't remove %s", path)
-
-            shutil.rmtree(str(self.path), onerror=log_err)
+        """Remove a custom repository."""
+        _LOGGER.info("Remove custom addon repository %s", self.url)
+        self._remove()
````
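The rewritten `clone()` builds its keyword arguments with a dict comprehension so options without a value simply drop out. A standalone illustration, with values mirroring the tuple in the diff:

```python
branch = None  # e.g. the repository URL carried no branch suffix

git_args = {
    attribute: value
    for attribute, value in (
        ("recursive", True),
        ("branch", branch),
        ("depth", 1),
        ("shallow-submodules", True),
    )
    if value is not None  # drop unset options entirely
}
print(git_args)
# {'recursive': True, 'depth': 1, 'shallow-submodules': True}
```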
````diff
@@ -30,7 +30,7 @@ class Repository(CoreSysAttributes):
     @property
     def _mesh(self):
         """Return data struct repository."""
-        return self._addons.data.repositories.get(self._id, {})
+        return self.sys_addons.data.repositories.get(self._id, {})
 
     @property
     def slug(self):
````
````diff
@@ -1,13 +1,56 @@
 """Util addons functions."""
+import asyncio
 import hashlib
+import logging
 import re
 
+from ..const import (
+    SECURITY_DISABLE, SECURITY_PROFILE, PRIVILEGED_NET_ADMIN,
+    PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_RAWIO)
+
 RE_SHA1 = re.compile(r"[a-f0-9]{8}")
 
+_LOGGER = logging.getLogger(__name__)
+
+
+def rating_security(addon):
+    """Return 1-5 for security rating.
+
+    1 = not secure
+    5 = high secure
+    """
+    rating = 5
+
+    # AppArmor
+    if addon.apparmor == SECURITY_DISABLE:
+        rating += -1
+    elif addon.apparmor == SECURITY_PROFILE:
+        rating += 1
+
+    # API Access
+    if addon.access_hassio_api or addon.access_homeassistant_api:
+        rating += -1
+
+    # Privileged options
+    if addon.privileged in (PRIVILEGED_NET_ADMIN, PRIVILEGED_SYS_ADMIN,
+                            PRIVILEGED_SYS_RAWIO):
+        rating += -1
+
+    # Not secure Networking
+    if addon.host_network:
+        rating += -1
+
+    # Full Access
+    if addon.with_full_access:
+        rating += -2
+
+    # Docker Access
+    if addon.access_docker_api:
+        rating = 1
+
+    return max(min(6, rating), 1)
+
+
 def get_hash_from_repository(name):
     """Generate a hash from repository."""
     key = name.lower().encode()
@@ -33,3 +76,20 @@ def check_installed(method):
         return await method(addon, *args, **kwargs)
 
     return wrap_check
+
+
+async def remove_data(folder):
+    """Remove folder and reset privileged."""
+    try:
+        proc = await asyncio.create_subprocess_exec(
+            "rm", "-rf", str(folder),
+            stdout=asyncio.subprocess.DEVNULL
+        )
+
+        _, error_msg = await proc.communicate()
+    except OSError as err:
+        error_msg = str(err)
+
+    if proc.returncode == 0:
+        return
+    _LOGGER.error("Can't remove Add-on Data: %s", error_msg)
````
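A quick way to sanity-check the new `rating_security` is to feed it a stand-in object carrying only the attributes it reads. Walking the rules by hand for this add-on gives 5 + 1 (AppArmor profile) - 1 (API access) - 1 (host network) = 4; the string value for `apparmor` below is an assumption based on API.md's `disable|default|profile`:

```python
from types import SimpleNamespace

addon = SimpleNamespace(
    apparmor="profile",             # assumed value of SECURITY_PROFILE -> +1
    access_hassio_api=True,         # any API access -> -1
    access_homeassistant_api=False,
    privileged=[],                  # no risky capabilities listed
    host_network=True,              # host networking -> -1
    with_full_access=False,
    access_docker_api=False,
)

# rating_security(addon) would compute 5 + 1 - 1 - 1 == 4, then clamp the
# result into range with max(min(6, rating), 1).
```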
````diff
@@ -17,8 +17,13 @@ from ..const import (
     ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
     ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
     ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
-    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY)
-from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL
+    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY,
+    ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API, ATTR_PROTECTED,
+    ATTR_FULL_ACCESS,
+    PRIVILEGED_NET_ADMIN, PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_RAWIO,
+    PRIVILEGED_IPC_LOCK, PRIVILEGED_SYS_TIME, PRIVILEGED_SYS_NICE,
+    PRIVILEGED_SYS_RESOURCE)
+from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_DEVICE
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -57,11 +62,13 @@ STARTUP_ALL = [
 ]
 
 PRIVILEGED_ALL = [
-    "NET_ADMIN",
-    "SYS_ADMIN",
-    "SYS_RAWIO",
-    "SYS_TIME",
-    "SYS_NICE"
+    PRIVILEGED_NET_ADMIN,
+    PRIVILEGED_SYS_ADMIN,
+    PRIVILEGED_SYS_RAWIO,
+    PRIVILEGED_IPC_LOCK,
+    PRIVILEGED_SYS_TIME,
+    PRIVILEGED_SYS_NICE,
+    PRIVILEGED_SYS_RESOURCE,
 ]
 
 BASE_IMAGE = {
@@ -106,12 +113,16 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
     vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
     vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
     vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
+    vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
+    vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
     vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
     vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
+    vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
     vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
     vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
     vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
     vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
+    vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
     vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
     vol.Optional(ATTR_DISCOVERY): [vol.Match(RE_DISCOVERY)],
     vol.Required(ATTR_OPTIONS): dict,
@@ -125,7 +136,8 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
             vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
-        }))
+        }), False),
-    vol.Optional(ATTR_IMAGE): vol.Match(r"^[\w{}]+/[\-\w{}]+$"),
+    vol.Optional(ATTR_IMAGE):
+        vol.Match(r"^([a-zA-Z.:\d{}]+/)*?([\w{}]+)/([\-\w{}]+)$"),
     vol.Optional(ATTR_TIMEOUT, default=10):
         vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
 }, extra=vol.REMOVE_EXTRA)
@@ -161,8 +173,9 @@ SCHEMA_ADDON_USER = vol.Schema({
     vol.Optional(ATTR_BOOT):
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
-    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
-    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
+    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
+    vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
+    vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
 }, extra=vol.REMOVE_EXTRA)
````
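The `SCHEMA_ADDON_USER` change shows the voluptuous pattern of defaulted optionals: validating a dict fills in anything missing. A minimal sketch with stand-in keys — it requires the `voluptuous` package, and `ALSA_DEVICE` is replaced by a plain string coercion here:

```python
import voluptuous as vol

schema = vol.Schema({
    vol.Optional("protected", default=True): vol.Boolean(),
    vol.Optional("audio_output"): vol.Coerce(str),  # stand-in for ALSA_DEVICE
}, extra=vol.REMOVE_EXTRA)

print(schema({}))                        # {'protected': True}
print(schema({"audio_output": "0,0"}))   # value kept, default still applied
```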
@@ -7,13 +7,14 @@ from aiohttp import web
from .addons import APIAddons
from .discovery import APIDiscovery
from .homeassistant import APIHomeAssistant
from .hardware import APIHardware
from .host import APIHost
from .network import APINetwork
from .hassos import APIHassOS
from .proxy import APIProxy
from .supervisor import APISupervisor
from .snapshots import APISnapshots
from .services import APIServices
from .security import security_layer
from .security import SecurityMiddleware
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)
@@ -25,26 +26,25 @@ class RestAPI(CoreSysAttributes):
def __init__(self, coresys):
"""Initialize the REST API."""
self.coresys = coresys
self.security = SecurityMiddleware(coresys)
self.webapp = web.Application(
middlewares=[security_layer], loop=self._loop)
middlewares=[self.security.token_validation], loop=coresys.loop)

# service stuff
self._handler = None
self.server = None

# middleware
self.webapp['coresys'] = coresys
self._runner = web.AppRunner(self.webapp)
self._site = None

async def load(self):
"""Register REST API Calls."""
self._register_supervisor()
self._register_host()
self._register_hassos()
self._register_hardware()
self._register_homeassistant()
self._register_proxy()
self._register_panel()
self._register_addons()
self._register_snapshots()
self._register_network()
self._register_discovery()
self._register_services()

@@ -53,197 +53,206 @@ class RestAPI(CoreSysAttributes):
api_host = APIHost()
api_host.coresys = self.coresys

self.webapp.router.add_get('/host/info', api_host.info)
self.webapp.router.add_get('/host/hardware', api_host.hardware)
self.webapp.router.add_post('/host/reboot', api_host.reboot)
self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
self.webapp.router.add_post('/host/update', api_host.update)
self.webapp.router.add_post('/host/options', api_host.options)
self.webapp.router.add_post('/host/reload', api_host.reload)
self.webapp.add_routes([
web.get('/host/info', api_host.info),
web.post('/host/reboot', api_host.reboot),
web.post('/host/shutdown', api_host.shutdown),
web.post('/host/reload', api_host.reload),
web.post('/host/options', api_host.options),
web.get('/host/services', api_host.services),
web.post('/host/services/{service}/stop', api_host.service_stop),
web.post('/host/services/{service}/start', api_host.service_start),
web.post(
'/host/services/{service}/restart', api_host.service_restart),
web.post(
'/host/services/{service}/reload', api_host.service_reload),
])
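
The two registration styles in this hunk do the same thing; the refactor only moves from per-route router.add_* calls to aiohttp's declarative route-table form. A minimal sketch with a placeholder handler:

from aiohttp import web

async def info(request):
    return web.json_response({'result': 'ok'})

app = web.Application()

# Old style: one imperative call per route.
app.router.add_get('/host/info', info)

# New style: a declarative route table registered in one call.
app.add_routes([web.post('/host/reboot', info)])
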

def _register_network(self):
"""Register network function."""
api_net = APINetwork()
api_net.coresys = self.coresys
def _register_hassos(self):
"""Register hassos function."""
api_hassos = APIHassOS()
api_hassos.coresys = self.coresys

self.webapp.router.add_get('/network/info', api_net.info)
self.webapp.router.add_post('/network/options', api_net.options)
self.webapp.add_routes([
web.get('/hassos/info', api_hassos.info),
web.post('/hassos/update', api_hassos.update),
web.post('/hassos/update/cli', api_hassos.update_cli),
web.post('/hassos/config/sync', api_hassos.config_sync),
])

def _register_hardware(self):
"""Register hardware function."""
api_hardware = APIHardware()
api_hardware.coresys = self.coresys

self.webapp.add_routes([
web.get('/hardware/info', api_hardware.info),
web.get('/hardware/audio', api_hardware.audio),
])

def _register_supervisor(self):
"""Register supervisor function."""
api_supervisor = APISupervisor()
api_supervisor.coresys = self.coresys

self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
self.webapp.router.add_get('/supervisor/stats', api_supervisor.stats)
self.webapp.router.add_post(
'/supervisor/update', api_supervisor.update)
self.webapp.router.add_post(
'/supervisor/reload', api_supervisor.reload)
self.webapp.router.add_post(
'/supervisor/options', api_supervisor.options)
self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)
self.webapp.add_routes([
web.get('/supervisor/ping', api_supervisor.ping),
web.get('/supervisor/info', api_supervisor.info),
web.get('/supervisor/stats', api_supervisor.stats),
web.get('/supervisor/logs', api_supervisor.logs),
web.post('/supervisor/update', api_supervisor.update),
web.post('/supervisor/reload', api_supervisor.reload),
web.post('/supervisor/options', api_supervisor.options),
])

def _register_homeassistant(self):
"""Register homeassistant function."""
api_hass = APIHomeAssistant()
api_hass.coresys = self.coresys

self.webapp.router.add_get('/homeassistant/info', api_hass.info)
self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
self.webapp.router.add_get('/homeassistant/stats', api_hass.stats)
self.webapp.router.add_post('/homeassistant/options', api_hass.options)
self.webapp.router.add_post('/homeassistant/update', api_hass.update)
self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
self.webapp.router.add_post('/homeassistant/stop', api_hass.stop)
self.webapp.router.add_post('/homeassistant/start', api_hass.start)
self.webapp.router.add_post('/homeassistant/check', api_hass.check)
self.webapp.add_routes([
web.get('/homeassistant/info', api_hass.info),
web.get('/homeassistant/logs', api_hass.logs),
web.get('/homeassistant/stats', api_hass.stats),
web.post('/homeassistant/options', api_hass.options),
web.post('/homeassistant/update', api_hass.update),
web.post('/homeassistant/restart', api_hass.restart),
web.post('/homeassistant/stop', api_hass.stop),
web.post('/homeassistant/start', api_hass.start),
web.post('/homeassistant/check', api_hass.check),
])

def _register_proxy(self):
"""Register HomeAssistant API Proxy."""
api_proxy = APIProxy()
api_proxy.coresys = self.coresys

self.webapp.router.add_get(
'/homeassistant/api/websocket', api_proxy.websocket)
self.webapp.router.add_get(
'/homeassistant/websocket', api_proxy.websocket)
self.webapp.router.add_get(
'/homeassistant/api/stream', api_proxy.stream)
self.webapp.router.add_post(
'/homeassistant/api/{path:.+}', api_proxy.api)
self.webapp.router.add_get(
'/homeassistant/api/{path:.+}', api_proxy.api)
self.webapp.router.add_get(
'/homeassistant/api/', api_proxy.api)
self.webapp.add_routes([
web.get('/homeassistant/api/websocket', api_proxy.websocket),
web.get('/homeassistant/websocket', api_proxy.websocket),
web.get('/homeassistant/api/stream', api_proxy.stream),
web.post('/homeassistant/api/{path:.+}', api_proxy.api),
web.get('/homeassistant/api/{path:.+}', api_proxy.api),
web.get('/homeassistant/api/', api_proxy.api),
])

def _register_addons(self):
"""Register addon functions."""
api_addons = APIAddons()
api_addons.coresys = self.coresys

self.webapp.router.add_get('/addons', api_addons.list)
self.webapp.router.add_post('/addons/reload', api_addons.reload)
self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
self.webapp.router.add_post(
'/addons/{addon}/install', api_addons.install)
self.webapp.router.add_post(
'/addons/{addon}/uninstall', api_addons.uninstall)
self.webapp.router.add_post('/addons/{addon}/start', api_addons.start)
self.webapp.router.add_post('/addons/{addon}/stop', api_addons.stop)
self.webapp.router.add_post(
'/addons/{addon}/restart', api_addons.restart)
self.webapp.router.add_post(
'/addons/{addon}/update', api_addons.update)
self.webapp.router.add_post(
'/addons/{addon}/options', api_addons.options)
self.webapp.router.add_post(
'/addons/{addon}/rebuild', api_addons.rebuild)
self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
self.webapp.router.add_get('/addons/{addon}/icon', api_addons.icon)
self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
self.webapp.router.add_get(
'/addons/{addon}/changelog', api_addons.changelog)
self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)
self.webapp.router.add_get('/addons/{addon}/stats', api_addons.stats)
self.webapp.add_routes([
web.get('/addons', api_addons.list),
web.post('/addons/reload', api_addons.reload),
web.get('/addons/{addon}/info', api_addons.info),
web.post('/addons/{addon}/install', api_addons.install),
web.post('/addons/{addon}/uninstall', api_addons.uninstall),
web.post('/addons/{addon}/start', api_addons.start),
web.post('/addons/{addon}/stop', api_addons.stop),
web.post('/addons/{addon}/restart', api_addons.restart),
web.post('/addons/{addon}/update', api_addons.update),
web.post('/addons/{addon}/options', api_addons.options),
web.post('/addons/{addon}/rebuild', api_addons.rebuild),
web.get('/addons/{addon}/logs', api_addons.logs),
web.get('/addons/{addon}/icon', api_addons.icon),
web.get('/addons/{addon}/logo', api_addons.logo),
web.get('/addons/{addon}/changelog', api_addons.changelog),
web.post('/addons/{addon}/stdin', api_addons.stdin),
web.get('/addons/{addon}/stats', api_addons.stats),
])

def _register_snapshots(self):
"""Register snapshots function."""
api_snapshots = APISnapshots()
api_snapshots.coresys = self.coresys

self.webapp.router.add_get('/snapshots', api_snapshots.list)
self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)

self.webapp.router.add_post(
'/snapshots/new/full', api_snapshots.snapshot_full)
self.webapp.router.add_post(
'/snapshots/new/partial', api_snapshots.snapshot_partial)
self.webapp.router.add_post(
'/snapshots/new/upload', api_snapshots.upload)

self.webapp.router.add_get(
'/snapshots/{snapshot}/info', api_snapshots.info)
self.webapp.router.add_post(
'/snapshots/{snapshot}/remove', api_snapshots.remove)
self.webapp.router.add_post(
'/snapshots/{snapshot}/restore/full', api_snapshots.restore_full)
self.webapp.router.add_post(
'/snapshots/{snapshot}/restore/partial',
api_snapshots.restore_partial)
self.webapp.router.add_get(
'/snapshots/{snapshot}/download',
api_snapshots.download)
self.webapp.add_routes([
web.get('/snapshots', api_snapshots.list),
web.post('/snapshots/reload', api_snapshots.reload),
web.post('/snapshots/new/full', api_snapshots.snapshot_full),
web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
web.post('/snapshots/new/upload', api_snapshots.upload),
web.get('/snapshots/{snapshot}/info', api_snapshots.info),
web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
web.post('/snapshots/{snapshot}/restore/full',
api_snapshots.restore_full),
web.post('/snapshots/{snapshot}/restore/partial',
api_snapshots.restore_partial),
web.get('/snapshots/{snapshot}/download', api_snapshots.download),
])

def _register_services(self):
api_services = APIServices()
api_services.coresys = self.coresys

self.webapp.router.add_get('/services', api_services.list)

self.webapp.router.add_get(
'/services/{service}', api_services.get_service)
self.webapp.router.add_post(
'/services/{service}', api_services.set_service)
self.webapp.router.add_delete(
'/services/{service}', api_services.del_service)
self.webapp.add_routes([
web.get('/services', api_services.list),
web.get('/services/{service}', api_services.get_service),
web.post('/services/{service}', api_services.set_service),
web.delete('/services/{service}', api_services.del_service),
])

def _register_discovery(self):
api_discovery = APIDiscovery()
api_discovery.coresys = self.coresys

self.webapp.router.add_get(
'/services/discovery', api_discovery.list)
self.webapp.router.add_get(
'/services/discovery/{uuid}', api_discovery.get_discovery)
self.webapp.router.add_delete(
'/services/discovery/{uuid}', api_discovery.del_discovery)
self.webapp.router.add_post(
'/services/discovery', api_discovery.set_discovery)
self.webapp.add_routes([
web.get('/services/discovery', api_discovery.list),
web.get('/services/discovery/{uuid}', api_discovery.get_discovery),
web.delete('/services/discovery/{uuid}',
api_discovery.del_discovery),
web.post('/services/discovery', api_discovery.set_discovery),
])

def _register_panel(self):
"""Register panel for homeassistant."""
def create_panel_response(build_type):
panel_dir = Path(__file__).parent.joinpath("panel")

def create_response(panel_file):
"""Create a function to generate a response."""
path = Path(__file__).parent.joinpath(
f"panel/{build_type}.html")
path = panel_dir.joinpath(f"{panel_file!s}.html")
return lambda request: web.FileResponse(path)

# This route is for backwards compatibility with HA < 0.58
self.webapp.router.add_get(
'/panel', create_panel_response('hassio-main-es5'))
self.webapp.add_routes([
web.get('/panel', create_response('hassio-main-es5'))])

# This route is for backwards compatibility with HA 0.58 - 0.61
self.webapp.router.add_get(
'/panel_es5', create_panel_response('hassio-main-es5'))
self.webapp.router.add_get(
'/panel_latest', create_panel_response('hassio-main-latest'))
self.webapp.add_routes([
web.get('/panel_es5', create_response('hassio-main-es5')),
web.get('/panel_latest', create_response('hassio-main-latest')),
])

# This route is for HA > 0.61
self.webapp.router.add_get(
'/app-es5/index.html', create_panel_response('index'))
self.webapp.router.add_get(
'/app-es5/hassio-app.html', create_panel_response('hassio-app'))
# This route is for backwards compatibility with HA 0.62 - 0.70
self.webapp.add_routes([
web.get('/app-es5/index.html', create_response('index')),
web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
])

# This route is for HA > 0.70
self.webapp.add_routes([web.static('/app', panel_dir)])
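
Two patterns worth noting in _register_panel: the create_response helper returns a closure so each route always serves one fixed file, and web.static mounts the whole panel directory in a single route. A self-contained sketch of both (paths are placeholders; the handler is written as a coroutine, where the code above relies on aiohttp also accepting a plain lambda):

from pathlib import Path

from aiohttp import web

panel_dir = Path(__file__).parent.joinpath("panel")

def create_response(panel_file):
    """Return a handler that always serves one fixed panel file."""
    path = panel_dir.joinpath(f"{panel_file}.html")

    async def serve(request):
        return web.FileResponse(path)

    return serve

app = web.Application()
app.add_routes([
    web.get('/panel', create_response('hassio-main-es5')),  # one fixed file
    web.static('/app', panel_dir),                          # whole directory
])
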

async def start(self):
"""Run rest api webserver."""
self._handler = self.webapp.make_handler(loop=self._loop)
await self._runner.setup()
self._site = web.TCPSite(
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5)

try:
self.server = await self._loop.create_server(
self._handler, "0.0.0.0", "80")
await self._site.start()
except OSError as err:
_LOGGER.fatal(
"Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
else:
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)

async def stop(self):
"""Stop rest api webserver."""
if self.server:
self.server.close()
await self.server.wait_closed()
await self.webapp.shutdown()
if not self._site:
return

if self._handler:
await self._handler.shutdown(60)
await self.webapp.cleanup()
# Shutdown running API
await self._site.stop()
await self._runner.cleanup()

_LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)
@@ -6,6 +6,7 @@ import voluptuous as vol
from voluptuous.humanize import humanize_error

from .utils import api_process, api_process_raw, api_validate
from ..addons.utils import rating_security
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
@@ -17,10 +18,12 @@ from ..const import (
ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
ATTR_DISCOVERY,
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT)
ATTR_DISCOVERY, ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API,
ATTR_FULL_ACCESS, ATTR_PROTECTED, ATTR_RATING,
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT, REQUEST_FROM)
from ..coresys import CoreSysAttributes
from ..validate import DOCKER_PORTS
from ..validate import DOCKER_PORTS, ALSA_DEVICE
from ..exceptions import APINotSupportedError

_LOGGER = logging.getLogger(__name__)

@@ -33,6 +36,9 @@ SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
vol.Optional(ATTR_PROTECTED): vol.Boolean(),
})


@@ -40,10 +46,10 @@ class APIAddons(CoreSysAttributes):
"""Handle rest api for addons functions."""

def _extract_addon(self, request, check_installed=True):
"""Return addon and if not exists trow a exception."""
addon = self._addons.get(request.match_info.get('addon'))
"""Return addon, throw an exception if it doesn't exist."""
addon = self.sys_addons.get(request.match_info.get('addon'))
if not addon:
raise RuntimeError("Addon not exists")
raise RuntimeError("Addon does not exist")

if check_installed and not addon.is_installed:
raise RuntimeError("Addon is not installed")
@@ -62,7 +68,7 @@ class APIAddons(CoreSysAttributes):
async def list(self, request):
"""Return all addons / repositories."""
data_addons = []
for addon in self._addons.list_addons:
for addon in self.sys_addons.list_addons:
data_addons.append({
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
@@ -79,7 +85,7 @@ class APIAddons(CoreSysAttributes):
})

data_repositories = []
for repository in self._addons.list_repositories:
for repository in self.sys_addons.list_repositories:
data_repositories.append({
ATTR_SLUG: repository.slug,
ATTR_NAME: repository.name,
@@ -96,7 +102,7 @@ class APIAddons(CoreSysAttributes):
@api_process
async def reload(self, request):
"""Reload all addons data."""
await asyncio.shield(self._addons.reload(), loop=self._loop)
await asyncio.shield(self.sys_addons.reload())
return True

@api_process
@@ -106,6 +112,7 @@ class APIAddons(CoreSysAttributes):

return {
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: addon.description,
ATTR_LONG_DESCRIPTION: addon.long_description,
ATTR_VERSION: addon.version_installed,
@@ -113,6 +120,8 @@ class APIAddons(CoreSysAttributes):
ATTR_REPOSITORY: addon.repository,
ATTR_LAST_VERSION: addon.last_version,
ATTR_STATE: await addon.state(),
ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon),
ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options,
ATTR_URL: addon.url,
@@ -123,6 +132,8 @@ class APIAddons(CoreSysAttributes):
ATTR_HOST_IPC: addon.host_ipc,
ATTR_HOST_DBUS: addon.host_dbus,
ATTR_PRIVILEGED: addon.privileged,
ATTR_FULL_ACCESS: addon.with_full_access,
ATTR_APPARMOR: addon.apparmor,
ATTR_DEVICES: self._pretty_devices(addon),
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
@@ -132,6 +143,8 @@ class APIAddons(CoreSysAttributes):
ATTR_HASSIO_API: addon.access_hassio_api,
ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
ATTR_GPIO: addon.with_gpio,
ATTR_DEVICETREE: addon.with_devicetree,
ATTR_DOCKER_API: addon.access_docker_api,
ATTR_AUDIO: addon.with_audio,
ATTR_AUDIO_INPUT: addon.audio_input,
ATTR_AUDIO_OUTPUT: addon.audio_output,
@@ -144,6 +157,11 @@ class APIAddons(CoreSysAttributes):
"""Store user options for addon."""
addon = self._extract_addon(request)

# Have Access
if addon.slug == request[REQUEST_FROM]:
_LOGGER.error("Add-on can't modify its own options!")
raise APINotSupportedError()

addon_schema = SCHEMA_OPTIONS.extend({
vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
})
@@ -162,6 +180,9 @@ class APIAddons(CoreSysAttributes):
addon.audio_input = body[ATTR_AUDIO_INPUT]
if ATTR_AUDIO_OUTPUT in body:
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
if ATTR_PROTECTED in body:
_LOGGER.warning("Protected flag changing for %s!", addon.slug)
addon.protected = body[ATTR_PROTECTED]

addon.save_data()
return True
@@ -189,13 +210,13 @@ class APIAddons(CoreSysAttributes):
def install(self, request):
"""Install addon."""
addon = self._extract_addon(request, check_installed=False)
return asyncio.shield(addon.install(), loop=self._loop)
return asyncio.shield(addon.install())

@api_process
def uninstall(self, request):
"""Uninstall addon."""
addon = self._extract_addon(request)
return asyncio.shield(addon.uninstall(), loop=self._loop)
return asyncio.shield(addon.uninstall())

@api_process
def start(self, request):
@@ -209,13 +230,13 @@ class APIAddons(CoreSysAttributes):
except vol.Invalid as ex:
raise RuntimeError(humanize_error(options, ex)) from None

return asyncio.shield(addon.start(), loop=self._loop)
return asyncio.shield(addon.start())

@api_process
def stop(self, request):
"""Stop addon."""
addon = self._extract_addon(request)
return asyncio.shield(addon.stop(), loop=self._loop)
return asyncio.shield(addon.stop())

@api_process
def update(self, request):
@@ -225,13 +246,13 @@ class APIAddons(CoreSysAttributes):
if addon.last_version == addon.version_installed:
raise RuntimeError("No update available!")

return asyncio.shield(addon.update(), loop=self._loop)
return asyncio.shield(addon.update())

@api_process
def restart(self, request):
"""Restart addon."""
addon = self._extract_addon(request)
return asyncio.shield(addon.restart(), loop=self._loop)
return asyncio.shield(addon.restart())

@api_process
def rebuild(self, request):
@@ -240,7 +261,7 @@ class APIAddons(CoreSysAttributes):
if not addon.need_build:
raise RuntimeError("Only local build addons are supported")

return asyncio.shield(addon.rebuild(), loop=self._loop)
return asyncio.shield(addon.rebuild())

@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
@@ -286,4 +307,4 @@ class APIAddons(CoreSysAttributes):
raise RuntimeError("STDIN not supported by addon")

data = await request.read()
return await asyncio.shield(addon.write_stdin(data), loop=self._loop)
return await asyncio.shield(addon.write_stdin(data))
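
A pattern repeated through this file: the explicit loop=self._loop argument is dropped (asyncio picks up the running loop on its own), while asyncio.shield stays. The shield is what keeps a half-finished install or update alive when the HTTP request task is cancelled, e.g. on client disconnect. A toy demonstration:

import asyncio

async def install():
    """Stand-in for a long operation such as addon.install()."""
    await asyncio.sleep(2)
    print("install finished")

async def handler():
    # Cancelling handler() does not cancel the shielded install().
    await asyncio.shield(install())

async def main():
    task = asyncio.ensure_future(handler())
    await asyncio.sleep(0.1)
    task.cancel()              # simulate the HTTP request being aborted
    await asyncio.sleep(2.5)   # install() still runs to completion

asyncio.get_event_loop().run_until_complete(main())
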
@@ -21,7 +21,7 @@ class APIDiscovery(CoreSysAttributes):

def _extract_message(self, request):
"""Extract discovery message from URL."""
message = self._services.discovery.get(request.match_info.get('uuid'))
message = self.sys_discovery.get(request.match_info.get('uuid'))
if not message:
raise RuntimeError("Discovery message not found")
return message
@@ -30,7 +30,7 @@ class APIDiscovery(CoreSysAttributes):
async def list(self, request):
"""Show registered services."""
discovery = []
for message in self._services.discovery.list_messages:
for message in self.sys_discovery.list_messages:
discovery.append({
ATTR_PROVIDER: message.provider,
ATTR_UUID: message.uuid,
@@ -45,7 +45,7 @@ class APIDiscovery(CoreSysAttributes):
async def set_discovery(self, request):
"""Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request)
message = self._services.discovery.send(
message = self.sys_discovery.send(
provider=request[REQUEST_FROM], **body)

return {ATTR_UUID: message.uuid}
@@ -68,5 +68,5 @@ class APIDiscovery(CoreSysAttributes):
"""Delete a discovery message."""
message = self._extract_message(request)

self._services.discovery.remove(message)
self.sys_discovery.remove(message)
return True
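
For context, a hedged sketch of driving these endpoints from the client side, as an add-on would. The http://hassio host name and X-HASSIO-KEY header follow the add-on conventions of this era; the request payload is hypothetical and would have to match SCHEMA_DISCOVERY in practice:

import asyncio
import os

import aiohttp

async def announce():
    headers = {'X-HASSIO-KEY': os.environ.get('HASSIO_TOKEN', '')}
    async with aiohttp.ClientSession(headers=headers) as session:
        # POST /services/discovery registers a message; the response
        # carries the uuid returned by set_discovery above.
        async with session.post(
                'http://hassio/services/discovery',
                json={'service': 'mqtt', 'config': {'host': 'example'}},
        ) as resp:
            print(await resp.json())

asyncio.get_event_loop().run_until_complete(announce())
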
hassio/api/hardware.py (new file, 34 lines)
@@ -0,0 +1,34 @@
"""Init file for HassIO hardware rest api."""
import logging

from .utils import api_process
from ..const import (
ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


class APIHardware(CoreSysAttributes):
"""Handle rest api for hardware functions."""

@api_process
async def info(self, request):
"""Show hardware info."""
return {
ATTR_SERIAL: list(self.sys_hardware.serial_devices),
ATTR_INPUT: list(self.sys_hardware.input_devices),
ATTR_DISK: list(self.sys_hardware.disk_devices),
ATTR_GPIO: list(self.sys_hardware.gpio_devices),
ATTR_AUDIO: self.sys_hardware.audio_devices,
}

@api_process
async def audio(self, request):
"""Show ALSA audio devices."""
return {
ATTR_AUDIO: {
ATTR_INPUT: self.sys_host.alsa.input_devices,
ATTR_OUTPUT: self.sys_host.alsa.output_devices,
}
}
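
As with every view in this API package, APIHardware gets its dependencies from one injected coresys object and reads them back through sys_* attributes. A rough illustration of that wiring; this is a sketch of the idea, not the real hassio.coresys implementation:

class CoreSys:
    """Container owning all shared subsystems (sketch)."""

    def __init__(self, hardware, host):
        self.hardware = hardware
        self.host = host

class CoreSysAttributes:
    """Mixin exposing the shared subsystems as sys_* properties (sketch)."""

    coresys = None

    @property
    def sys_hardware(self):
        return self.coresys.hardware

    @property
    def sys_host(self):
        return self.coresys.host

Each view is then wired with a single assignment, exactly as the registration code earlier does with api_hardware.coresys = self.coresys.
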
hassio/api/hassos.py (new file, 53 lines)
@@ -0,0 +1,53 @@
"""Init file for Hass.io hassos rest api."""
import asyncio
import logging

import voluptuous as vol

from .utils import api_process, api_validate
from ..const import (
ATTR_VERSION, ATTR_BOARD, ATTR_VERSION_LATEST, ATTR_VERSION_CLI,
ATTR_VERSION_CLI_LATEST)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
})


class APIHassOS(CoreSysAttributes):
"""Handle rest api for hassos functions."""

@api_process
async def info(self, request):
"""Return hassos information."""
return {
ATTR_VERSION: self.sys_hassos.version,
ATTR_VERSION_CLI: self.sys_hassos.version_cli,
ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
ATTR_BOARD: self.sys_hassos.board,
}

@api_process
async def update(self, request):
"""Update HassOS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)

await asyncio.shield(self.sys_hassos.update(version))

@api_process
async def update_cli(self, request):
"""Update HassOS CLI."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)

await asyncio.shield(self.sys_hassos.update_cli(version))

@api_process
def config_sync(self, request):
"""Trigger config reload on HassOS."""
return asyncio.shield(self.sys_hassos.config_sync())
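
All handlers here go through the project's own @api_process decorator from hassio/api/utils.py. As a rough approximation (a sketch, not the actual implementation), such a decorator awaits the handler and wraps the result in the JSON envelope the endpoints answer with:

from functools import wraps

from aiohttp import web

def api_process(method):
    """Wrap an API handler into a JSON result envelope (sketch)."""
    @wraps(method)
    async def wrapper(api, request):
        try:
            answer = await method(api, request)
        except RuntimeError as err:
            return web.json_response(
                {'result': 'error', 'message': str(err)}, status=400)
        if isinstance(answer, dict):
            return web.json_response({'result': 'ok', 'data': answer})
        return web.json_response({'result': 'ok'})
    return wrapper
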
@@ -9,7 +9,8 @@ from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, CONTENT_TYPE_BINARY)
ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, ATTR_MACHINE,
ATTR_REFRESH_TOKEN, CONTENT_TYPE_BINARY)
from ..coresys import CoreSysAttributes
from ..validate import NETWORK_PORT, DOCKER_IMAGE

@@ -20,15 +21,16 @@ _LOGGER = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_BOOT): vol.Boolean(),
vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
vol.Any(None, vol.Coerce(str)),
vol.Maybe(vol.Coerce(str)),
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
vol.Any(None, DOCKER_IMAGE),
vol.Optional(ATTR_PORT): NETWORK_PORT,
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_SSL): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
vol.Optional(ATTR_WAIT_BOOT):
vol.All(vol.Coerce(int), vol.Range(min=60)),
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
})
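
The schema change above swaps vol.Any(None, ...) for vol.Maybe(...); the two are interchangeable, since Maybe(x) is shorthand for Any(None, x):

import voluptuous as vol

old = vol.Schema(vol.Any(None, vol.Coerce(str)))
new = vol.Schema(vol.Maybe(vol.Coerce(str)))

assert old(None) is None and new(None) is None
assert old('0.74.2') == new('0.74.2') == '0.74.2'
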

SCHEMA_VERSION = vol.Schema({
@@ -43,15 +45,16 @@ class APIHomeAssistant(CoreSysAttributes):
async def info(self, request):
"""Return Home Assistant information."""
return {
ATTR_VERSION: self._homeassistant.version,
ATTR_LAST_VERSION: self._homeassistant.last_version,
ATTR_IMAGE: self._homeassistant.image,
ATTR_CUSTOM: self._homeassistant.is_custom_image,
ATTR_BOOT: self._homeassistant.boot,
ATTR_PORT: self._homeassistant.api_port,
ATTR_SSL: self._homeassistant.api_ssl,
ATTR_WATCHDOG: self._homeassistant.watchdog,
ATTR_WAIT_BOOT: self._homeassistant.wait_boot,
ATTR_VERSION: self.sys_homeassistant.version,
ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
ATTR_MACHINE: self.sys_homeassistant.machine,
ATTR_IMAGE: self.sys_homeassistant.image,
ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
ATTR_BOOT: self.sys_homeassistant.boot,
ATTR_PORT: self.sys_homeassistant.api_port,
ATTR_SSL: self.sys_homeassistant.api_ssl,
ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
}

@api_process
@@ -60,34 +63,36 @@ class APIHomeAssistant(CoreSysAttributes):
body = await api_validate(SCHEMA_OPTIONS, request)

if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
self._homeassistant.image = body[ATTR_IMAGE]
self._homeassistant.last_version = body[ATTR_LAST_VERSION]
self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]

if ATTR_BOOT in body:
self._homeassistant.boot = body[ATTR_BOOT]
self.sys_homeassistant.boot = body[ATTR_BOOT]

if ATTR_PORT in body:
self._homeassistant.api_port = body[ATTR_PORT]
self.sys_homeassistant.api_port = body[ATTR_PORT]

if ATTR_PASSWORD in body:
self._homeassistant.api_password = body[ATTR_PASSWORD]
self.sys_homeassistant.api_password = body[ATTR_PASSWORD]

if ATTR_SSL in body:
self._homeassistant.api_ssl = body[ATTR_SSL]
self.sys_homeassistant.api_ssl = body[ATTR_SSL]

if ATTR_WATCHDOG in body:
self._homeassistant.watchdog = body[ATTR_WATCHDOG]
self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]

if ATTR_WAIT_BOOT in body:
self._homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]

self._homeassistant.save_data()
return True
if ATTR_REFRESH_TOKEN in body:
self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]

self.sys_homeassistant.save_data()

@api_process
async def stats(self, request):
"""Return resource information."""
stats = await self._homeassistant.stats()
stats = await self.sys_homeassistant.stats()
if not stats:
raise RuntimeError("No stats available")

@@ -105,38 +110,34 @@ class APIHomeAssistant(CoreSysAttributes):
async def update(self, request):
"""Update homeassistant."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self._homeassistant.last_version)
version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)

if version == self._homeassistant.version:
raise RuntimeError("Version {} is already in use".format(version))

return await asyncio.shield(
self._homeassistant.update(version), loop=self._loop)
await asyncio.shield(self.sys_homeassistant.update(version))

@api_process
def stop(self, request):
"""Stop homeassistant."""
return asyncio.shield(self._homeassistant.stop(), loop=self._loop)
return asyncio.shield(self.sys_homeassistant.stop())

@api_process
def start(self, request):
"""Start homeassistant."""
return asyncio.shield(self._homeassistant.start(), loop=self._loop)
return asyncio.shield(self.sys_homeassistant.start())

@api_process
def restart(self, request):
"""Restart homeassistant."""
return asyncio.shield(self._homeassistant.restart(), loop=self._loop)
return asyncio.shield(self.sys_homeassistant.restart())

@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return homeassistant docker logs."""
return self._homeassistant.logs()
return self.sys_homeassistant.logs()

@api_process
async def check(self, request):
"""Check config of homeassistant."""
result = await self._homeassistant.check_config()
result = await self.sys_homeassistant.check_config()
if not result.valid:
raise RuntimeError(result.log)

@@ -4,23 +4,19 @@ import logging

import voluptuous as vol

from .utils import api_process_hostcontrol, api_process, api_validate
from .utils import api_process, api_validate
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT, ATTR_GPIO)
ATTR_HOSTNAME, ATTR_FEATURES, ATTR_KERNEL, ATTR_OPERATING_SYSTEM,
ATTR_CHASSIS, ATTR_DEPLOYMENT, ATTR_STATE, ATTR_NAME, ATTR_DESCRIPTON,
ATTR_SERVICES, ATTR_CPE)
from ..coresys import CoreSysAttributes
from ..validate import ALSA_CHANNEL

_LOGGER = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
SERVICE = 'service'

SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})


@@ -31,62 +27,75 @@ class APIHost(CoreSysAttributes):
async def info(self, request):
"""Return host information."""
return {
ATTR_TYPE: self._host_control.type,
ATTR_VERSION: self._host_control.version,
ATTR_LAST_VERSION: self._host_control.last_version,
ATTR_FEATURES: self._host_control.features,
ATTR_HOSTNAME: self._host_control.hostname,
ATTR_OS: self._host_control.os_info,
ATTR_CHASSIS: self.sys_host.info.chassis,
ATTR_CPE: self.sys_host.info.cpe,
ATTR_FEATURES: self.sys_host.supperted_features,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_KERNEL: self.sys_host.info.kernel,
}

@api_process
async def options(self, request):
"""Process host options."""
"""Edit host settings."""
body = await api_validate(SCHEMA_OPTIONS, request)

if ATTR_AUDIO_OUTPUT in body:
self._config.audio_output = body[ATTR_AUDIO_OUTPUT]
if ATTR_AUDIO_INPUT in body:
self._config.audio_input = body[ATTR_AUDIO_INPUT]

self._config.save_data()
return True

@api_process_hostcontrol
def reboot(self, request):
"""Reboot host."""
return self._host_control.reboot()

@api_process_hostcontrol
def shutdown(self, request):
"""Poweroff host."""
return self._host_control.shutdown()

@api_process_hostcontrol
async def reload(self, request):
"""Reload host data."""
await self._host_control.load()
return True

@api_process_hostcontrol
async def update(self, request):
"""Update host OS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self._host_control.last_version)

if version == self._host_control.version:
raise RuntimeError(f"Version {version} is already in use")

return await asyncio.shield(
self._host_control.update(version=version), loop=self._loop)
# hostname
if ATTR_HOSTNAME in body:
await asyncio.shield(
self.sys_host.control.set_hostname(body[ATTR_HOSTNAME]))

@api_process
async def hardware(self, request):
"""Return local hardware infos."""
def reboot(self, request):
"""Reboot host."""
return asyncio.shield(self.sys_host.control.reboot())

@api_process
def shutdown(self, request):
"""Poweroff host."""
return asyncio.shield(self.sys_host.control.shutdown())

@api_process
def reload(self, request):
"""Reload host data."""
return asyncio.shield(self.sys_host.reload())

@api_process
async def services(self, request):
"""Return list of available services."""
services = []
for unit in self.sys_host.services:
services.append({
ATTR_NAME: unit.name,
ATTR_DESCRIPTON: unit.description,
ATTR_STATE: unit.state,
})

return {
ATTR_SERIAL: list(self._hardware.serial_devices),
ATTR_INPUT: list(self._hardware.input_devices),
ATTR_DISK: list(self._hardware.disk_devices),
ATTR_GPIO: list(self._hardware.gpio_devices),
ATTR_AUDIO: self._hardware.audio_devices,
ATTR_SERVICES: services
}

@api_process
def service_start(self, request):
"""Start a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.start(unit))

@api_process
def service_stop(self, request):
"""Stop a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.stop(unit))

@api_process
def service_reload(self, request):
"""Reload a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.reload(unit))

@api_process
def service_restart(self, request):
"""Restart a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.restart(unit))
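
The {service} placeholder in the routes registered for these four handlers is recovered through request.match_info. A minimal standalone version of the pattern:

from aiohttp import web

async def service_start(request):
    # '/host/services/ssh/start' -> unit == 'ssh'
    unit = request.match_info.get('service')
    return web.json_response({'result': 'ok', 'data': {'service': unit}})

app = web.Application()
app.add_routes([web.post('/host/services/{service}/start', service_start)])
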
@@ -1,38 +0,0 @@
"""Init file for HassIO network rest api."""
import logging

import voluptuous as vol

from .utils import api_process, api_process_hostcontrol, api_validate
from ..const import ATTR_HOSTNAME
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})


class APINetwork(CoreSysAttributes):
"""Handle rest api for network functions."""

@api_process
async def info(self, request):
"""Show network settings."""
return {
ATTR_HOSTNAME: self._host_control.hostname,
}

@api_process_hostcontrol
async def options(self, request):
"""Edit network settings."""
body = await api_validate(SCHEMA_OPTIONS, request)

# hostname
if ATTR_HOSTNAME in body:
if self._host_control.hostname != body[ATTR_HOSTNAME]:
await self._host_control.set_hostname(body[ATTR_HOSTNAME])

return True
hassio/api/panel/chunk.0ef4ef1053fe3d5107b5.js (new file, 1 line)
File diff suppressed because one or more lines are too long

hassio/api/panel/chunk.0ef4ef1053fe3d5107b5.js.gz (new binary file)
Binary file not shown.

hassio/api/panel/chunk.a8e86d80be46b3b6e16d.js (new file, 2 lines)
File diff suppressed because one or more lines are too long

hassio/api/panel/chunk.a8e86d80be46b3b6e16d.js.LICENSE (new file, 419 lines)
@@ -0,0 +1,419 @@
(The Polymer license header below is repeated throughout the file, once per bundled component; only the copyright year varies between 2014, 2015, and 2016.)

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2014 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
BIN  hassio/api/panel/chunk.a8e86d80be46b3b6e16d.js.gz  Normal file
Binary file not shown.

1  hassio/api/panel/chunk.c77b56beea1d4547ff5f.js  Normal file
File diff suppressed because one or more lines are too long

BIN  hassio/api/panel/chunk.c77b56beea1d4547ff5f.js.gz  Normal file
Binary file not shown.

1  hassio/api/panel/chunk.c93f37c558ff32991708.js  Normal file
@@ -0,0 +1 @@
(window.webpackJsonp=window.webpackJsonp||[]).push([[5],{104:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(99),i=t.n(e),o=t(97),u=t.n(o),a=i.a,c=u.a}}]);
BIN  hassio/api/panel/chunk.c93f37c558ff32991708.js.gz  Normal file
Binary file not shown.

2  hassio/api/panel/chunk.f3880aa331d3ef2ddf32.js  Normal file
File diff suppressed because one or more lines are too long

389  hassio/api/panel/chunk.f3880aa331d3ef2ddf32.js.LICENSE  Normal file
@@ -0,0 +1,389 @@
/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

(The remaining blocks of this 389-line LICENSE file are identical repetitions of the 2017 and 2015 Polymer headers above.)
BIN  hassio/api/panel/chunk.f3880aa331d3ef2ddf32.js.gz  Normal file
Binary file not shown.

1  hassio/api/panel/chunk.ff92199b0d422767d108.js  Normal file
File diff suppressed because one or more lines are too long

BIN  hassio/api/panel/chunk.ff92199b0d422767d108.js.gz  Normal file
Binary file not shown.

1  hassio/api/panel/entrypoint.js  Normal file
@@ -0,0 +1 @@
!function(e){function n(n){for(var t,o,i=n[0],u=n[1],a=0,l=[];a<i.length;a++)o=i[a],r[o]&&l.push(r[o][0]),r[o]=0;for(t in u)Object.prototype.hasOwnProperty.call(u,t)&&(e[t]=u[t]);for(f&&f(n);l.length;)l.shift()()}var t={},r={6:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var i=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=i);var u,a=document.getElementsByTagName("head")[0],f=document.createElement("script");f.charset="utf-8",f.timeout=120,o.nc&&f.setAttribute("nonce",o.nc),f.src=function(e){return o.p+"chunk."+{0:"f3880aa331d3ef2ddf32",1:"a8e86d80be46b3b6e16d",2:"0ef4ef1053fe3d5107b5",3:"ff92199b0d422767d108",4:"c77b56beea1d4547ff5f",5:"c93f37c558ff32991708"}[e]+".js"}(e),u=function(n){f.onerror=f.onload=null,clearTimeout(l);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),i=n&&n.target&&n.target.src,u=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");u.type=o,u.request=i,t[1](u)}r[e]=void 0}};var l=setTimeout(function(){u({type:"timeout",target:f})},12e4);f.onerror=f.onload=u,a.appendChild(f)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],u=i.push.bind(i);i.push=n,i=i.slice();for(var a=0;a<i.length;a++)n(i[a]);var f=u;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(3)]).then(t.bind(null,1)),Promise.all([t.e(0),t.e(1),t.e(2)]).then(t.bind(null,2))})}]);

BIN  hassio/api/panel/entrypoint.js.gz  Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -11,27 +11,28 @@
   padding: 0;
 }
 </style>
-<script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
 </head>
 <body>
 <hassio-app></hassio-app>
 <script>
-function addScript(src) {
-  var e = document.createElement('script');
-  e.src = src;
-  document.write(e.outerHTML);
-}
+function addScript(src) {
+  var e = document.createElement('script');
+  e.src = src;
+  document.head.appendChild(e);
+}
+if (!window.parent.HASS_DEV) {
+  addScript('/frontend_es5/custom-elements-es5-adapter.js');
+}
 var webComponentsSupported = (
   'customElements' in window &&
   'import' in document.createElement('link') &&
   'content' in document.createElement('template'));
 if (!webComponentsSupported) {
   addScript('/static/webcomponents-lite.js');
 }
 </script>
 <!--
 Disabled while we make Home Assistant able to serve the right files.
 <script src="./app.js"></script>
 -->
 <link rel='import' href='./hassio-app.html'>
 <link rel='import' href='/static/mdi.html' async>
 </body>
 </html>
Binary file not shown.
@@ -1,15 +1,18 @@
 """Utils for HomeAssistant Proxy."""
 import asyncio
+from contextlib import asynccontextmanager
 import logging
 
 import aiohttp
 from aiohttp import web
-from aiohttp.web_exceptions import HTTPBadGateway, HTTPInternalServerError
+from aiohttp.web_exceptions import (
+    HTTPBadGateway, HTTPInternalServerError, HTTPUnauthorized)
 from aiohttp.hdrs import CONTENT_TYPE
 import async_timeout
 
 from ..const import HEADER_HA_ACCESS
 from ..coresys import CoreSysAttributes
+from ..exceptions import HomeAssistantAuthError, HomeAssistantAPIError
 
 _LOGGER = logging.getLogger(__name__)
@@ -20,50 +23,48 @@ class APIProxy(CoreSysAttributes):
     def _check_access(self, request):
         """Check the Hass.io token."""
         hassio_token = request.headers.get(HEADER_HA_ACCESS)
-        addon = self._addons.from_uuid(hassio_token)
+        addon = self.sys_addons.from_uuid(hassio_token)
 
         if not addon:
-            _LOGGER.warning("Unknown Home-Assistant API access!")
+            _LOGGER.warning("Unknown HomeAssistant API access!")
         elif not addon.access_homeassistant_api:
             _LOGGER.warning("Not permitted API access: %s", addon.slug)
         else:
             _LOGGER.info("%s access from %s", request.path, addon.slug)
             return
 
         raise HTTPUnauthorized()
 
+    @asynccontextmanager
     async def _api_client(self, request, path, timeout=300):
         """Return a client request with proxy origin for Home-Assistant."""
-        url = f"{self._homeassistant.api_url}/api/{path}"
-
         try:
             data = None
-            headers = {}
-            method = getattr(self._websession_ssl, request.method.lower())
-            params = request.query or None
-
             # read data
-            with async_timeout.timeout(30, loop=self._loop):
+            with async_timeout.timeout(30):
                 data = await request.read()
 
             if data:
-                headers.update({CONTENT_TYPE: request.content_type})
+                content_type = request.content_type
+            else:
+                content_type = None
 
-            # need api password?
-            if self._homeassistant.api_password:
-                headers = {HEADER_HA_ACCESS: self._homeassistant.api_password}
-
-            # reset headers
-            if not headers:
-                headers = None
-
-            client = await method(
-                url, data=data, headers=headers, timeout=timeout,
-                params=params
-            )
-
-            return client
+            async with self.sys_homeassistant.make_request(
+                    request.method.lower(), f'api/{path}',
+                    content_type=content_type,
+                    data=data,
+                    timeout=timeout,
+            ) as resp:
+                yield resp
+                return
 
+        except HomeAssistantAuthError:
+            _LOGGER.error("Authenticate error on API for request %s", path)
+        except HomeAssistantAPIError:
+            _LOGGER.error("Error on API for request %s", path)
         except aiohttp.ClientError as err:
-            _LOGGER.error("Client error on API %s request %s.", path, err)
+            _LOGGER.error("Client error on API %s request %s", path, err)
         except asyncio.TimeoutError:
-            _LOGGER.error("Client timeout error on API request %s.", path)
+            _LOGGER.error("Client timeout error on API request %s", path)
 
         raise HTTPBadGateway()
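The new `_api_client` above is an async context manager: callers enter it with `async with`, and the upstream response is released even if the handler raises mid-proxy. A minimal sketch of that pattern, assuming a plain aiohttp session and hypothetical names (this is not the supervisor's actual `make_request`):

from contextlib import asynccontextmanager

import aiohttp


@asynccontextmanager
async def proxied_request(session: aiohttp.ClientSession, method, url, **kwargs):
    """Yield an upstream response and guarantee it is released afterwards."""
    resp = await session.request(method, url, **kwargs)
    try:
        yield resp
    finally:
        # Runs on normal exit and when the caller raises mid-stream.
        resp.release()

Usage mirrors the hunk above: `async with proxied_request(session, 'get', url) as resp: ...`.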
@@ -72,30 +73,25 @@ class APIProxy(CoreSysAttributes):
         self._check_access(request)
 
         _LOGGER.info("Home-Assistant EventStream start")
-        client = await self._api_client(request, 'stream', timeout=None)
-
-        response = web.StreamResponse()
-        response.content_type = request.headers.get(CONTENT_TYPE)
-        try:
-            await response.prepare(request)
-            while True:
-                data = await client.content.read(10)
-                if not data:
-                    await response.write_eof()
-                    break
-                response.write(data)
-
-        except aiohttp.ClientError:
-            await response.write_eof()
-
-        except asyncio.CancelledError:
-            pass
-
-        finally:
-            client.close()
-            _LOGGER.info("Home-Assistant EventStream close")
-
-        return response
+        async with self._api_client(request, 'stream', timeout=None) as client:
+            response = web.StreamResponse()
+            response.content_type = request.headers.get(CONTENT_TYPE)
+            try:
+                await response.prepare(request)
+                while True:
+                    data = await client.content.read(10)
+                    if not data:
+                        break
+                    await response.write(data)
+
+            except aiohttp.ClientError:
+                pass
+
+            finally:
+                client.close()
+                _LOGGER.info("Home-Assistant EventStream close")
+
+            return response
 
     async def api(self, request):
         """Proxy HomeAssistant API Requests."""
@@ -103,38 +99,63 @@ class APIProxy(CoreSysAttributes):
 
         # Normal request
         path = request.match_info.get('path', '')
-        client = await self._api_client(request, path)
-
-        data = await client.read()
-        return web.Response(
-            body=data,
-            status=client.status,
-            content_type=client.content_type
-        )
+        async with self._api_client(request, path) as client:
+            data = await client.read()
+            return web.Response(
+                body=data,
+                status=client.status,
+                content_type=client.content_type
+            )
 
     async def _websocket_client(self):
         """Initialize a websocket api connection."""
-        url = f"{self._homeassistant.api_url}/api/websocket"
+        url = f"{self.sys_homeassistant.api_url}/api/websocket"
 
         try:
-            client = await self._websession_ssl.ws_connect(
+            client = await self.sys_websession_ssl.ws_connect(
                 url, heartbeat=60, verify_ssl=False)
 
             # handle authentication
-            for _ in range(2):
-                data = await client.receive_json()
-                if data.get('type') == 'auth_ok':
-                    return client
-                elif data.get('type') == 'auth_required':
-                    await client.send_json({
-                        'type': 'auth',
-                        'api_password': self._homeassistant.api_password,
-                    })
-
-            _LOGGER.error("Authentication to Home-Assistant websocket")
-
-        except (aiohttp.ClientError, RuntimeError) as err:
+            data = await client.receive_json()
+
+            if data.get('type') == 'auth_ok':
+                return client
+
+            if data.get('type') != 'auth_required':
+                # Invalid protocol
+                _LOGGER.error(
+                    'Got unexpected response from HA websocket: %s', data)
+                raise HTTPBadGateway()
+
+            if self.sys_homeassistant.refresh_token:
+                await self.sys_homeassistant.ensure_access_token()
+                await client.send_json({
+                    'type': 'auth',
+                    'access_token': self.sys_homeassistant.access_token,
+                })
+            else:
+                await client.send_json({
+                    'type': 'auth',
+                    'api_password': self.sys_homeassistant.api_password,
+                })
+
+            data = await client.receive_json()
+
+            if data.get('type') == 'auth_ok':
+                return client
+
+            # Renew the Token is invalid
+            if (data.get('type') == 'invalid_auth' and
+                    self.sys_homeassistant.refresh_token):
+                self.sys_homeassistant.access_token = None
+                return await self._websocket_client()
+
+            raise HomeAssistantAuthError()
+
+        except (RuntimeError, ValueError) as err:
             _LOGGER.error("Client error on websocket API %s.", err)
+        except HomeAssistantAuthError as err:
+            _LOGGER.error("Failed authentication to HomeAssistant websocket")
 
         raise HTTPBadGateway()
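The hunk above implements the Home Assistant websocket auth flow: read `auth_required`, answer with an `auth` message (an access token when a refresh token exists, otherwise the legacy `api_password`), expect `auth_ok`, and retry once if the token is rejected as `invalid_auth`. A hedged client-side sketch of the same handshake (the URL and token handling here are assumptions, not supervisor code):

import aiohttp


async def ws_handshake(url, access_token):
    """Perform the auth_required -> auth -> auth_ok handshake."""
    session = aiohttp.ClientSession()
    ws = await session.ws_connect(url, heartbeat=60)

    msg = await ws.receive_json()
    if msg.get('type') != 'auth_required':
        raise RuntimeError(f"Unexpected first message: {msg}")

    await ws.send_json({'type': 'auth', 'access_token': access_token})

    msg = await ws.receive_json()
    if msg.get('type') != 'auth_ok':
        await ws.close()
        await session.close()
        raise RuntimeError(f"Authentication failed: {msg}")
    return session, ws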
@@ -150,22 +171,28 @@ class APIProxy(CoreSysAttributes):
         try:
             await server.send_json({
                 'type': 'auth_required',
-                'ha_version': self._homeassistant.version,
+                'ha_version': self.sys_homeassistant.version,
             })
 
             # Check API access
             response = await server.receive_json()
-            hassio_token = response.get('api_password')
-            addon = self._addons.from_uuid(hassio_token)
+            hassio_token = (response.get('api_password') or
+                            response.get('access_token'))
+            addon = self.sys_addons.from_uuid(hassio_token)
 
-            if not addon:
+            if not addon or not addon.access_homeassistant_api:
                 _LOGGER.warning("Unauthorized websocket access!")
-            else:
-                _LOGGER.info("Websocket access from %s", addon.slug)
+                await server.send_json({
+                    'type': 'auth_invalid',
+                    'message': 'Invalid access',
+                })
+                return server
+
+            _LOGGER.info("Websocket access from %s", addon.slug)
 
             await server.send_json({
                 'type': 'auth_ok',
-                'ha_version': self._homeassistant.version,
+                'ha_version': self.sys_homeassistant.version,
             })
         except (RuntimeError, ValueError) as err:
             _LOGGER.error("Can't initialize handshake: %s", err)
@@ -180,16 +207,16 @@ class APIProxy(CoreSysAttributes):
         server_read = None
         while not server.closed and not client.closed:
             if not client_read:
-                client_read = asyncio.ensure_future(
-                    client.receive_str(), loop=self._loop)
+                client_read = self.sys_create_task(
+                    client.receive_str())
             if not server_read:
-                server_read = asyncio.ensure_future(
-                    server.receive_str(), loop=self._loop)
+                server_read = self.sys_create_task(
+                    server.receive_str())
 
             # wait until data need to be processed
             await asyncio.wait(
                 [client_read, server_read],
-                loop=self._loop, return_when=asyncio.FIRST_COMPLETED
+                return_when=asyncio.FIRST_COMPLETED
             )
 
             # server
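The loop in this hunk relays frames in both directions by keeping one pending read per side and racing them with `asyncio.wait(..., return_when=FIRST_COMPLETED)`; only the side that completed gets re-armed. A self-contained sketch of that pump (simplified: text frames only, no close handling):

import asyncio


async def pump(client_ws, server_ws):
    """Relay text frames both ways using the FIRST_COMPLETED pattern."""
    client_read = server_read = None
    while True:
        if client_read is None:
            client_read = asyncio.ensure_future(client_ws.receive_str())
        if server_read is None:
            server_read = asyncio.ensure_future(server_ws.receive_str())

        # Wake as soon as either side has a frame ready.
        await asyncio.wait(
            [client_read, server_read],
            return_when=asyncio.FIRST_COMPLETED)

        if client_read.done():
            await server_ws.send_str(client_read.result())
            client_read = None  # re-arm only the finished side
        if server_read.done():
            await client_ws.send_str(server_read.result())
            server_read = None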
@@ -6,6 +6,7 @@ from aiohttp.web import middleware
 from aiohttp.web_exceptions import HTTPUnauthorized
 
 from ..const import HEADER_TOKEN, REQUEST_FROM
+from ..coresys import CoreSysAttributes
 
 _LOGGER = logging.getLogger(__name__)
@@ -16,35 +17,43 @@ NO_SECURITY_CHECK = set((
 ))
 
 
-@middleware
-async def security_layer(request, handler):
-    """Check security access of this layer."""
-    coresys = request.app['coresys']
-    hassio_token = request.headers.get(HEADER_TOKEN)
-
-    # Ignore security check
-    for rule in NO_SECURITY_CHECK:
-        if rule.match(request.path):
-            _LOGGER.debug("Passthrough %s", request.path)
-            return await handler(request)
-
-    # Need to be removed later
-    if not hassio_token:
-        _LOGGER.warning("Invalid token for access %s", request.path)
-        request[REQUEST_FROM] = 'UNKNOWN'
-        return await handler(request)
-
-    # Home-Assistant
-    if hassio_token == coresys.homeassistant.uuid:
-        _LOGGER.debug("%s access from Home-Assistant", request.path)
-        request[REQUEST_FROM] = 'homeassistant'
-        return await handler(request)
-
-    # Add-on
-    addon = coresys.addons.from_uuid(hassio_token)
-    if addon:
-        _LOGGER.info("%s access from %s", request.path, addon.slug)
-        request[REQUEST_FROM] = addon.slug
-        return await handler(request)
-
-    raise HTTPUnauthorized()
+class SecurityMiddleware(CoreSysAttributes):
+    """Security middleware functions."""
+
+    def __init__(self, coresys):
+        """Initialize security middleware."""
+        self.coresys = coresys
+
+    @middleware
+    async def token_validation(self, request, handler):
+        """Check security access of this layer."""
+        hassio_token = request.headers.get(HEADER_TOKEN)
+
+        # Ignore security check
+        for rule in NO_SECURITY_CHECK:
+            if rule.match(request.path):
+                _LOGGER.debug("Passthrough %s", request.path)
+                return await handler(request)
+
+        # Home-Assistant
+        if hassio_token == self.sys_homeassistant.uuid:
+            _LOGGER.debug("%s access from Home-Assistant", request.path)
+            request[REQUEST_FROM] = 'homeassistant'
+
+        # Host
+        if hassio_token == self.sys_machine_id:
+            _LOGGER.debug("%s access from Host", request.path)
+            request[REQUEST_FROM] = 'host'
+
+        # Add-on
+        addon = self.sys_addons.from_uuid(hassio_token) \
+            if hassio_token else None
+        if addon:
+            _LOGGER.info("%s access from %s", request.path, addon.slug)
+            request[REQUEST_FROM] = addon.slug
+
+        if request.get(REQUEST_FROM):
+            return await handler(request)
+
+        raise HTTPUnauthorized()
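This refactor moves the module-level `@middleware` function onto a `CoreSysAttributes` subclass, so the `coresys` handle is injected once at construction instead of being fetched from `request.app` on every call. A minimal sketch of wiring such a class-based middleware into an aiohttp app (the token map and header name are made up for illustration):

from aiohttp import web


class TokenMiddleware:
    """Class-based aiohttp middleware holding injected state."""

    def __init__(self, valid_tokens):
        self.valid_tokens = valid_tokens

    @web.middleware
    async def token_validation(self, request, handler):
        token = request.headers.get('X-Hassio-Key')
        if token not in self.valid_tokens:
            raise web.HTTPUnauthorized()
        request['request_from'] = self.valid_tokens[token]
        return await handler(request)


security = TokenMiddleware({'secret': 'homeassistant'})
app = web.Application(middlewares=[security.token_validation])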
@@ -10,10 +10,10 @@ class APIServices(CoreSysAttributes):
     """Handle rest api for services functions."""
 
     def _extract_service(self, request):
-        """Return service and if not exists trow a exception."""
-        service = self._services.get(request.match_info.get('service'))
+        """Return service, throw an exception if it doesn't exist."""
+        service = self.sys_services.get(request.match_info.get('service'))
         if not service:
-            raise RuntimeError("Service not exists")
+            raise RuntimeError("Service does not exist")
 
         return service
 
@@ -21,7 +21,7 @@ class APIServices(CoreSysAttributes):
     async def list(self, request):
         """Show register services."""
         services = []
-        for service in self._services.list_services:
+        for service in self.sys_services.list_services:
            services.append({
                ATTR_SLUG: service.slug,
                ATTR_AVAILABLE: service.enabled,
@@ -49,17 +49,17 @@ class APISnapshots(CoreSysAttributes):
     """Handle rest api for snapshot functions."""
 
     def _extract_snapshot(self, request):
-        """Return addon and if not exists trow a exception."""
-        snapshot = self._snapshots.get(request.match_info.get('snapshot'))
+        """Return snapshot, throw an exception if it doesn't exist."""
+        snapshot = self.sys_snapshots.get(request.match_info.get('snapshot'))
         if not snapshot:
-            raise RuntimeError("Snapshot not exists")
+            raise RuntimeError("Snapshot does not exist")
         return snapshot
 
     @api_process
     async def list(self, request):
         """Return snapshot list."""
         data_snapshots = []
-        for snapshot in self._snapshots.list_snapshots:
+        for snapshot in self.sys_snapshots.list_snapshots:
             data_snapshots.append({
                 ATTR_SLUG: snapshot.slug,
                 ATTR_NAME: snapshot.name,
 
@@ -75,7 +75,7 @@ class APISnapshots(CoreSysAttributes):
     @api_process
     async def reload(self, request):
         """Reload snapshot list."""
-        await asyncio.shield(self._snapshots.reload(), loop=self._loop)
+        await asyncio.shield(self.sys_snapshots.reload())
         return True
 
     @api_process
@@ -110,7 +110,7 @@ class APISnapshots(CoreSysAttributes):
         """Full-Snapshot a snapshot."""
         body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
         snapshot = await asyncio.shield(
-            self._snapshots.do_snapshot_full(**body), loop=self._loop)
+            self.sys_snapshots.do_snapshot_full(**body))
 
         if snapshot:
             return {ATTR_SLUG: snapshot.slug}
@@ -121,7 +121,7 @@ class APISnapshots(CoreSysAttributes):
         """Partial-Snapshot a snapshot."""
         body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
         snapshot = await asyncio.shield(
-            self._snapshots.do_snapshot_partial(**body), loop=self._loop)
+            self.sys_snapshots.do_snapshot_partial(**body))
 
         if snapshot:
             return {ATTR_SLUG: snapshot.slug}
@@ -134,9 +134,7 @@ class APISnapshots(CoreSysAttributes):
         body = await api_validate(SCHEMA_RESTORE_FULL, request)
 
         return await asyncio.shield(
-            self._snapshots.do_restore_full(snapshot, **body),
-            loop=self._loop
-        )
+            self.sys_snapshots.do_restore_full(snapshot, **body))
 
     @api_process
     async def restore_partial(self, request):
@@ -145,15 +143,13 @@ class APISnapshots(CoreSysAttributes):
         body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
 
         return await asyncio.shield(
-            self._snapshots.do_restore_partial(snapshot, **body),
-            loop=self._loop
-        )
+            self.sys_snapshots.do_restore_partial(snapshot, **body))
 
     @api_process
     async def remove(self, request):
         """Remove a snapshot."""
         snapshot = self._extract_snapshot(request)
-        return self._snapshots.remove(snapshot)
+        return self.sys_snapshots.remove(snapshot)
 
     async def download(self, request):
         """Download a snapshot file."""
@@ -167,7 +163,7 @@ class APISnapshots(CoreSysAttributes):
     @api_process
     async def upload(self, request):
         """Upload a snapshot file."""
-        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp_dir:
+        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
             tar_file = Path(temp_dir, f"snapshot.tar")
 
             try:
@@ -183,7 +179,7 @@ class APISnapshots(CoreSysAttributes):
                 return False
 
             snapshot = await asyncio.shield(
-                self._snapshots.import_snapshot(tar_file), loop=self._loop)
+                self.sys_snapshots.import_snapshot(tar_file))
 
             if snapshot:
                 return {ATTR_SLUG: snapshot.slug}
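Each of these handlers wraps the long-running snapshot coroutine in `asyncio.shield()`, so an aborted HTTP request cancels only the handler's await, not the snapshot job itself; the explicit `loop=` argument is dropped because the running loop is implicit. A small sketch of the behaviour:

import asyncio


async def do_snapshot():
    """Stand-in for a long-running job that must survive cancellation."""
    await asyncio.sleep(30)
    return 'slug-1234'


async def handler():
    # If handler() is cancelled (e.g. the client disconnects), shield()
    # keeps do_snapshot() running in the background; only this await stops.
    return await asyncio.shield(do_snapshot())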
@@ -6,20 +6,19 @@ import voluptuous as vol
 
 from .utils import api_process, api_process_raw, api_validate
 from ..const import (
-    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
+    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_ARCH,
     HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
     ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
     ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
     ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
     ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
 from ..coresys import CoreSysAttributes
-from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES
+from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES, CHANNELS
 
 _LOGGER = logging.getLogger(__name__)
 
 SCHEMA_OPTIONS = vol.Schema({
     # pylint: disable=no-value-for-parameter
-    vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
+    vol.Optional(ATTR_CHANNEL): CHANNELS,
     vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
     vol.Optional(ATTR_TIMEZONE): validate_timezone,
     vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
 
@@ -42,7 +41,7 @@ class APISupervisor(CoreSysAttributes):
     async def info(self, request):
         """Return host information."""
         list_addons = []
-        for addon in self._addons.list_addons:
+        for addon in self.sys_addons.list_addons:
             if addon.is_installed:
                 list_addons.append({
                     ATTR_NAME: addon.name,
 
@@ -58,13 +57,13 @@ class APISupervisor(CoreSysAttributes):
 
         return {
             ATTR_VERSION: HASSIO_VERSION,
-            ATTR_LAST_VERSION: self._updater.version_hassio,
-            ATTR_BETA_CHANNEL: self._updater.beta_channel,
-            ATTR_ARCH: self._arch,
-            ATTR_WAIT_BOOT: self._config.wait_boot,
-            ATTR_TIMEZONE: self._config.timezone,
+            ATTR_LAST_VERSION: self.sys_updater.version_hassio,
+            ATTR_CHANNEL: self.sys_updater.channel,
+            ATTR_ARCH: self.sys_arch,
+            ATTR_WAIT_BOOT: self.sys_config.wait_boot,
+            ATTR_TIMEZONE: self.sys_config.timezone,
             ATTR_ADDONS: list_addons,
-            ATTR_ADDONS_REPOSITORIES: self._config.addons_repositories,
+            ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
         }
 
     @api_process
@@ -72,27 +71,27 @@ class APISupervisor(CoreSysAttributes):
         """Set supervisor options."""
         body = await api_validate(SCHEMA_OPTIONS, request)
 
-        if ATTR_BETA_CHANNEL in body:
-            self._updater.beta_channel = body[ATTR_BETA_CHANNEL]
+        if ATTR_CHANNEL in body:
+            self.sys_updater.channel = body[ATTR_CHANNEL]
 
         if ATTR_TIMEZONE in body:
-            self._config.timezone = body[ATTR_TIMEZONE]
+            self.sys_config.timezone = body[ATTR_TIMEZONE]
 
         if ATTR_WAIT_BOOT in body:
-            self._config.wait_boot = body[ATTR_WAIT_BOOT]
+            self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
 
         if ATTR_ADDONS_REPOSITORIES in body:
             new = set(body[ATTR_ADDONS_REPOSITORIES])
-            await asyncio.shield(self._addons.load_repositories(new))
+            await asyncio.shield(self.sys_addons.load_repositories(new))
 
-        self._updater.save_data()
-        self._config.save_data()
+        self.sys_updater.save_data()
+        self.sys_config.save_data()
         return True
 
     @api_process
     async def stats(self, request):
         """Return resource information."""
-        stats = await self._supervisor.stats()
+        stats = await self.sys_supervisor.stats()
         if not stats:
             raise RuntimeError("No stats available")
@@ -110,22 +109,22 @@ class APISupervisor(CoreSysAttributes):
|
||||
async def update(self, request):
|
||||
"""Update supervisor OS."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self._updater.version_hassio)
|
||||
version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)
|
||||
|
||||
if version == self._supervisor.version:
|
||||
if version == self.sys_supervisor.version:
|
||||
raise RuntimeError("Version {} is already in use".format(version))
|
||||
|
||||
return await asyncio.shield(
|
||||
self._supervisor.update(version), loop=self._loop)
|
||||
self.sys_supervisor.update(version))
|
||||
|
||||
@api_process
|
||||
async def reload(self, request):
|
||||
"""Reload addons, config ect."""
|
||||
"""Reload addons, config etc."""
|
||||
tasks = [
|
||||
self._updater.reload(),
|
||||
self.sys_updater.reload(),
|
||||
]
|
||||
results, _ = await asyncio.shield(
|
||||
asyncio.wait(tasks, loop=self._loop), loop=self._loop)
|
||||
asyncio.wait(tasks))
|
||||
|
||||
for result in results:
|
||||
if result.exception() is not None:
|
||||
@@ -136,4 +135,4 @@ class APISupervisor(CoreSysAttributes):
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
"""Return supervisor docker logs."""
|
||||
return self._supervisor.logs()
|
||||
return self.sys_supervisor.logs()
|
||||
|
hassio/api/utils.py
@@ -1,16 +1,15 @@
 """Init file for HassIO util for rest api."""
 import json
-import hashlib
 import logging

 from aiohttp import web
-from aiohttp.web_exceptions import HTTPServiceUnavailable
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

 from ..const import (
     JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
     CONTENT_TYPE_BINARY)
+from ..exceptions import HassioError

 _LOGGER = logging.getLogger(__name__)

@@ -31,6 +30,8 @@ def api_process(method):
         """Return api information."""
         try:
             answer = await method(api, *args, **kwargs)
+        except HassioError:
+            return api_return_error()
         except RuntimeError as err:
             return api_return_error(message=str(err))

@@ -38,37 +39,13 @@ def api_process(method):
             return api_return_ok(data=answer)
         if isinstance(answer, web.Response):
             return answer
-        elif answer:
-            return api_return_ok()
-        return api_return_error()
+        elif isinstance(answer, bool) and not answer:
+            return api_return_error()
+        return api_return_ok()

     return wrap_api


-def api_process_hostcontrol(method):
-    """Wrap HostControl calls to rest api."""
-    async def wrap_hostcontrol(api, *args, **kwargs):
-        """Return host information."""
-        # pylint: disable=protected-access
-        if not api._host_control.active:
-            raise HTTPServiceUnavailable()
-
-        try:
-            answer = await method(api, *args, **kwargs)
-        except RuntimeError as err:
-            return api_return_error(message=str(err))
-
-        if isinstance(answer, dict):
-            return api_return_ok(data=answer)
-        elif answer is None:
-            return api_return_error("Function is not supported")
-        elif answer:
-            return api_return_ok()
-        return api_return_error()
-
-    return wrap_hostcontrol
-
-
 def api_process_raw(content):
     """Wrap content_type into function."""
     def wrap_method(method):
@@ -81,6 +58,9 @@ def api_process_raw(content):
             except RuntimeError as err:
                 msg_data = str(err).encode()
                 msg_type = CONTENT_TYPE_BINARY
+            except HassioError:
+                msg_data = b''
+                msg_type = CONTENT_TYPE_BINARY

             return web.Response(body=msg_data, content_type=msg_type)

@@ -89,7 +69,7 @@ def api_process_raw(content):

 def api_return_error(message=None):
-    """Return a API error message."""
+    """Return an API error message."""
     return web.json_response({
         JSON_RESULT: RESULT_ERROR,
         JSON_MESSAGE: message,
@@ -97,7 +77,7 @@ def api_return_error(message=None):

 def api_return_ok(data=None):
-    """Return a API ok answer."""
+    """Return an API ok answer."""
     return web.json_response({
         JSON_RESULT: RESULT_OK,
         JSON_DATA: data or {},
@@ -113,9 +93,3 @@ async def api_validate(schema, request):
         raise RuntimeError(humanize_error(data, ex)) from None

     return data
-
-
-def hash_password(password):
-    """Hash and salt our passwords."""
-    key = ")*()*SALT_HASSIO2123{}6554547485HSKA!!*JSLAfdasda$".format(password)
-    return hashlib.sha256(key.encode()).hexdigest()
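Note: the reworked `api_process` decorator above is the single error boundary for every REST handler in this compare: `HassioError` maps to a bare error envelope, `RuntimeError` carries its message, and plain return values are wrapped into a JSON result. A minimal sketch of a handler built on this pattern (the `APIExample` class is hypothetical, for illustration only):

    class APIExample:
        """Hypothetical consumer of the api_process decorator."""

        @api_process
        async def info(self, request):
            # A dict is wrapped as {"result": "ok", "data": {...}}.
            return {'version': HASSIO_VERSION}

        @api_process
        async def fail(self, request):
            # Becomes {"result": "error", "message": "No stats available"}.
            raise RuntimeError("No stats available")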
hassio/bootstrap.py
@@ -7,6 +7,7 @@ from pathlib import Path

 from colorlog import ColoredFormatter

+from .core import HassIO
 from .addons import AddonManager
 from .api import RestAPI
 from .const import SOCKET_DOCKER
@@ -17,27 +18,46 @@ from .snapshots import SnapshotManager
 from .tasks import Tasks
 from .updater import Updater
 from .services import ServiceManager
+from .services import Discovery
+from .host import HostManager
+from .dbus import DBusManager
+from .hassos import HassOS

 _LOGGER = logging.getLogger(__name__)

+ENV_SHARE = 'SUPERVISOR_SHARE'
+ENV_NAME = 'SUPERVISOR_NAME'
+ENV_REPO = 'HOMEASSISTANT_REPOSITORY'
+
+MACHINE_ID = Path('/etc/machine-id')
+

 def initialize_coresys(loop):
     """Initialize HassIO coresys/objects."""
     coresys = CoreSys(loop)

     # Initialize core objects
+    coresys.core = HassIO(coresys)
     coresys.updater = Updater(coresys)
     coresys.api = RestAPI(coresys)
     coresys.supervisor = Supervisor(coresys)
     coresys.homeassistant = HomeAssistant(coresys)
     coresys.addons = AddonManager(coresys)
     coresys.snapshots = SnapshotManager(coresys)
+    coresys.host = HostManager(coresys)
     coresys.tasks = Tasks(coresys)
     coresys.services = ServiceManager(coresys)
+    coresys.discovery = Discovery(coresys)
+    coresys.dbus = DBusManager(coresys)
+    coresys.hassos = HassOS(coresys)

     # bootstrap config
     initialize_system_data(coresys)

+    # Set Machine/Host ID
+    if MACHINE_ID.exists():
+        coresys.machine_id = MACHINE_ID.read_text().strip()
+
     return coresys


@@ -46,10 +66,11 @@ def initialize_system_data(coresys):
     config = coresys.config

     # homeassistant config folder
-    if not config.path_config.is_dir():
+    if not config.path_homeassistant.is_dir():
         _LOGGER.info(
-            "Create Home-Assistant config folder %s", config.path_config)
-        config.path_config.mkdir()
+            "Create Home-Assistant config folder %s",
+            config.path_homeassistant)
+        config.path_homeassistant.mkdir()

     # hassio ssl folder
     if not config.path_ssl.is_dir():
@@ -87,6 +108,11 @@ def initialize_system_data(coresys):
         _LOGGER.info("Create hassio share folder %s", config.path_share)
         config.path_share.mkdir()

+    # apparmor folder
+    if not config.path_apparmor.is_dir():
+        _LOGGER.info("Create hassio apparmor folder %s", config.path_apparmor)
+        config.path_apparmor.mkdir()
+
     return config


@@ -131,8 +157,7 @@ def initialize_logging():
 def check_environment():
     """Check if all environment are exists."""
     # check environment variables
-    for key in ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME',
-                'HOMEASSISTANT_REPOSITORY'):
+    for key in (ENV_SHARE, ENV_NAME, ENV_REPO):
         try:
             os.environ[key]
         except KeyError:
@@ -146,7 +171,12 @@ def check_environment():

     # check socat exec
     if not shutil.which('socat'):
-        _LOGGER.fatal("Can0t find socat program!")
+        _LOGGER.fatal("Can't find socat program!")
         return False

+    # check gdbus exec
+    if not shutil.which('gdbus'):
+        _LOGGER.fatal("Can't find gdbus program!")
+        return False
+
     return True
hassio/config.py
@@ -2,11 +2,14 @@
 from datetime import datetime
 import logging
 import os
+import re
 from pathlib import Path, PurePath

+import pytz
+
 from .const import (
     FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
-    ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
+    ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
 from .utils.dt import parse_datetime
 from .utils.json import JsonConfig
 from .validate import SCHEMA_HASSIO_CONFIG
@@ -25,9 +28,12 @@ ADDONS_DATA = PurePath("addons/data")
 BACKUP_DATA = PurePath("backup")
 SHARE_DATA = PurePath("share")
 TMP_DATA = PurePath("tmp")
+APPARMOR_DATA = PurePath("apparmor")

 DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()

+RE_TIMEZONE = re.compile(r"time_zone: (?P<timezone>[\w/\-+]+)")
+

 class CoreConfig(JsonConfig):
     """Hold all core config data."""
@@ -39,7 +45,21 @@ class CoreConfig(JsonConfig):
     @property
     def timezone(self):
         """Return system timezone."""
-        return self._data[ATTR_TIMEZONE]
+        config_file = Path(self.path_homeassistant, 'configuration.yaml')
+        try:
+            assert config_file.exists()
+            configuration = config_file.read_text()
+
+            data = RE_TIMEZONE.search(configuration)
+            assert data
+
+            timezone = data.group('timezone')
+            pytz.timezone(timezone)
+        except (pytz.exceptions.UnknownTimeZoneError, OSError, AssertionError):
+            _LOGGER.debug("Can't parse HomeAssistant timezone")
+            return self._data[ATTR_TIMEZONE]
+
+        return timezone

     @timezone.setter
     def timezone(self, value):
@@ -82,12 +102,12 @@ class CoreConfig(JsonConfig):
         return PurePath(os.environ['SUPERVISOR_SHARE'])

     @property
-    def path_extern_config(self):
+    def path_extern_homeassistant(self):
         """Return config path extern for docker."""
         return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))

     @property
-    def path_config(self):
+    def path_homeassistant(self):
         """Return config path inside supervisor."""
         return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)

@@ -136,6 +156,11 @@ class CoreConfig(JsonConfig):
         """Return hass.io temp folder."""
         return Path(HASSIO_DATA, TMP_DATA)

+    @property
+    def path_extern_tmp(self):
+        """Return hass.io temp folder for docker."""
+        return PurePath(self.path_extern_hassio, TMP_DATA)
+
     @property
     def path_backup(self):
         """Return root backup data folder."""
@@ -151,6 +176,11 @@ class CoreConfig(JsonConfig):
         """Return root share data folder."""
         return Path(HASSIO_DATA, SHARE_DATA)

+    @property
+    def path_apparmor(self):
+        """Return root apparmor profile folder."""
+        return Path(HASSIO_DATA, APPARMOR_DATA)
+
     @property
     def path_extern_share(self):
         """Return root share data folder extern for docker."""
@@ -174,23 +204,3 @@ class CoreConfig(JsonConfig):
             return

         self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
-
-    @property
-    def audio_output(self):
-        """Return ALSA audio output card,dev."""
-        return self._data.get(ATTR_AUDIO_OUTPUT)
-
-    @audio_output.setter
-    def audio_output(self, value):
-        """Set ALSA audio output card,dev."""
-        self._data[ATTR_AUDIO_OUTPUT] = value
-
-    @property
-    def audio_input(self):
-        """Return ALSA audio input card,dev."""
-        return self._data.get(ATTR_AUDIO_INPUT)
-
-    @audio_input.setter
-    def audio_input(self, value):
-        """Set ALSA audio input card,dev."""
-        self._data[ATTR_AUDIO_INPUT] = value
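Note: the new `timezone` property above prefers the `time_zone:` entry from the user's `configuration.yaml` and validates it with `pytz`, falling back to the stored value on any failure. A standalone illustration of what `RE_TIMEZONE` accepts (assumes only `re` and `pytz`):

    import re
    import pytz

    RE_TIMEZONE = re.compile(r"time_zone: (?P<timezone>[\w/\-+]+)")

    match = RE_TIMEZONE.search("homeassistant:\n  time_zone: Europe/Vienna\n")
    assert match
    tz_name = match.group('timezone')  # 'Europe/Vienna'
    pytz.timezone(tz_name)             # raises UnknownTimeZoneError if invalid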
hassio/const.py
@@ -2,12 +2,17 @@
 from pathlib import Path
 from ipaddress import ip_network

-HASSIO_VERSION = '0.92'
+HASSIO_VERSION = '127'

-URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
-                      'hassio/{}/version.json')
-URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
+URL_HASSIO_VERSION = \
+    "https://s3.amazonaws.com/hassio-version/{channel}.json"
+URL_HASSIO_APPARMOR = \
+    "https://s3.amazonaws.com/hassio-version/apparmor.txt"
+
+URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
+URL_HASSOS_OTA = (
+    "https://github.com/home-assistant/hassos/releases/download/"
+    "{version}/hassos_{board}-{version}.raucb")

 HASSIO_DATA = Path("/data")

@@ -18,7 +23,6 @@ FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
 FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")

 SOCKET_DOCKER = Path("/var/run/docker.sock")
-SOCKET_HC = Path("/var/run/hassio-hc.sock")

 DOCKER_NETWORK = 'hassio'
 DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
@@ -27,6 +31,7 @@ DOCKER_NETWORK_RANGE = ip_network('172.30.33.0/24')
 LABEL_VERSION = 'io.hass.version'
 LABEL_ARCH = 'io.hass.arch'
 LABEL_TYPE = 'io.hass.type'
+LABEL_MACHINE = 'io.hass.machine'

 META_ADDON = 'addon'
 META_SUPERVISOR = 'supervisor'
@@ -45,14 +50,16 @@ CONTENT_TYPE_JSON = 'application/json'
 CONTENT_TYPE_TEXT = 'text/plain'
 CONTENT_TYPE_TAR = 'application/tar'
 HEADER_HA_ACCESS = 'x-ha-access'
-HEADER_TOKEN = 'X-HASSIO-KEY'
+HEADER_TOKEN = 'x-hassio-key'

 ENV_TOKEN = 'HASSIO_TOKEN'
 ENV_TIME = 'TZ'

+REQUEST_FROM = 'HASSIO_FROM'
+
+ATTR_MACHINE = 'machine'
 ATTR_WAIT_BOOT = 'wait_boot'
 ATTR_DEPLOYMENT = 'deployment'
 ATTR_WATCHDOG = 'watchdog'
 ATTR_CHANGELOG = 'changelog'
 ATTR_DATE = 'date'
@@ -61,16 +68,18 @@ ATTR_LONG_DESCRIPTION = 'long_description'
 ATTR_HOSTNAME = 'hostname'
 ATTR_TIMEZONE = 'timezone'
 ATTR_ARGS = 'args'
-ATTR_OS = 'os'
+ATTR_OPERATING_SYSTEM = 'operating_system'
+ATTR_CHASSIS = 'chassis'
 ATTR_TYPE = 'type'
 ATTR_SOURCE = 'source'
 ATTR_FEATURES = 'features'
 ATTR_ADDONS = 'addons'
 ATTR_VERSION = 'version'
+ATTR_VERSION_LATEST = 'version_latest'
 ATTR_AUTO_UART = 'auto_uart'
 ATTR_LAST_BOOT = 'last_boot'
 ATTR_LAST_VERSION = 'last_version'
-ATTR_BETA_CHANNEL = 'beta_channel'
+ATTR_CHANNEL = 'channel'
 ATTR_NAME = 'name'
 ATTR_SLUG = 'slug'
 ATTR_DESCRIPTON = 'description'
@@ -158,6 +167,21 @@ ATTR_SERVICES = 'services'
 ATTR_DISCOVERY = 'discovery'
 ATTR_PROTECTED = 'protected'
 ATTR_CRYPTO = 'crypto'
+ATTR_BRANCH = 'branch'
+ATTR_KERNEL = 'kernel'
+ATTR_APPARMOR = 'apparmor'
+ATTR_DEVICETREE = 'devicetree'
+ATTR_CPE = 'cpe'
+ATTR_BOARD = 'board'
+ATTR_HASSOS = 'hassos'
+ATTR_HASSOS_CLI = 'hassos_cli'
+ATTR_VERSION_CLI = 'version_cli'
+ATTR_VERSION_CLI_LATEST = 'version_cli_latest'
+ATTR_REFRESH_TOKEN = 'refresh_token'
+ATTR_DOCKER_API = 'docker_api'
+ATTR_FULL_ACCESS = 'full_access'
+ATTR_PROTECTED = 'protected'
+ATTR_RATING = 'rating'

 SERVICE_MQTT = 'mqtt'

@@ -185,6 +209,10 @@ ARCH_AARCH64 = 'aarch64'
 ARCH_AMD64 = 'amd64'
 ARCH_I386 = 'i386'

+CHANNEL_STABLE = 'stable'
+CHANNEL_BETA = 'beta'
+CHANNEL_DEV = 'dev'
+
 REPOSITORY_CORE = 'core'
 REPOSITORY_LOCAL = 'local'

@@ -197,3 +225,21 @@ SNAPSHOT_FULL = 'full'
 SNAPSHOT_PARTIAL = 'partial'

 CRYPTO_AES128 = 'aes128'

+SECURITY_PROFILE = 'profile'
+SECURITY_DEFAULT = 'default'
+SECURITY_DISABLE = 'disable'
+
+PRIVILEGED_NET_ADMIN = 'NET_ADMIN'
+PRIVILEGED_SYS_ADMIN = 'SYS_ADMIN'
+PRIVILEGED_SYS_RAWIO = 'SYS_RAWIO'
+PRIVILEGED_IPC_LOCK = 'IPC_LOCK'
+PRIVILEGED_SYS_TIME = 'SYS_TIME'
+PRIVILEGED_SYS_NICE = 'SYS_NICE'
+PRIVILEGED_SYS_RESOURCE = 'SYS_RESOURCE'
+
+FEATURES_SHUTDOWN = 'shutdown'
+FEATURES_REBOOT = 'reboot'
+FEATURES_HASSOS = 'hassos'
+FEATURES_HOSTNAME = 'hostname'
+FEATURES_SERVICES = 'services'
hassio/core.py (113 changes)
@@ -1,11 +1,14 @@
 """Main file for HassIO."""
+from contextlib import suppress
 import asyncio
 import logging

+import async_timeout
+
 from .coresys import CoreSysAttributes
 from .const import (
     STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE)
-from .utils.dt import fetch_timezone
+from .exceptions import HassioError, HomeAssistantError

 _LOGGER = logging.getLogger(__name__)

@@ -19,99 +22,119 @@ class HassIO(CoreSysAttributes):

     async def setup(self):
         """Setup HassIO orchestration."""
-        # update timezone
-        if self._config.timezone == 'UTC':
-            self._config.timezone = await fetch_timezone(self._websession)
+        # Load Supervisor
+        await self.sys_supervisor.load()

-        # supervisor
-        await self._supervisor.load()
+        # Load DBus
+        await self.sys_dbus.load()

-        # hostcontrol
-        await self._host_control.load()
+        # Load Host
+        await self.sys_host.load()

-        # Load homeassistant
-        await self._homeassistant.load()
+        # Load HassOS
+        await self.sys_hassos.load()

-        # Load addons
-        await self._addons.load()
+        # Load Home Assistant
+        await self.sys_homeassistant.load()
+
+        # Load Add-ons
+        await self.sys_addons.load()

         # rest api views
-        await self._api.load()
+        await self.sys_api.load()

         # load last available data
-        await self._updater.load()
+        await self.sys_updater.load()

         # load last available data
-        await self._snapshots.load()
+        await self.sys_snapshots.load()

         # load services
-        await self._services.load()
+        await self.sys_services.load()

         # start dns forwarding
-        self._loop.create_task(self._dns.start())
-
-        # start addon mark as initialize
-        await self._addons.auto_boot(STARTUP_INITIALIZE)
+        self.sys_create_task(self.sys_dns.start())

     async def start(self):
         """Start HassIO orchestration."""
         # on release channel, try update itself
-        # on beta channel, only read new versions
-        if not self._updater.beta_channel and self._supervisor.need_update:
-            if await self._supervisor.update():
+        # on dev mode, only read new versions
+        if not self.sys_dev and self.sys_supervisor.need_update:
+            if await self.sys_supervisor.update():
                 return
         else:
-            _LOGGER.info("Ignore Hass.io auto updates on beta mode")
+            _LOGGER.info("Ignore Hass.io auto updates on dev channel")

         # start api
-        await self._api.start()
-        _LOGGER.info("Start API on %s", self._docker.network.supervisor)
+        await self.sys_api.start()
+
+        # start addon mark as initialize
+        await self.sys_addons.boot(STARTUP_INITIALIZE)

         try:
             # HomeAssistant is already running / supervisor have only reboot
-            if self._hardware.last_boot == self._config.last_boot:
+            if self.sys_hardware.last_boot == self.sys_config.last_boot:
                 _LOGGER.info("Hass.io reboot detected")
                 return

             # reset register services / discovery
-            self._services.reset()
+            self.sys_services.reset()

             # start addon mark as system
-            await self._addons.auto_boot(STARTUP_SYSTEM)
+            await self.sys_addons.boot(STARTUP_SYSTEM)

             # start addon mark as services
-            await self._addons.auto_boot(STARTUP_SERVICES)
+            await self.sys_addons.boot(STARTUP_SERVICES)

             # run HomeAssistant
-            if self._homeassistant.boot:
-                await self._homeassistant.start()
+            if self.sys_homeassistant.boot:
+                with suppress(HomeAssistantError):
+                    await self.sys_homeassistant.start()

             # start addon mark as application
-            await self._addons.auto_boot(STARTUP_APPLICATION)
+            await self.sys_addons.boot(STARTUP_APPLICATION)

             # store new last boot
-            self._config.last_boot = self._hardware.last_boot
-            self._config.save_data()
+            self.sys_config.last_boot = self.sys_hardware.last_boot
+            self.sys_config.save_data()

         finally:
             # Add core tasks into scheduler
-            await self._tasks.load()
+            await self.sys_tasks.load()

             # If landingpage / run upgrade in background
-            if self._homeassistant.version == 'landingpage':
-                self._loop.create_task(self._homeassistant.install())
+            if self.sys_homeassistant.version == 'landingpage':
+                self.sys_create_task(self.sys_homeassistant.install())

             _LOGGER.info("Hass.io is up and running")

     async def stop(self):
         """Stop a running orchestration."""
         # don't process scheduler anymore
-        self._scheduler.suspend = True
-
-        # process stop tasks
-        self._websession.close()
-        self._websession_ssl.close()
+        self.sys_scheduler.suspend = True

         # process async stop tasks
-        await asyncio.wait(
-            [self._api.stop(), self._dns.stop()], loop=self._loop)
+        try:
+            with async_timeout.timeout(10):
+                await asyncio.wait([
+                    self.sys_api.stop(),
+                    self.sys_dns.stop(),
+                    self.sys_websession.close(),
+                    self.sys_websession_ssl.close()
+                ])
+        except asyncio.TimeoutError:
+            _LOGGER.warning("Force Shutdown!")

         _LOGGER.info("Hass.io is down")
+
+    async def shutdown(self):
+        """Shutdown all running containers in correct order."""
+        await self.sys_addons.shutdown(STARTUP_APPLICATION)
+
+        # Close Home Assistant
+        with suppress(HassioError):
+            await self.sys_homeassistant.stop()
+
+        await self.sys_addons.shutdown(STARTUP_SERVICES)
+        await self.sys_addons.shutdown(STARTUP_SYSTEM)
+        await self.sys_addons.shutdown(STARTUP_INITIALIZE)
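Note: the rewritten `stop()` bounds shutdown with `async_timeout`: all cleanup coroutines are awaited together and a hang is cut off after ten seconds. The same pattern in isolation (a sketch following the diff's `with async_timeout.timeout(...)` usage, which matches the async_timeout releases of that era):

    import asyncio
    import async_timeout

    async def stop_all(*coros, timeout=10):
        """Await several cleanup tasks, but never block longer than timeout."""
        try:
            with async_timeout.timeout(timeout):
                await asyncio.wait([asyncio.ensure_future(c) for c in coros])
        except asyncio.TimeoutError:
            print("Force Shutdown!")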
hassio/coresys.py
@@ -2,21 +2,22 @@

 import aiohttp

+from .const import CHANNEL_DEV
 from .config import CoreConfig
 from .docker import DockerAPI
 from .misc.dns import DNSForward
 from .misc.hardware import Hardware
-from .misc.host_control import HostControl
 from .misc.scheduler import Scheduler


-class CoreSys(object):
+class CoreSys:
     """Class that handle all shared data."""

     def __init__(self, loop):
         """Initialize coresys."""
         # Static attributes
         self.exit_code = 0
+        self.machine_id = None

         # External objects
         self._loop = loop
@@ -30,9 +31,9 @@ class CoreSys(object):
         self._docker = DockerAPI()
         self._scheduler = Scheduler(loop=loop)
         self._dns = DNSForward(loop=loop)
-        self._host_control = HostControl(loop=loop)

         # Internal objects pointers
+        self._core = None
         self._homeassistant = None
         self._supervisor = None
         self._addons = None
@@ -40,7 +41,11 @@ class CoreSys(object):
         self._updater = None
         self._snapshots = None
         self._tasks = None
+        self._host = None
+        self._dbus = None
+        self._hassos = None
         self._services = None
+        self._discovery = None

     @property
     def arch(self):
@@ -49,6 +54,23 @@ class CoreSys(object):
             return self._supervisor.arch
         return None

+    @property
+    def machine(self):
+        """Return running machine type of hass.io system."""
+        if self._homeassistant:
+            return self._homeassistant.machine
+        return None
+
+    @property
+    def dev(self):
+        """Return True if we run dev modus."""
+        return self._updater.channel == CHANNEL_DEV
+
+    @property
+    def timezone(self):
+        """Return timezone."""
+        return self._config.timezone
+
     @property
     def loop(self):
         """Return loop object."""
@@ -90,9 +112,16 @@ class CoreSys(object):
         return self._dns

     @property
-    def host_control(self):
-        """Return HostControl object."""
-        return self._host_control
+    def core(self):
+        """Return HassIO object."""
+        return self._core
+
+    @core.setter
+    def core(self, value):
+        """Set a HassIO object."""
+        if self._core:
+            raise RuntimeError("HassIO already set!")
+        self._core = value

     @property
     def homeassistant(self):
@@ -125,7 +154,7 @@ class CoreSys(object):

     @api.setter
     def api(self, value):
-        """Set a API object."""
+        """Set an API object."""
         if self._api:
             raise RuntimeError("API already set!")
         self._api = value
@@ -190,14 +219,70 @@ class CoreSys(object):
             raise RuntimeError("Services already set!")
         self._services = value

+    @property
+    def discovery(self):
+        """Return ServiceManager object."""
+        return self._discovery
+
+    @discovery.setter
+    def discovery(self, value):
+        """Set a Discovery object."""
+        if self._discovery:
+            raise RuntimeError("Discovery already set!")
+        self._discovery = value
+
+    @property
+    def dbus(self):
+        """Return DBusManager object."""
+        return self._dbus
+
+    @dbus.setter
+    def dbus(self, value):
+        """Set a DBusManager object."""
+        if self._dbus:
+            raise RuntimeError("DBusManager already set!")
+        self._dbus = value
+
+    @property
+    def host(self):
+        """Return HostManager object."""
+        return self._host
+
+    @host.setter
+    def host(self, value):
+        """Set a HostManager object."""
+        if self._host:
+            raise RuntimeError("HostManager already set!")
+        self._host = value
+
+    @property
+    def hassos(self):
+        """Return HassOS object."""
+        return self._hassos
+
+    @hassos.setter
+    def hassos(self, value):
+        """Set a HassOS object."""
+        if self._hassos:
+            raise RuntimeError("HassOS already set!")
+        self._hassos = value
+
+    def run_in_executor(self, funct, *args):
+        """Wrapper for executor pool."""
+        return self._loop.run_in_executor(None, funct, *args)
+
+    def create_task(self, coroutine):
+        """Wrapper for async task."""
+        return self._loop.create_task(coroutine)


-class CoreSysAttributes(object):
+class CoreSysAttributes:
     """Inheret basic CoreSysAttributes."""

     coresys = None

     def __getattr__(self, name):
         """Mapping to coresys."""
-        if hasattr(self.coresys, name[1:]):
-            return getattr(self.coresys, name[1:])
-        raise AttributeError(f"Can't find {name} on {self.__class__}")
+        if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
+            return getattr(self.coresys, name[4:])
+        raise AttributeError(f"Can't resolve {name} on {self}")
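Note: the new `__getattr__` is the backbone of every `self.sys_*` call in this compare: any attribute starting with `sys_` on a `CoreSysAttributes` subclass is forwarded to the shared `CoreSys` instance. The lookup rule in a minimal standalone form (`FakeCoreSys` and `Consumer` are illustrative stand-ins):

    class FakeCoreSys:
        config = "core-config"

    class Consumer:
        coresys = FakeCoreSys()

        def __getattr__(self, name):
            # 'sys_config' resolves to coresys.config; anything else errors.
            if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
                return getattr(self.coresys, name[4:])
            raise AttributeError(f"Can't resolve {name} on {self}")

    assert Consumer().sys_config == "core-config"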
hassio/dbus/__init__.py (new file, 39 lines)
@@ -0,0 +1,39 @@
"""DBus interface objects."""

from .systemd import Systemd
from .hostname import Hostname
from .rauc import Rauc
from ..coresys import CoreSysAttributes


class DBusManager(CoreSysAttributes):
    """DBus Interface handler."""

    def __init__(self, coresys):
        """Initialize DBus Interface."""
        self.coresys = coresys

        self._systemd = Systemd()
        self._hostname = Hostname()
        self._rauc = Rauc()

    @property
    def systemd(self):
        """Return Systemd Interface."""
        return self._systemd

    @property
    def hostname(self):
        """Return hostname Interface."""
        return self._hostname

    @property
    def rauc(self):
        """Return rauc Interface."""
        return self._rauc

    async def load(self):
        """Connect interfaces to dbus."""
        await self.systemd.connect()
        await self.hostname.connect()
        await self.rauc.connect()
hassio/dbus/hostname.py (new file, 39 lines)
@@ -0,0 +1,39 @@
"""DBus interface for hostname."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'org.freedesktop.hostname1'
DBUS_OBJECT = '/org/freedesktop/hostname1'


class Hostname(DBusInterface):
    """Handle DBus interface for hostname/system."""

    async def connect(self):
        """Connect do bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to hostname")

    @dbus_connected
    def set_static_hostname(self, hostname):
        """Change local hostname.

        Return a coroutine.
        """
        return self.dbus.SetStaticHostname(hostname, False)

    @dbus_connected
    def get_properties(self):
        """Return local host informations.

        Return a coroutine.
        """
        return self.dbus.get_properties(DBUS_NAME)
hassio/dbus/interface.py (new file, 18 lines)
@@ -0,0 +1,18 @@
"""Interface class for dbus wrappers."""


class DBusInterface:
    """Handle DBus interface for hostname/system."""

    def __init__(self):
        """Initialize systemd."""
        self.dbus = None

    @property
    def is_connected(self):
        """Return True, if they is connected to dbus."""
        return self.dbus is not None

    async def connect(self):
        """Connect do bus."""
        raise NotImplementedError()
hassio/dbus/rauc.py (new file, 55 lines)
@@ -0,0 +1,55 @@
"""DBus interface for rauc."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'de.pengutronix.rauc'
DBUS_OBJECT = '/'


class Rauc(DBusInterface):
    """Handle DBus interface for rauc."""

    async def connect(self):
        """Connect do bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to rauc")

    @dbus_connected
    def install(self, raucb_file):
        """Install rauc bundle file.

        Return a coroutine.
        """
        return self.dbus.Installer.Install(raucb_file)

    @dbus_connected
    def get_slot_status(self):
        """Get slot status.

        Return a coroutine.
        """
        return self.dbus.Installer.GetSlotStatus()

    @dbus_connected
    def get_properties(self):
        """Return rauc informations.

        Return a coroutine.
        """
        return self.dbus.get_properties(f"{DBUS_NAME}.Installer")

    @dbus_connected
    def signal_completed(self):
        """Return a signal wrapper for completed signal.

        Return a coroutine.
        """
        return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
hassio/dbus/systemd.py (new file, 79 lines)
@@ -0,0 +1,79 @@
"""Interface to Systemd over dbus."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'org.freedesktop.systemd1'
DBUS_OBJECT = '/org/freedesktop/systemd1'


class Systemd(DBusInterface):
    """Systemd function handler."""

    async def connect(self):
        """Connect do bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to systemd")

    @dbus_connected
    def reboot(self):
        """Reboot host computer.

        Return a coroutine.
        """
        return self.dbus.Manager.Reboot()

    @dbus_connected
    def power_off(self):
        """Power off host computer.

        Return a coroutine.
        """
        return self.dbus.Manager.PowerOff()

    @dbus_connected
    def start_unit(self, unit, mode):
        """Start a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.StartUnit(unit, mode)

    @dbus_connected
    def stop_unit(self, unit, mode):
        """Stop a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.StopUnit(unit, mode)

    @dbus_connected
    def reload_unit(self, unit, mode):
        """Reload a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.ReloadOrRestartUnit(unit, mode)

    @dbus_connected
    def restart_unit(self, unit, mode):
        """Restart a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.RestartUnit(unit, mode)

    @dbus_connected
    def list_units(self):
        """Return a list of available systemd services.

        Return a coroutine.
        """
        return self.dbus.Manager.ListUnits()
hassio/dbus/utils.py (new file, 14 lines)
@@ -0,0 +1,14 @@
"""Utils for dbus."""

from ..exceptions import DBusNotConnectedError


def dbus_connected(method):
    """Wrapper for check if dbus is connected."""
    def wrap_dbus(api, *args, **kwargs):
        """Check if dbus is connected before call a method."""
        if api.dbus is None:
            raise DBusNotConnectedError()
        return method(api, *args, **kwargs)

    return wrap_dbus
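Note: `dbus_connected` guards every wrapper method above: if `connect()` never populated `self.dbus`, a call raises `DBusNotConnectedError` instead of failing deep inside the gdbus layer. Usage mirrors the interfaces in this compare (`Example` and its bus name are hypothetical):

    class Example(DBusInterface):
        async def connect(self):
            self.dbus = await DBus.connect('org.example', '/org/example')

        @dbus_connected
        def ping(self):
            # Only reachable once self.dbus is set; returns a coroutine.
            return self.dbus.Ping()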
hassio/docker/__init__.py
@@ -1,8 +1,8 @@
 """Init file for HassIO docker object."""
 from contextlib import suppress
-from collections import namedtuple
 import logging

+import attr
 import docker

 from .network import DockerNetwork
@@ -10,10 +10,11 @@ from ..const import SOCKET_DOCKER

 _LOGGER = logging.getLogger(__name__)

-CommandReturn = namedtuple('CommandReturn', ['exit_code', 'output'])
+# pylint: disable=invalid-name
+CommandReturn = attr.make_class('CommandReturn', ['exit_code', 'output'])


-class DockerAPI(object):
+class DockerAPI:
    """Docker hassio wrapper.

     This class is not AsyncIO safe!
@@ -23,7 +24,7 @@ class DockerAPI(object):
         """Initialize docker base wrapper."""
         self.docker = docker.DockerClient(
             base_url="unix:/{}".format(str(SOCKET_DOCKER)),
-            version='auto', timeout=300)
+            version='auto', timeout=900)
         self.network = DockerNetwork(self.docker)

     @property
hassio/docker/addon.py
@@ -1,16 +1,17 @@
 """Init file for HassIO addon docker object."""
 import logging
 import os
+from pathlib import Path

 import docker
 import requests

 from .interface import DockerInterface
-from .utils import docker_process
 from ..addons.build import AddonBuild
 from ..const import (
     MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE, ENV_TOKEN,
-    ENV_TIME)
+    ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
+from ..utils import process_lock

 _LOGGER = logging.getLogger(__name__)

@@ -28,7 +29,7 @@ class DockerAddon(DockerInterface):
     @property
     def addon(self):
         """Return addon of docker image."""
-        return self._addons.get(self._id)
+        return self.sys_addons.get(self._id)

     @property
     def image(self):
@@ -52,7 +53,7 @@ class DockerAddon(DockerInterface):
         """Return arch of docker image."""
         if not self.addon.legacy:
             return super().arch
-        return self._arch
+        return self.sys_arch

     @property
     def name(self):
@@ -66,6 +67,11 @@ class DockerAddon(DockerInterface):
             return 'host'
         return None

+    @property
+    def full_access(self):
+        """Return True if full access is enabled."""
+        return not self.addon.protected and self.addon.with_full_access
+
     @property
     def hostname(self):
         """Return slug/id of addon."""
@@ -85,7 +91,7 @@ class DockerAddon(DockerInterface):

         return {
             **addon_env,
-            ENV_TIME: self._config.timezone,
+            ENV_TIME: self.sys_timezone,
             ENV_TOKEN: self.addon.uuid,
         }

@@ -100,7 +106,7 @@ class DockerAddon(DockerInterface):

         # Auto mapping UART devices
         if self.addon.auto_uart:
-            for device in self._hardware.serial_devices:
+            for device in self.sys_hardware.serial_devices:
                 devices.append(f"{device}:{device}:rwm")

         # Return None if no devices is present
@@ -121,14 +127,20 @@ class DockerAddon(DockerInterface):
     @property
     def security_opt(self):
         """Controlling security opt."""
-        privileged = self.addon.privileged or []
+        security = []

-        # Disable AppArmor sinse it make troubles wit SYS_ADMIN
-        if 'SYS_ADMIN' in privileged:
-            return [
-                "apparmor:unconfined",
-            ]
-        return None
+        # AppArmor
+        apparmor = self.sys_host.apparmor.available
+        if not apparmor or self.addon.apparmor == SECURITY_DISABLE:
+            security.append("apparmor:unconfined")
+        elif self.addon.apparmor == SECURITY_PROFILE:
+            security.append(f"apparmor={self.addon.slug}")
+
+        # Disable Seccomp / We don't support it official and it
+        # make troubles on some kind of host systems.
+        security.append("seccomp=unconfined")
+
+        return security

     @property
     def tmpfs(self):
@@ -142,8 +154,8 @@ class DockerAddon(DockerInterface):
     def network_mapping(self):
         """Return hosts mapping."""
         return {
-            'homeassistant': self._docker.network.gateway,
-            'hassio': self._docker.network.supervisor,
+            'homeassistant': self.sys_docker.network.gateway,
+            'hassio': self.sys_docker.network.supervisor,
         }

     @property
@@ -166,52 +178,77 @@ class DockerAddon(DockerInterface):
         # setup config mappings
         if MAP_CONFIG in addon_mapping:
             volumes.update({
-                str(self._config.path_extern_config): {
+                str(self.sys_config.path_extern_homeassistant): {
                     'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
                 }})

         if MAP_SSL in addon_mapping:
             volumes.update({
-                str(self._config.path_extern_ssl): {
+                str(self.sys_config.path_extern_ssl): {
                     'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
                 }})

         if MAP_ADDONS in addon_mapping:
             volumes.update({
-                str(self._config.path_extern_addons_local): {
+                str(self.sys_config.path_extern_addons_local): {
                     'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
                 }})

         if MAP_BACKUP in addon_mapping:
             volumes.update({
-                str(self._config.path_extern_backup): {
+                str(self.sys_config.path_extern_backup): {
                     'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
                 }})

         if MAP_SHARE in addon_mapping:
             volumes.update({
-                str(self._config.path_extern_share): {
+                str(self.sys_config.path_extern_share): {
                     'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
                 }})

-        # init other hardware mappings
+        # Init other hardware mappings
+
         # GPIO support
         if self.addon.with_gpio:
+            for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
+                if not Path(gpio_path).exists():
+                    continue
+                volumes.update({
+                    gpio_path: {
+                        'bind': gpio_path, 'mode': 'rw'
+                    },
+                })
+
+        # DeviceTree support
+        if self.addon.with_devicetree:
             volumes.update({
-                "/sys/class/gpio": {
-                    'bind': "/sys/class/gpio", 'mode': 'rw'
-                },
-                "/sys/devices/platform/soc": {
-                    'bind': "/sys/devices/platform/soc", 'mode': 'rw'
+                "/sys/firmware/devicetree/base": {
+                    'bind': "/device-tree", 'mode': 'ro'
                 },
             })

-        # host dbus system
+        # Docker API support
+        if not self.addon.protected and self.addon.access_docker_api:
+            volumes.update({
+                "/var/run/docker.sock": {
+                    'bind': "/var/run/docker.sock", 'mode': 'ro'
+                },
+            })
+
+        # Host dbus system
         if self.addon.host_dbus:
             volumes.update({
                 "/var/run/dbus": {
                     'bind': "/var/run/dbus", 'mode': 'rw'
                 }})

+        # ALSA configuration
+        if self.addon.with_audio:
+            volumes.update({
+                str(self.addon.path_extern_asound): {
+                    'bind': "/etc/asound.conf", 'mode': 'ro'
+                }})
+
         return volumes

     def _run(self):
@@ -222,15 +259,21 @@ class DockerAddon(DockerInterface):
         if self._is_running():
             return True

+        # Security check
+        if not self.addon.protected:
+            _LOGGER.warning(
+                "%s run with disabled proteced mode!", self.addon.name)
+
         # cleanup
         self._stop()

-        ret = self._docker.run(
+        ret = self.sys_docker.run(
             self.image,
             name=self.name,
             hostname=self.hostname,
             detach=True,
             init=True,
+            privileged=self.full_access,
+            ipc_mode=self.ipc,
             stdin_open=self.addon.with_stdin,
             network_mode=self.network_mode,
@@ -269,7 +312,7 @@ class DockerAddon(DockerInterface):

         _LOGGER.info("Start build %s:%s", self.image, tag)
         try:
-            image, log = self._docker.images.build(
+            image, log = self.sys_docker.images.build(
                 **build_env.get_docker_args(tag))

             _LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
@@ -285,10 +328,10 @@ class DockerAddon(DockerInterface):
         _LOGGER.info("Build %s:%s done", self.image, tag)
         return True

-    @docker_process
+    @process_lock
     def export_image(self, path):
         """Export current images into a tar file."""
-        return self._loop.run_in_executor(None, self._export_image, path)
+        return self.sys_run_in_executor(self._export_image, path)

     def _export_image(self, tar_file):
         """Export current images into a tar file.
@@ -296,7 +339,7 @@ class DockerAddon(DockerInterface):
         Need run inside executor.
         """
         try:
-            image = self._docker.api.get_image(self.image)
+            image = self.sys_docker.api.get_image(self.image)
         except docker.errors.DockerException as err:
             _LOGGER.error("Can't fetch image %s: %s", self.image, err)
             return False
@@ -313,10 +356,10 @@ class DockerAddon(DockerInterface):
         _LOGGER.info("Export image %s done", self.image)
         return True

-    @docker_process
+    @process_lock
     def import_image(self, path, tag):
         """Import a tar file as image."""
-        return self._loop.run_in_executor(None, self._import_image, path, tag)
+        return self.sys_run_in_executor(self._import_image, path, tag)

     def _import_image(self, tar_file, tag):
         """Import a tar file as image.
@@ -325,9 +368,9 @@ class DockerAddon(DockerInterface):
         """
         try:
             with tar_file.open("rb") as read_tar:
-                self._docker.api.load_image(read_tar, quiet=True)
+                self.sys_docker.api.load_image(read_tar, quiet=True)

-            image = self._docker.images.get(self.image)
+            image = self.sys_docker.images.get(self.image)
             image.tag(self.image, tag=tag)
         except (docker.errors.DockerException, OSError) as err:
             _LOGGER.error("Can't import image %s: %s", self.image, err)
@@ -338,10 +381,10 @@ class DockerAddon(DockerInterface):
             self._cleanup()
             return True

-    @docker_process
+    @process_lock
     def write_stdin(self, data):
         """Write to add-on stdin."""
-        return self._loop.run_in_executor(None, self._write_stdin, data)
+        return self.sys_run_in_executor(self._write_stdin, data)

     def _write_stdin(self, data):
         """Write to add-on stdin.
@@ -353,7 +396,7 @@ class DockerAddon(DockerInterface):

         try:
             # load needed docker objects
-            container = self._docker.containers.get(self.name)
+            container = self.sys_docker.containers.get(self.name)
             socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
         except docker.errors.DockerException as err:
             _LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
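Note: across the Docker layer, `@docker_process` is replaced by `@process_lock` from `hassio.utils`, which serializes long-running container operations behind the instance's `asyncio.Lock`. A sketch of what such a decorator looks like (an assumption about the implementation in `hassio/utils/__init__.py`, which this compare does not show):

    def process_lock(method):
        """Wrap an async method so only one task runs at a time per object."""
        async def wrap_api(api, *args, **kwargs):
            if api.lock.locked():
                _LOGGER.error("Can't execute %s while a task is in progress",
                              method.__name__)
                return False
            async with api.lock:
                return await method(api, *args, **kwargs)
        return wrap_api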
hassio/docker/hassos_cli.py (new file, 37 lines)
@@ -0,0 +1,37 @@
"""HassOS Cli docker object."""
import logging

import docker

from .interface import DockerInterface
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


class DockerHassOSCli(DockerInterface, CoreSysAttributes):
    """Docker hassio wrapper for HassOS Cli."""

    @property
    def image(self):
        """Return name of HassOS cli image."""
        return f"homeassistant/{self.sys_arch}-hassio-cli"

    def _stop(self):
        """Don't need stop."""
        return True

    def _attach(self):
        """Attach to running docker container.

        Need run inside executor.
        """
        try:
            image = self.sys_docker.images.get(self.image)

        except docker.errors.DockerException:
            _LOGGER.warning("Can't find a HassOS cli %s", self.image)

        else:
            self._meta = image.attrs
            _LOGGER.info("Found HassOS cli %s with version %s",
                         self.image, self.version)
hassio/docker/homeassistant.py
@@ -4,7 +4,7 @@ import logging
 import docker

 from .interface import DockerInterface
-from ..const import ENV_TOKEN, ENV_TIME
+from ..const import ENV_TOKEN, ENV_TIME, LABEL_MACHINE

 _LOGGER = logging.getLogger(__name__)

@@ -14,10 +14,17 @@ HASS_DOCKER_NAME = 'homeassistant'
 class DockerHomeAssistant(DockerInterface):
     """Docker hassio wrapper for HomeAssistant."""

+    @property
+    def machine(self):
+        """Return machine of Home-Assistant docker image."""
+        if self._meta and LABEL_MACHINE in self._meta['Config']['Labels']:
+            return self._meta['Config']['Labels'][LABEL_MACHINE]
+        return None
+
     @property
     def image(self):
         """Return name of docker image."""
-        return self._homeassistant.image
+        return self.sys_homeassistant.image

     @property
     def name(self):
@@ -28,7 +35,7 @@ class DockerHomeAssistant(DockerInterface):
     def devices(self):
         """Create list of special device to map into docker."""
         devices = []
-        for device in self._hardware.serial_devices:
+        for device in self.sys_hardware.serial_devices:
             devices.append(f"{device}:{device}:rwm")
         return devices or None

@@ -43,7 +50,7 @@ class DockerHomeAssistant(DockerInterface):
         # cleanup
         self._stop()

-        ret = self._docker.run(
+        ret = self.sys_docker.run(
             self.image,
             name=self.name,
             hostname=self.name,
@@ -53,16 +60,16 @@ class DockerHomeAssistant(DockerInterface):
             devices=self.devices,
             network_mode='host',
             environment={
-                'HASSIO': self._docker.network.supervisor,
-                ENV_TIME: self._config.timezone,
-                ENV_TOKEN: self._homeassistant.uuid,
+                'HASSIO': self.sys_docker.network.supervisor,
+                ENV_TIME: self.sys_timezone,
+                ENV_TOKEN: self.sys_homeassistant.uuid,
             },
             volumes={
-                str(self._config.path_extern_config):
+                str(self.sys_config.path_extern_homeassistant):
                     {'bind': '/config', 'mode': 'rw'},
-                str(self._config.path_extern_ssl):
+                str(self.sys_config.path_extern_ssl):
                     {'bind': '/ssl', 'mode': 'ro'},
-                str(self._config.path_extern_share):
+                str(self.sys_config.path_extern_share):
                     {'bind': '/share', 'mode': 'rw'},
             }
         )
@@ -78,26 +85,31 @@ class DockerHomeAssistant(DockerInterface):

         Need run inside executor.
         """
-        return self._docker.run_command(
+        return self.sys_docker.run_command(
             self.image,
             command,
             privileged=True,
             init=True,
+            devices=self.devices,
             detach=True,
             stdout=True,
             stderr=True,
             environment={
-                ENV_TIME: self._config.timezone,
+                ENV_TIME: self.sys_timezone,
             },
             volumes={
-                str(self._config.path_extern_config):
-                    {'bind': '/config', 'mode': 'ro'},
-                str(self._config.path_extern_ssl):
+                str(self.sys_config.path_extern_homeassistant):
+                    {'bind': '/config', 'mode': 'rw'},
+                str(self.sys_config.path_extern_ssl):
                     {'bind': '/ssl', 'mode': 'ro'},
+                str(self.sys_config.path_extern_share):
+                    {'bind': '/share', 'mode': 'ro'},
             }
         )

     def is_initialize(self):
         """Return True if docker container exists."""
-        return self._loop.run_in_executor(None, self._is_initialize)
+        return self.sys_run_in_executor(self._is_initialize)

     def _is_initialize(self):
         """Return True if docker container exists.
@@ -105,7 +117,7 @@ class DockerHomeAssistant(DockerInterface):
         Need run inside executor.
         """
         try:
-            self._docker.containers.get(self.name)
+            self.sys_docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False
hassio/docker/interface.py
@@ -5,10 +5,10 @@ import logging

 import docker

-from .utils import docker_process
 from .stats import DockerStats
 from ..const import LABEL_VERSION, LABEL_ARCH
 from ..coresys import CoreSysAttributes
+from ..utils import process_lock

 _LOGGER = logging.getLogger(__name__)

@@ -20,7 +20,7 @@ class DockerInterface(CoreSysAttributes):
         """Initialize docker base wrapper."""
         self.coresys = coresys
         self._meta = None
-        self.lock = asyncio.Lock(loop=self._loop)
+        self.lock = asyncio.Lock(loop=coresys.loop)

     @property
     def timeout(self):
@@ -58,10 +58,10 @@ class DockerInterface(CoreSysAttributes):
         """Return True if a task is in progress."""
         return self.lock.locked()

-    @docker_process
+    @process_lock
     def install(self, tag):
         """Pull docker image."""
-        return self._loop.run_in_executor(None, self._install, tag)
+        return self.sys_run_in_executor(self._install, tag)

     def _install(self, tag):
         """Pull docker image.
@@ -70,7 +70,7 @@ class DockerInterface(CoreSysAttributes):
         """
         try:
             _LOGGER.info("Pull image %s tag %s.", self.image, tag)
-            image = self._docker.images.pull(f"{self.image}:{tag}")
+            image = self.sys_docker.images.pull(f"{self.image}:{tag}")

             image.tag(self.image, tag='latest')
             self._meta = image.attrs
@@ -83,7 +83,7 @@ class DockerInterface(CoreSysAttributes):

     def exists(self):
         """Return True if docker image exists in local repo."""
-        return self._loop.run_in_executor(None, self._exists)
+        return self.sys_run_in_executor(self._exists)

     def _exists(self):
         """Return True if docker image exists in local repo.
@@ -91,7 +91,7 @@ class DockerInterface(CoreSysAttributes):
         Need run inside executor.
         """
         try:
-            image = self._docker.images.get(self.image)
+            image = self.sys_docker.images.get(self.image)
             assert f"{self.image}:{self.version}" in image.tags
         except (docker.errors.DockerException, AssertionError):
             return False
@@ -103,7 +103,7 @@ class DockerInterface(CoreSysAttributes):

         Return a Future.
         """
-        return self._loop.run_in_executor(None, self._is_running)
+        return self.sys_run_in_executor(self._is_running)

     def _is_running(self):
         """Return True if docker is Running.
@@ -111,8 +111,8 @@ class DockerInterface(CoreSysAttributes):
         Need run inside executor.
         """
         try:
-            container = self._docker.containers.get(self.name)
-            image = self._docker.images.get(self.image)
+            container = self.sys_docker.containers.get(self.name)
+            image = self.sys_docker.images.get(self.image)
         except docker.errors.DockerException:
             return False

@@ -120,16 +120,16 @@ class DockerInterface(CoreSysAttributes):
         if container.status != 'running':
             return False

-        # we run on a old image, stop and start it
+        # we run on an old image, stop and start it
         if container.image.id != image.id:
             return False

         return True

-    @docker_process
+    @process_lock
     def attach(self):
         """Attach to running docker container."""
-        return self._loop.run_in_executor(None, self._attach)
+        return self.sys_run_in_executor(self._attach)

     def _attach(self):
         """Attach to running docker container.
@@ -138,9 +138,9 @@ class DockerInterface(CoreSysAttributes):
         """
         try:
             if self.image:
-                self._meta = self._docker.images.get(self.image).attrs
+                self._meta = self.sys_docker.images.get(self.image).attrs
             else:
-                self._meta = self._docker.containers.get(self.name).attrs
+                self._meta = self.sys_docker.containers.get(self.name).attrs
         except docker.errors.DockerException:
             return False

@@ -149,10 +149,10 @@ class DockerInterface(CoreSysAttributes):

         return True

-    @docker_process
+    @process_lock
     def run(self):
         """Run docker image."""
-        return self._loop.run_in_executor(None, self._run)
+        return self.sys_run_in_executor(self._run)

     def _run(self):
         """Run docker image.
@@ -161,10 +161,10 @@ class DockerInterface(CoreSysAttributes):
         """
         raise NotImplementedError()

-    @docker_process
+    @process_lock
     def stop(self):
         """Stop/remove docker container."""
-        return self._loop.run_in_executor(None, self._stop)
+        return self.sys_run_in_executor(self._stop)

     def _stop(self):
         """Stop/remove and remove docker container.
@@ -172,7 +172,7 @@ class DockerInterface(CoreSysAttributes):
         Need run inside executor.
         """
         try:
-            container = self._docker.containers.get(self.name)
+            container = self.sys_docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False

@@ -187,10 +187,10 @@ class DockerInterface(CoreSysAttributes):

         return True

-    @docker_process
+    @process_lock
     def remove(self):
         """Remove docker images."""
-        return self._loop.run_in_executor(None, self._remove)
+        return self.sys_run_in_executor(self._remove)

     def _remove(self):
         """remove docker images.
@@ -205,11 +205,11 @@ class DockerInterface(CoreSysAttributes):

         try:
             with suppress(docker.errors.ImageNotFound):
-                self._docker.images.remove(
+                self.sys_docker.images.remove(
                     image=f"{self.image}:latest", force=True)

             with suppress(docker.errors.ImageNotFound):
-                self._docker.images.remove(
+                self.sys_docker.images.remove(
                     image=f"{self.image}:{self.version}", force=True)

         except docker.errors.DockerException as err:
@@ -219,10 +219,10 @@ class DockerInterface(CoreSysAttributes):
             self._meta = None
             return True

-    @docker_process
+    @process_lock
     def update(self, tag):
         """Update a docker image."""
-        return self._loop.run_in_executor(None, self._update, tag)
+        return self.sys_run_in_executor(self._update, tag)

     def _update(self, tag):
         """Update a docker image.
@@ -247,7 +247,7 @@ class DockerInterface(CoreSysAttributes):

         Return a Future.
         """
-        return self._loop.run_in_executor(None, self._logs)
+        return self.sys_run_in_executor(self._logs)

     def _logs(self):
         """Return docker logs of container.
@@ -255,7 +255,7 @@ class DockerInterface(CoreSysAttributes):
         Need run inside executor.
         """
         try:
-            container = self._docker.containers.get(self.name)
+            container = self.sys_docker.containers.get(self.name)
         except docker.errors.DockerException:
             return b""

@@ -264,35 +264,10 @@ class DockerInterface(CoreSysAttributes):
         except docker.errors.DockerException as err:
             _LOGGER.warning("Can't grap logs from %s: %s", self.image, err)

-    @docker_process
-    def restart(self):
-        """Restart docker container."""
-        return self._loop.run_in_executor(None, self._restart)
-
-    def _restart(self):
-        """Restart docker container.
-
-        Need run inside executor.
-        """
-        try:
-            container = self._docker.containers.get(self.name)
-        except docker.errors.DockerException:
-            return False
-
-        _LOGGER.info("Restart %s", self.image)
-
-        try:
-            container.restart(timeout=self.timeout)
-        except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't restart %s: %s", self.image, err)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@docker_process
|
||||
@process_lock
|
||||
def cleanup(self):
|
||||
"""Check if old version exists and cleanup."""
|
||||
return self._loop.run_in_executor(None, self._cleanup)
|
||||
return self.sys_run_in_executor(self._cleanup)
|
||||
|
||||
def _cleanup(self):
|
||||
"""Check if old version exists and cleanup.
|
||||
@@ -300,25 +275,25 @@ class DockerInterface(CoreSysAttributes):
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
latest = self._docker.images.get(self.image)
|
||||
latest = self.sys_docker.images.get(self.image)
|
||||
except docker.errors.DockerException:
|
||||
_LOGGER.warning("Can't find %s for cleanup", self.image)
|
||||
return False
|
||||
|
||||
for image in self._docker.images.list(name=self.image):
|
||||
for image in self.sys_docker.images.list(name=self.image):
|
||||
if latest.id == image.id:
|
||||
continue
|
||||
|
||||
with suppress(docker.errors.DockerException):
|
||||
_LOGGER.info("Cleanup docker images: %s", image.tags)
|
||||
self._docker.images.remove(image.id, force=True)
|
||||
self.sys_docker.images.remove(image.id, force=True)
|
||||
|
||||
return True
|
||||
|
||||
@docker_process
|
||||
@process_lock
|
||||
def execute_command(self, command):
|
||||
"""Create a temporary container and run command."""
|
||||
return self._loop.run_in_executor(None, self._execute_command, command)
|
||||
return self.sys_run_in_executor(self._execute_command, command)
|
||||
|
||||
def _execute_command(self, command):
|
||||
"""Create a temporary container and run command.
|
||||
@@ -329,7 +304,7 @@ class DockerInterface(CoreSysAttributes):
|
||||
|
||||
def stats(self):
|
||||
"""Read and return stats from container."""
|
||||
return self._loop.run_in_executor(None, self._stats)
|
||||
return self.sys_run_in_executor(self._stats)
|
||||
|
||||
def _stats(self):
|
||||
"""Create a temporary container and run command.
|
||||
@@ -337,7 +312,7 @@ class DockerInterface(CoreSysAttributes):
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
container = self._docker.containers.get(self.name)
|
||||
container = self.sys_docker.containers.get(self.name)
|
||||
except docker.errors.DockerException:
|
||||
return None
|
||||
|
||||
|
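The @docker_process decorator that disappears in this diff is replaced by a shared process_lock helper (imported elsewhere in this changeset as `from .utils import process_lock`). A minimal sketch of such a decorator, assuming the wrapped object exposes an asyncio.Lock as `.lock`; the shipped hassio.utils implementation may differ:

import functools
import logging

_LOGGER = logging.getLogger(__name__)


def process_lock(method):
    """Reject overlapping calls, then serialize on the object's lock."""
    @functools.wraps(method)
    async def wrap_api(api, *args, **kwargs):
        """Return api wrapper."""
        if api.lock.locked():
            _LOGGER.error(
                "Can't execute %s while a task is in progress",
                method.__name__)
            return False

        async with api.lock:
            # works for coroutines and for methods returning futures
            return await method(api, *args, **kwargs)

    return wrap_api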
@@ -8,7 +8,7 @@ from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE
 _LOGGER = logging.getLogger(__name__)


-class DockerNetwork(object):
+class DockerNetwork:
     """Internal HassIO Network.

     This class is not AsyncIO safe!
@@ -2,7 +2,7 @@
 from contextlib import suppress


-class DockerStats(object):
+class DockerStats:
     """Hold stats data from container inside."""

     def __init__(self, stats):
@@ -11,7 +11,7 @@ _LOGGER = logging.getLogger(__name__)


 class DockerSupervisor(DockerInterface, CoreSysAttributes):
-    """Docker hassio wrapper for HomeAssistant."""
+    """Docker hassio wrapper for Supervisor."""

     @property
     def name(self):
@@ -24,7 +24,7 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):
         Need run inside executor.
         """
         try:
-            container = self._docker.containers.get(self.name)
+            container = self.sys_docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False

@@ -33,9 +33,10 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):
             self.image, self.version)

         # if already attach
-        if container in self._docker.network.containers:
+        if container in self.sys_docker.network.containers:
             return True

         # attach to network
-        return self._docker.network.attach_container(
-            container, alias=['hassio'], ipv4=self._docker.network.supervisor)
+        return self.sys_docker.network.attach_container(
+            container, alias=['hassio'],
+            ipv4=self.sys_docker.network.supervisor)
@@ -1,20 +0,0 @@
-"""HassIO docker utilitys."""
-import logging
-
-_LOGGER = logging.getLogger(__name__)
-
-
-# pylint: disable=protected-access
-def docker_process(method):
-    """Wrap function with only run once."""
-    async def wrap_api(api, *args, **kwargs):
-        """Return api wrapper."""
-        if api.lock.locked():
-            _LOGGER.error(
-                "Can't excute %s while a task is in progress", method.__name__)
-            return False
-
-        async with api.lock:
-            return await method(api, *args, **kwargs)
-
-    return wrap_api
hassio/exceptions.py (new file, 129 lines)
@@ -0,0 +1,129 @@
+"""Core Exceptions."""
+
+
+class HassioError(Exception):
+    """Root exception."""
+    pass
+
+
+class HassioNotSupportedError(HassioError):
+    """Function is not supported."""
+    pass
+
+
+# HomeAssistant
+
+class HomeAssistantError(HassioError):
+    """Home Assistant exception."""
+    pass
+
+
+class HomeAssistantUpdateError(HomeAssistantError):
+    """Error on update of a Home Assistant."""
+    pass
+
+
+class HomeAssistantAPIError(HomeAssistantError):
+    """Home Assistant API exception."""
+    pass
+
+
+class HomeAssistantAuthError(HomeAssistantAPIError):
+    """Home Assistant Auth API exception."""
+    pass
+
+
+# HassOS
+
+class HassOSError(HassioError):
+    """HassOS exception."""
+    pass
+
+
+class HassOSUpdateError(HassOSError):
+    """Error on update of a HassOS."""
+    pass
+
+
+class HassOSNotSupportedError(HassioNotSupportedError):
+    """Function not supported by HassOS."""
+    pass
+
+
+# Updater
+
+class HassioUpdaterError(HassioError):
+    """Error on Updater."""
+    pass
+
+
+# Host
+
+class HostError(HassioError):
+    """Internal Host error."""
+    pass
+
+
+class HostNotSupportedError(HassioNotSupportedError):
+    """Host function is not supported."""
+    pass
+
+
+class HostServiceError(HostError):
+    """Host service functions fails."""
+    pass
+
+
+class HostAppArmorError(HostError):
+    """Host AppArmor functions fails."""
+    pass
+
+
+# API
+
+class APIError(HassioError):
+    """API errors."""
+    pass
+
+
+class APINotSupportedError(HassioNotSupportedError):
+    """API not supported error."""
+    pass
+
+
+# utils/gdbus
+
+class DBusError(HassioError):
+    """DBus generic error."""
+    pass
+
+
+class DBusNotConnectedError(HostNotSupportedError):
+    """DBus is not connected and call a method."""
+
+
+class DBusFatalError(DBusError):
+    """DBus call going wrong."""
+    pass
+
+
+class DBusParseError(DBusError):
+    """DBus parse error."""
+    pass
+
+
+# util/apparmor
+
+class AppArmorError(HostAppArmorError):
+    """General AppArmor error."""
+    pass
+
+
+class AppArmorFileError(AppArmorError):
+    """AppArmor profile file error."""
+    pass
+
+
+class AppArmorInvalidError(AppArmorError):
+    """AppArmor profile validate error."""
+    pass
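Because everything in this new module derives from HassioError, call sites can catch narrowly or broadly. A hypothetical consumer (function and argument names illustrative, not from the source):

from hassio.exceptions import HassioError, HostAppArmorError


async def remove_profile_safe(apparmor, profile_name):
    """Remove an AppArmor profile, mapping errors to a bool result."""
    try:
        await apparmor.remove_profile(profile_name)
    except HostAppArmorError:
        # expected failure mode of this subsystem
        return False
    except HassioError:
        # any other supervisor error: let the generic handler see it
        raise
    return True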
hassio/hassos.py (new file, 186 lines)
@@ -0,0 +1,186 @@
+"""HassOS support on supervisor."""
+import asyncio
+import logging
+from pathlib import Path
+
+import aiohttp
+from cpe import CPE
+
+from .coresys import CoreSysAttributes
+from .const import URL_HASSOS_OTA
+from .docker.hassos_cli import DockerHassOSCli
+from .exceptions import HassOSNotSupportedError, HassOSUpdateError, DBusError
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class HassOS(CoreSysAttributes):
+    """HassOS interface inside HassIO."""
+
+    def __init__(self, coresys):
+        """Initialize HassOS handler."""
+        self.coresys = coresys
+        self.instance = DockerHassOSCli(coresys)
+        self._available = False
+        self._version = None
+        self._board = None
+
+    @property
+    def available(self):
+        """Return True, if HassOS on host."""
+        return self._available
+
+    @property
+    def version(self):
+        """Return version of HassOS."""
+        return self._version
+
+    @property
+    def version_cli(self):
+        """Return version of HassOS cli."""
+        return self.instance.version
+
+    @property
+    def version_latest(self):
+        """Return version of HassOS."""
+        return self.sys_updater.version_hassos
+
+    @property
+    def version_cli_latest(self):
+        """Return version of HassOS."""
+        return self.sys_updater.version_hassos_cli
+
+    @property
+    def need_update(self):
+        """Return true if a HassOS update is available."""
+        return self.version != self.version_latest
+
+    @property
+    def need_cli_update(self):
+        """Return true if a HassOS cli update is available."""
+        return self.version_cli != self.version_cli_latest
+
+    @property
+    def board(self):
+        """Return board name."""
+        return self._board
+
+    def _check_host(self):
+        """Check if HassOS is available."""
+        if not self.available:
+            _LOGGER.error("No HassOS available")
+            raise HassOSNotSupportedError()
+
+    async def _download_raucb(self, version):
+        """Download rauc bundle (OTA) from github."""
+        url = URL_HASSOS_OTA.format(version=version, board=self.board)
+        raucb = Path(self.sys_config.path_tmp, f"hassos-{version}.raucb")
+
+        try:
+            _LOGGER.info("Fetch OTA update from %s", url)
+            async with self.sys_websession.get(url) as request:
+                if request.status != 200:
+                    raise HassOSUpdateError()
+
+                # Download RAUCB file
+                with raucb.open('wb') as ota_file:
+                    while True:
+                        chunk = await request.content.read(1048576)
+                        if not chunk:
+                            break
+                        ota_file.write(chunk)
+
+            _LOGGER.info("OTA update is downloaded on %s", raucb)
+            return raucb
+
+        except (aiohttp.ClientError, asyncio.TimeoutError) as err:
+            _LOGGER.warning("Can't fetch versions from %s: %s", url, err)
+
+        except OSError as err:
+            _LOGGER.error("Can't write ota file: %s", err)
+
+        raise HassOSUpdateError()
+
+    async def load(self):
+        """Load HassOS data."""
+        try:
+            # Check needed host functions
+            assert self.sys_dbus.rauc.is_connected
+            assert self.sys_dbus.systemd.is_connected
+            assert self.sys_dbus.hostname.is_connected
+
+            assert self.sys_host.info.cpe is not None
+            cpe = CPE(self.sys_host.info.cpe)
+            assert cpe.get_product()[0] == 'hassos'
+        except (AssertionError, NotImplementedError):
+            _LOGGER.debug("Found no HassOS")
+            return
+
+        # Store meta data
+        self._available = True
+        self._version = cpe.get_version()[0]
+        self._board = cpe.get_target_hardware()[0]
+
+        _LOGGER.info("Detect HassOS %s on host system", self.version)
+        await self.instance.attach()
+
+    def config_sync(self):
+        """Trigger a host config reload from usb.
+
+        Return a coroutine.
+        """
+        self._check_host()
+
+        _LOGGER.info("Sync config from USB on HassOS.")
+        return self.sys_host.services.restart('hassos-config.service')
+
+    async def update(self, version=None):
+        """Update HassOS system."""
+        version = version or self.version_latest
+
+        # Check installed version
+        self._check_host()
+        if version == self.version:
+            _LOGGER.warning("Version %s is already installed", version)
+            raise HassOSUpdateError()
+
+        # Fetch files from internet
+        int_ota = await self._download_raucb(version)
+        ext_ota = Path(self.sys_config.path_extern_tmp, int_ota.name)
+
+        try:
+            await self.sys_dbus.rauc.install(ext_ota)
+            completed = await self.sys_dbus.rauc.signal_completed()
+
+        except DBusError:
+            _LOGGER.error("Rauc communication error")
+            raise HassOSUpdateError() from None
+
+        finally:
+            int_ota.unlink()
+
+        # Update success
+        if 0 in completed:
+            _LOGGER.info("Install HassOS %s success", version)
+            self.sys_create_task(self.sys_host.control.reboot())
+            return
+
+        # Update fails
+        rauc_status = await self.sys_dbus.get_properties()
+        _LOGGER.error(
+            "HassOS update fails with: %s", rauc_status.get('LastError'))
+        raise HassOSUpdateError()
+
+    async def update_cli(self, version=None):
+        """Update local HassOS cli."""
+        version = version or self.version_cli_latest
+
+        if version == self.version_cli:
+            _LOGGER.warning("Version %s is already installed for CLI", version)
+            raise HassOSUpdateError()
+
+        if await self.instance.update(version):
+            return
+
+        _LOGGER.error("HassOS CLI update fails.")
+        raise HassOSUpdateError()
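HassOS detection in load() hinges on the host's CPE string exposed by systemd-hostnamed. A standalone sketch of the same probing with the cpe package; the CPE value below is illustrative, the real one comes from self.sys_host.info.cpe:

from cpe import CPE

raw = "cpe:2.3:o:home_assistant:hassos:1.1:*:production:*:*:*:raspberrypi3:*"

cpe = CPE(raw)
assert cpe.get_product()[0] == 'hassos'   # anything else means: not HassOS
version = cpe.get_version()[0]            # -> '1.1'
board = cpe.get_target_hardware()[0]      # -> 'raspberrypi3', used in the OTA URL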
@@ -1,22 +1,28 @@
 """HomeAssistant control object."""
 import asyncio
-from collections import namedtuple
+from contextlib import asynccontextmanager, suppress
 import logging
 import os
 import re
+from pathlib import Path
 import socket
 import time

 import aiohttp
-from aiohttp.hdrs import CONTENT_TYPE
+from aiohttp import hdrs
+import attr

 from .const import (
     FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_UUID,
     ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
-    ATTR_WAIT_BOOT, HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
+    ATTR_WAIT_BOOT, ATTR_REFRESH_TOKEN,
+    HEADER_HA_ACCESS)
 from .coresys import CoreSysAttributes
 from .docker.homeassistant import DockerHomeAssistant
-from .utils import convert_to_ascii
+from .exceptions import (
+    HomeAssistantUpdateError, HomeAssistantError, HomeAssistantAPIError,
+    HomeAssistantAuthError)
+from .utils import convert_to_ascii, process_lock
 from .utils.json import JsonConfig
 from .validate import SCHEMA_HASS_CONFIG

@@ -24,7 +30,8 @@ _LOGGER = logging.getLogger(__name__)

 RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")

-ConfigResult = namedtuple('ConfigResult', ['valid', 'log'])
+# pylint: disable=invalid-name
+ConfigResult = attr.make_class('ConfigResult', ['valid', 'log'], frozen=True)


 class HomeAssistant(JsonConfig, CoreSysAttributes):
@@ -35,6 +42,10 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
         self.coresys = coresys
         self.instance = DockerHomeAssistant(coresys)
+        self.lock = asyncio.Lock(loop=coresys.loop)
+        self._error_state = False
+        # We don't persist access tokens. Instead we fetch new ones when needed
+        self.access_token = None

     async def load(self):
         """Prepare HomeAssistant object."""
@@ -44,10 +55,20 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
             _LOGGER.info("No HomeAssistant docker %s found.", self.image)
             await self.install_landingpage()

+    @property
+    def machine(self):
+        """Return System Machines."""
+        return self.instance.machine
+
+    @property
+    def error_state(self):
+        """Return True if system is in error."""
+        return self._error_state
+
     @property
     def api_ip(self):
         """Return IP of HomeAssistant instance."""
-        return self._docker.network.gateway
+        return self.sys_docker.network.gateway

     @property
     def api_port(self):
@@ -116,7 +137,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         """Return last available version of homeassistant."""
         if self.is_custom_image:
             return self._data.get(ATTR_LAST_VERSION)
-        return self._updater.version_homeassistant
+        return self.sys_updater.version_homeassistant

     @last_version.setter
     def last_version(self, value):
@@ -162,6 +183,17 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         """Return a UUID of this HomeAssistant."""
         return self._data[ATTR_UUID]

+    @property
+    def refresh_token(self):
+        """Return the refresh token to authenticate with HomeAssistant."""
+        return self._data.get(ATTR_REFRESH_TOKEN)
+
+    @refresh_token.setter
+    def refresh_token(self, value):
+        """Set Home Assistant refresh_token."""
+        self._data[ATTR_REFRESH_TOKEN] = value
+
+    @process_lock
     async def install_landingpage(self):
         """Install a landingpage."""
         _LOGGER.info("Setup HomeAssistant landingpage")
@@ -169,54 +201,93 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
             if await self.instance.install('landingpage'):
                 break
             _LOGGER.warning("Fails install landingpage, retry after 60sec")
-            await asyncio.sleep(60, loop=self._loop)
+            await asyncio.sleep(60)

         # Run landingpage after installation
-        await self.start()
+        _LOGGER.info("Start landingpage")
+        try:
+            await self._start()
+        except HomeAssistantError:
+            _LOGGER.warning("Can't start landingpage")

+    @process_lock
     async def install(self):
         """Install a landingpage."""
         _LOGGER.info("Setup HomeAssistant")
         while True:
             # read homeassistant tag and install it
             if not self.last_version:
-                await self._updater.reload()
+                await self.sys_updater.reload()

             tag = self.last_version
             if tag and await self.instance.install(tag):
                 break
             _LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
-            await asyncio.sleep(60, loop=self._loop)
+            await asyncio.sleep(60)

         # finishing
         _LOGGER.info("HomeAssistant docker now installed")
-        if self.boot:
-            await self.start()
-        await self.instance.cleanup()
+        try:
+            if not self.boot:
+                return
+            _LOGGER.info("Start HomeAssistant")
+            await self._start()
+        except HomeAssistantError:
+            _LOGGER.error("Can't start HomeAssistant!")
+        finally:
+            await self.instance.cleanup()

+    @process_lock
     async def update(self, version=None):
         """Update HomeAssistant version."""
         version = version or self.last_version
+        rollback = self.version if not self.error_state else None
         running = await self.instance.is_running()
         exists = await self.instance.exists()

         if exists and version == self.instance.version:
-            _LOGGER.info("Version %s is already installed", version)
-            return False
+            _LOGGER.warning("Version %s is already installed", version)
+            return HomeAssistantUpdateError()

-        try:
-            return await self.instance.update(version)
-        finally:
-            if running:
-                await self.start()
+        # process a update
+        async def _update(to_version):
+            """Run Home Assistant update."""
+            try:
+                _LOGGER.info("Update HomeAssistant to version %s", to_version)
+                if not await self.instance.update(to_version):
+                    raise HomeAssistantUpdateError()
+            finally:
+                if running:
+                    await self._start()
+            _LOGGER.info("Successfull run HomeAssistant %s", to_version)

-    async def start(self):
-        """Run HomeAssistant docker."""
+        # Update Home Assistant
+        with suppress(HomeAssistantError):
+            await _update(version)
+            return
+
+        # Update going wrong, revert it
+        if self.error_state and rollback:
+            _LOGGER.fatal("HomeAssistant update fails -> rollback!")
+            await _update(rollback)
+        else:
+            raise HomeAssistantUpdateError()
+
+    async def _start(self):
+        """Start HomeAssistant docker & wait."""
         if not await self.instance.run():
-            return False
+            raise HomeAssistantError()
+        await self._block_till_run()

-        return await self._block_till_run()
+    @process_lock
+    def start(self):
+        """Run HomeAssistant docker.
+
+        Return a coroutine.
+        """
+        return self._start()

+    @process_lock
     def stop(self):
         """Stop HomeAssistant docker.

@@ -224,12 +295,11 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         """
         return self.instance.stop()

+    @process_lock
     async def restart(self):
         """Restart HomeAssistant docker."""
-        if not await self.instance.restart():
-            return False
-
-        return await self._block_till_run()
+        await self.instance.stop()
+        await self._start()

     def logs(self):
         """Get HomeAssistant docker logs.

@@ -262,7 +332,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
     @property
     def in_progress(self):
         """Return True if a task is in progress."""
-        return self.instance.in_progress
+        return self.instance.in_progress or self.lock.locked()

     async def check_config(self):
         """Run homeassistant config check."""
@@ -272,7 +342,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):

         # if not valid
         if result.exit_code is None:
-            return ConfigResult(False, "")
+            raise HomeAssistantError()

         # parse output
         log = convert_to_ascii(result.output)
@@ -280,55 +350,97 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
             return ConfigResult(False, log)
         return ConfigResult(True, log)

-    async def check_api_state(self):
-        """Check if Home-Assistant up and running."""
-        url = f"{self.api_url}/api/"
-        header = {CONTENT_TYPE: CONTENT_TYPE_JSON}
+    async def ensure_access_token(self):
+        """Ensures there is an access token."""
+        if self.access_token is not None:
+            return
+
+        with suppress(asyncio.TimeoutError, aiohttp.ClientError):
+            async with self.sys_websession_ssl.post(
+                    f"{self.api_url}/auth/token",
+                    timeout=30,
+                    data={
+                        "grant_type": "refresh_token",
+                        "refresh_token": self.refresh_token
+                    }
+            ) as resp:
+                if resp.status == 200:
+                    _LOGGER.info("Updated HomeAssistant API token")
+                    tokens = await resp.json()
+                    self.access_token = tokens['access_token']
+                    return
+
+        _LOGGER.error("Can't update HomeAssistant access token!")
+        raise HomeAssistantAuthError()
+
+    @asynccontextmanager
+    async def make_request(self, method, path, json=None, content_type=None,
+                           data=None, timeout=30):
+        """Async context manager to make a request with right auth."""
+        url = f"{self.api_url}/{path}"
+        headers = {}
+
+        # Passthrough content type
+        if content_type is not None:
+            headers[hdrs.CONTENT_TYPE] = content_type

         # Set old API Password
         if self.api_password:
-            header.update({HEADER_HA_ACCESS: self.api_password})
+            headers[HEADER_HA_ACCESS] = self.api_password

-        try:
-            # pylint: disable=bad-continuation
-            async with self._websession_ssl.get(
-                    url, headers=header, timeout=30) as request:
-                status = request.status
+        for _ in (1, 2):
+            # Prepare Access token
+            if self.refresh_token:
+                await self.ensure_access_token()
+                headers[hdrs.AUTHORIZATION] = f'Bearer {self.access_token}'

-        except (asyncio.TimeoutError, aiohttp.ClientError):
-            return False
+            try:
+                async with getattr(self.sys_websession_ssl, method)(
+                        url, data=data, timeout=timeout, json=json,
+                        headers=headers
+                ) as resp:
+                    # Access token expired
+                    if resp.status == 401 and self.refresh_token:
+                        self.access_token = None
+                        continue
+                    yield resp
+                    return
+            except (asyncio.TimeoutError, aiohttp.ClientError) as err:
+                _LOGGER.error("Error on call %s: %s", url, err)
+                break

-        if status not in (200, 201):
-            _LOGGER.warning("Home-Assistant API config missmatch")
-            return True
+        raise HomeAssistantAPIError()

+    async def check_api_state(self):
+        """Return True if Home-Assistant up and running."""
+        with suppress(HomeAssistantAPIError):
+            async with self.make_request('get', 'api/') as resp:
+                if resp.status in (200, 201):
+                    return True
+                err = resp.status
+
+        _LOGGER.warning("Home-Assistant API config missmatch: %d", err)
+        return False
+
     async def send_event(self, event_type, event_data=None):
         """Send event to Home-Assistant."""
-        url = f"{self.api_url}/api/events/{event_type}"
-        header = {CONTENT_TYPE: CONTENT_TYPE_JSON}
+        with suppress(HomeAssistantAPIError):
+            async with self.make_request(
+                    'get', f'api/events/{event_type}'
+            ) as resp:
+                if resp.status in (200, 201):
+                    return
+                err = resp.status

-        if self.api_password:
-            header.update({HEADER_HA_ACCESS: self.api_password})
-
-        try:
-            # pylint: disable=bad-continuation
-            async with self._websession_ssl.post(
-                    url, headers=header, timeout=30,
-                    json=event_data) as request:
-                status = request.status
-
-        except (asyncio.TimeoutError, aiohttp.ClientError) as err:
-            _LOGGER.warning(
-                "Home-Assistant event %s fails: %s", event_type, err)
-            return False
-
-        if status not in (200, 201):
-            _LOGGER.warning("Home-Assistant event %s fails", event_type)
-            return False
-        return True
+        _LOGGER.warning("HomeAssistant event %s fails: %s", event_type, err)
+        return HomeAssistantError()

     async def _block_till_run(self):
         """Block until Home-Assistant is booting up or startup timeout."""
         start_time = time.monotonic()
+        migration_progress = False
+        migration_file = Path(
+            self.sys_config.path_homeassistant, '.migration_progress')

         def check_port():
             """Check if port is mapped."""
@@ -337,17 +449,46 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
                 result = sock.connect_ex((str(self.api_ip), self.api_port))
                 sock.close()

                 # Check if the port is available
                 if result == 0:
                     return True
-                return False
             except OSError:
                 pass
+            return False

-        while time.monotonic() - start_time < self.wait_boot:
-            if await self._loop.run_in_executor(None, check_port):
-                _LOGGER.info("Detect a running Home-Assistant instance")
-                return True
-            await asyncio.sleep(10, loop=self._loop)
+        while True:
+            await asyncio.sleep(10)

-        _LOGGER.warning("Don't wait anymore of Home-Assistant startup!")
-        return False
+            # 1
+            # Check if Container is is_running
+            if not await self.instance.is_running():
+                _LOGGER.error("HomeAssistant is crashed!")
+                break
+
+            # 2
+            # Check if API response
+            if await self.sys_run_in_executor(check_port):
+                _LOGGER.info("Detect a running HomeAssistant instance")
+                self._error_state = False
+                return
+
+            # 3
+            # Running DB Migration
+            if migration_file.exists():
+                if not migration_progress:
+                    migration_progress = True
+                    _LOGGER.info("HomeAssistant record migration in progress")
+                continue
+            elif migration_progress:
+                migration_progress = False  # Reset start time
+                start_time = time.monotonic()
+                _LOGGER.info("HomeAssistant record migration done")
+
+            # 4
+            # Timeout
+            if time.monotonic() - start_time > self.wait_boot:
+                _LOGGER.warning("Don't wait anymore of HomeAssistant startup!")
+                break
+
+        self._error_state = True
+        raise HomeAssistantError()
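A hypothetical call site for the new make_request() helper; the context manager yields an aiohttp response and transparently retries once after refreshing an expired access token (function name and path illustrative):

async def fetch_ha_config(homeassistant):
    """Read /api/config from a HomeAssistant object (sketch)."""
    async with homeassistant.make_request('get', 'api/config') as resp:
        # raises HomeAssistantAPIError on timeout or transport failure
        return await resp.json()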
hassio/host/__init__.py (new file, 93 lines)
@@ -0,0 +1,93 @@
+"""Host function like audio/dbus/systemd."""
+from contextlib import suppress
+import logging
+
+from .alsa import AlsaAudio
+from .apparmor import AppArmorControl
+from .control import SystemControl
+from .info import InfoCenter
+from .services import ServiceManager
+from ..const import (
+    FEATURES_REBOOT, FEATURES_SHUTDOWN, FEATURES_HOSTNAME, FEATURES_SERVICES,
+    FEATURES_HASSOS)
+from ..coresys import CoreSysAttributes
+from ..exceptions import HassioError
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class HostManager(CoreSysAttributes):
+    """Manage supported function from host."""
+
+    def __init__(self, coresys):
+        """Initialize Host manager."""
+        self.coresys = coresys
+        self._alsa = AlsaAudio(coresys)
+        self._apparmor = AppArmorControl(coresys)
+        self._control = SystemControl(coresys)
+        self._info = InfoCenter(coresys)
+        self._services = ServiceManager(coresys)
+
+    @property
+    def alsa(self):
+        """Return host ALSA handler."""
+        return self._alsa
+
+    @property
+    def apparmor(self):
+        """Return host AppArmor handler."""
+        return self._apparmor
+
+    @property
+    def control(self):
+        """Return host control handler."""
+        return self._control
+
+    @property
+    def info(self):
+        """Return host info handler."""
+        return self._info
+
+    @property
+    def services(self):
+        """Return host services handler."""
+        return self._services
+
+    @property
+    def supperted_features(self):
+        """Return a list of supported host features."""
+        features = []
+
+        if self.sys_dbus.systemd.is_connected:
+            features.extend([
+                FEATURES_REBOOT,
+                FEATURES_SHUTDOWN,
+                FEATURES_SERVICES,
+            ])
+
+        if self.sys_dbus.hostname.is_connected:
+            features.append(FEATURES_HOSTNAME)
+
+        if self.sys_hassos.available:
+            features.append(FEATURES_HASSOS)
+
+        return features
+
+    async def reload(self):
+        """Reload host functions."""
+        if self.sys_dbus.hostname.is_connected:
+            await self.info.update()
+
+        if self.sys_dbus.systemd.is_connected:
+            await self.services.update()
+
+    async def load(self):
+        """Load host information."""
+        with suppress(HassioError):
+            await self.reload()
+
+        # Load profile data
+        try:
+            await self.apparmor.load()
+        except HassioError as err:
+            _LOGGER.warning("Load host AppArmor on start fails: %s", err)
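A sketch of how an API layer might consume a loaded HostManager; note the feature-list property really is spelled supperted_features in this revision (function name illustrative):

def host_info_payload(host):
    """Build an info payload from a loaded HostManager (sketch)."""
    return {
        'features': host.supperted_features,  # typo'd name as shipped
        'hostname': host.info.hostname,
        'operating_system': host.info.operating_system,
    }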
hassio/host/alsa.py (new file, 137 lines)
@@ -0,0 +1,137 @@
+"""Host Audio-support."""
+import logging
+import json
+from pathlib import Path
+from string import Template
+
+import attr
+
+from ..const import ATTR_INPUT, ATTR_OUTPUT, ATTR_DEVICES, ATTR_NAME
+from ..coresys import CoreSysAttributes
+
+_LOGGER = logging.getLogger(__name__)
+
+# pylint: disable=invalid-name
+DefaultConfig = attr.make_class('DefaultConfig', ['input', 'output'])
+
+
+class AlsaAudio(CoreSysAttributes):
+    """Handle Audio ALSA host data."""
+
+    def __init__(self, coresys):
+        """Initialize Alsa audio system."""
+        self.coresys = coresys
+        self._data = {
+            ATTR_INPUT: {},
+            ATTR_OUTPUT: {},
+        }
+        self._cache = 0
+        self._default = None
+
+    @property
+    def input_devices(self):
+        """Return list of ALSA input devices."""
+        self._update_device()
+        return self._data[ATTR_INPUT]
+
+    @property
+    def output_devices(self):
+        """Return list of ALSA output devices."""
+        self._update_device()
+        return self._data[ATTR_OUTPUT]
+
+    def _update_device(self):
+        """Update internal device DB."""
+        current_id = hash(frozenset(self.sys_hardware.audio_devices))
+
+        # Need rebuild?
+        if current_id == self._cache:
+            return
+
+        # Clean old stuff
+        self._data[ATTR_INPUT].clear()
+        self._data[ATTR_OUTPUT].clear()
+
+        # Init database
+        _LOGGER.info("Update ALSA device list")
+        database = self._audio_database()
+
+        # Process devices
+        for dev_id, dev_data in self.sys_hardware.audio_devices.items():
+            for chan_id, chan_type in dev_data[ATTR_DEVICES].items():
+                alsa_id = f"{dev_id},{chan_id}"
+                dev_name = dev_data[ATTR_NAME]
+
+                # Lookup type
+                if chan_type.endswith('playback'):
+                    key = ATTR_OUTPUT
+                elif chan_type.endswith('capture'):
+                    key = ATTR_INPUT
+                else:
+                    _LOGGER.warning("Unknown channel type: %s", chan_type)
+                    continue
+
+                # Use name from DB or a generic name
+                self._data[key][alsa_id] = database.get(
+                    self.sys_machine, {}).get(
+                        dev_name, {}).get(alsa_id, f"{dev_name}: {chan_id}")
+
+        self._cache = current_id
+
+    @staticmethod
+    def _audio_database():
+        """Read local json audio data into dict."""
+        json_file = Path(__file__).parent.joinpath("data/audiodb.json")
+
+        try:
+            # pylint: disable=no-member
+            with json_file.open('r') as database:
+                return json.loads(database.read())
+        except (ValueError, OSError) as err:
+            _LOGGER.warning("Can't read audio DB: %s", err)
+
+        return {}
+
+    @property
+    def default(self):
+        """Generate ALSA default setting."""
+        # Init defaults
+        if self._default is None:
+            database = self._audio_database()
+            alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT)
+            alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT)
+
+            self._default = DefaultConfig(alsa_input, alsa_output)
+
+        # Search exists/new output
+        if self._default.output is None and self.output_devices:
+            self._default.output = next(iter(self.output_devices))
+            _LOGGER.info("Detect output device %s", self._default.output)
+
+        # Search exists/new input
+        if self._default.input is None and self.input_devices:
+            self._default.input = next(iter(self.input_devices))
+            _LOGGER.info("Detect input device %s", self._default.input)
+
+        return self._default
+
+    def asound(self, alsa_input=None, alsa_output=None):
+        """Generate an asound data."""
+        alsa_input = alsa_input or self.default.input
+        alsa_output = alsa_output or self.default.output
+
+        # Read Template
+        asound_file = Path(__file__).parent.joinpath("data/asound.tmpl")
+        try:
+            # pylint: disable=no-member
+            with asound_file.open('r') as asound:
+                asound_data = asound.read()
+        except OSError as err:
+            _LOGGER.error("Can't read asound.tmpl: %s", err)
+            return ""
+
+        # Process Template
+        asound_template = Template(asound_data)
+        return asound_template.safe_substitute(
+            input=alsa_input, output=alsa_output
+        )
hassio/host/apparmor.py (new file, 121 lines)
@@ -0,0 +1,121 @@
+"""AppArmor control for host."""
+import logging
+import shutil
+from pathlib import Path
+
+from ..coresys import CoreSysAttributes
+from ..exceptions import DBusError, HostAppArmorError
+from ..utils.apparmor import validate_profile
+
+_LOGGER = logging.getLogger(__name__)
+
+SYSTEMD_SERVICES = {'hassos-apparmor.service', 'hassio-apparmor.service'}
+
+
+class AppArmorControl(CoreSysAttributes):
+    """Handle host AppArmor controls."""
+
+    def __init__(self, coresys):
+        """Initialize host power handling."""
+        self.coresys = coresys
+        self._profiles = set()
+        self._service = None
+
+    @property
+    def available(self):
+        """Return True if AppArmor is available on host."""
+        return self._service is not None
+
+    def exists(self, profile):
+        """Return True if a profile exists."""
+        return profile in self._profiles
+
+    async def _reload_service(self):
+        """Reload internal service."""
+        try:
+            await self.sys_host.services.reload(self._service)
+        except DBusError as err:
+            _LOGGER.error("Can't reload %s: %s", self._service, err)
+
+    def _get_profile(self, profile_name):
+        """Get a profile from AppArmor store."""
+        if profile_name not in self._profiles:
+            _LOGGER.error("Can't find %s for removing", profile_name)
+            raise HostAppArmorError()
+        return Path(self.sys_config.path_apparmor, profile_name)
+
+    async def load(self):
+        """Load available profiles."""
+        for content in self.sys_config.path_apparmor.iterdir():
+            if not content.is_file():
+                continue
+            self._profiles.add(content.name)
+
+        # Is connected with systemd?
+        _LOGGER.info("Load AppArmor Profiles: %s", self._profiles)
+        for service in SYSTEMD_SERVICES:
+            if not self.sys_host.services.exists(service):
+                continue
+            self._service = service
+
+        # Load profiles
+        if self.available:
+            await self._reload_service()
+        else:
+            _LOGGER.info("AppArmor is not enabled on Host")
+
+    async def load_profile(self, profile_name, profile_file):
+        """Load/Update a new/exists profile into AppArmor."""
+        if not validate_profile(profile_name, profile_file):
+            _LOGGER.error("Profile is not valid with name %s", profile_name)
+            raise HostAppArmorError()
+
+        # Copy to AppArmor folder
+        dest_profile = Path(self.sys_config.path_apparmor, profile_name)
+        try:
+            shutil.copy(profile_file, dest_profile)
+        except OSError as err:
+            _LOGGER.error("Can't copy %s: %s", profile_file, err)
+            raise HostAppArmorError() from None
+
+        # Load profiles
+        _LOGGER.info("Add or Update AppArmor profile: %s", profile_name)
+        self._profiles.add(profile_name)
+        if self.available:
+            await self._reload_service()
+
+    async def remove_profile(self, profile_name):
+        """Remove an AppArmor profile."""
+        profile_file = self._get_profile(profile_name)
+
+        # Only remove file
+        if not self.available:
+            try:
+                profile_file.unlink()
+            except OSError as err:
+                _LOGGER.error("Can't remove profile: %s", err)
+                raise HostAppArmorError()
+            return
+
+        # Mark as removed and start host process
+        remove_profile = Path(
+            self.sys_config.path_apparmor, 'remove', profile_name)
+        try:
+            profile_file.rename(remove_profile)
+        except OSError as err:
+            _LOGGER.error("Can't mark profile as remove: %s", err)
+            raise HostAppArmorError()
+
+        _LOGGER.info("Remove AppArmor profile: %s", profile_name)
+        self._profiles.remove(profile_name)
+        await self._reload_service()
+
+    def backup_profile(self, profile_name, backup_file):
+        """Backup a profile into a new file."""
+        profile_file = self._get_profile(profile_name)
+
+        try:
+            shutil.copy(profile_file, backup_file)
+        except OSError as err:
+            _LOGGER.error("Can't backup profile %s: %s", profile_name, err)
+            raise HostAppArmorError()
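A typical lifecycle against AppArmorControl, e.g. from an add-on install path (hypothetical caller, not part of this changeset):

from hassio.exceptions import HostAppArmorError


async def install_addon_profile(host, name, source_file):
    """Install a profile and report success (sketch)."""
    try:
        # validates the profile, copies it into path_apparmor,
        # and reloads the hassio/hassos apparmor systemd unit if present
        await host.apparmor.load_profile(name, source_file)
    except HostAppArmorError:
        return False
    return host.apparmor.exists(name)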
hassio/host/control.py (new file, 56 lines)
@@ -0,0 +1,56 @@
+"""Power control for host."""
+import logging
+
+from ..coresys import CoreSysAttributes
+from ..exceptions import HostNotSupportedError
+
+_LOGGER = logging.getLogger(__name__)
+
+MANAGER = 'manager'
+HOSTNAME = 'hostname'
+
+
+class SystemControl(CoreSysAttributes):
+    """Handle host power controls."""
+
+    def __init__(self, coresys):
+        """Initialize host power handling."""
+        self.coresys = coresys
+
+    def _check_dbus(self, flag):
+        """Check if systemd is connected or raise error."""
+        if flag == MANAGER and self.sys_dbus.systemd.is_connected:
+            return
+        if flag == HOSTNAME and self.sys_dbus.hostname.is_connected:
+            return
+
+        _LOGGER.error("No %s dbus connection available", flag)
+        raise HostNotSupportedError()
+
+    async def reboot(self):
+        """Reboot host system."""
+        self._check_dbus(MANAGER)
+
+        _LOGGER.info("Initialize host reboot over systemd")
+        try:
+            await self.sys_core.shutdown()
+        finally:
+            await self.sys_dbus.systemd.reboot()
+
+    async def shutdown(self):
+        """Shutdown host system."""
+        self._check_dbus(MANAGER)
+
+        _LOGGER.info("Initialize host power off over systemd")
+        try:
+            await self.sys_core.shutdown()
+        finally:
+            await self.sys_dbus.systemd.power_off()
+
+    async def set_hostname(self, hostname):
+        """Set a new local hostname."""
+        self._check_dbus(HOSTNAME)
+
+        _LOGGER.info("Set Hostname %s", hostname)
+        await self.sys_dbus.hostname.set_static_hostname(hostname)
+        await self.sys_host.info.update()
hassio/host/data/asound.tmpl (new file, 17 lines)
@@ -0,0 +1,17 @@
+pcm.!default {
+    type asym
+    capture.pcm "mic"
+    playback.pcm "speaker"
+}
+pcm.mic {
+    type plug
+    slave {
+        pcm "hw:$input"
+    }
+}
+pcm.speaker {
+    type plug
+    slave {
+        pcm "hw:$output"
+    }
+}
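This template is consumed by AlsaAudio.asound() through string.Template; rendered standalone (abridged template for illustration):

from string import Template

tmpl = Template('pcm.mic { slave { pcm "hw:$input" } }\n'
                'pcm.speaker { slave { pcm "hw:$output" } }')
print(tmpl.safe_substitute(input='0,0', output='0,1'))
# pcm.mic { slave { pcm "hw:0,0" } }
# pcm.speaker { slave { pcm "hw:0,1" } }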
hassio/host/data/audiodb.json (new file, 18 lines)
@@ -0,0 +1,18 @@
+{
+    "raspberrypi3": {
+        "bcm2835 - bcm2835 ALSA": {
+            "0,0": "Raspberry Jack",
+            "0,1": "Raspberry HDMI"
+        },
+        "output": "0,0",
+        "input": null
+    },
+    "raspberrypi2": {
+        "output": "0,0",
+        "input": null
+    },
+    "raspberrypi": {
+        "output": "0,0",
+        "input": null
+    }
+}
hassio/host/info.py (new file, 58 lines)
@@ -0,0 +1,58 @@
+"""Info control for host."""
+import logging
+
+from ..coresys import CoreSysAttributes
+from ..exceptions import HassioError, HostNotSupportedError
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class InfoCenter(CoreSysAttributes):
+    """Handle local system information controls."""
+
+    def __init__(self, coresys):
+        """Initialize system center handling."""
+        self.coresys = coresys
+        self._data = {}
+
+    @property
+    def hostname(self):
+        """Return local hostname."""
+        return self._data.get('StaticHostname') or None
+
+    @property
+    def chassis(self):
+        """Return local chassis type."""
+        return self._data.get('Chassis') or None
+
+    @property
+    def deployment(self):
+        """Return local deployment type."""
+        return self._data.get('Deployment') or None
+
+    @property
+    def kernel(self):
+        """Return local kernel version."""
+        return self._data.get('KernelRelease') or None
+
+    @property
+    def operating_system(self):
+        """Return local operating system."""
+        return self._data.get('OperatingSystemPrettyName') or None
+
+    @property
+    def cpe(self):
+        """Return local CPE."""
+        return self._data.get('OperatingSystemCPEName') or None
+
+    async def update(self):
+        """Update properties over dbus."""
+        if not self.sys_dbus.hostname.is_connected:
+            _LOGGER.error("No hostname dbus connection available")
+            raise HostNotSupportedError()
+
+        _LOGGER.info("Update local host information")
+        try:
+            self._data = await self.sys_dbus.hostname.get_properties()
+        except HassioError:
+            _LOGGER.warning("Can't update host system information!")
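The InfoCenter properties are thin lookups into the property map returned by systemd-hostnamed; a representative payload (values illustrative):

data = {
    'StaticHostname': 'hassio',
    'Chassis': 'embedded',
    'Deployment': 'production',
    'KernelRelease': '4.14.98-v7',
    'OperatingSystemPrettyName': 'HassOS 1.1',
    'OperatingSystemCPEName':
        'cpe:2.3:o:home_assistant:hassos:1.1:*:production:*:*:*:*:*',
}
# e.g. InfoCenter.kernel would yield '4.14.98-v7' from this map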
hassio/host/services.py (new file, 99 lines)
@@ -0,0 +1,99 @@
+"""Service control for host."""
+import logging
+
+import attr
+
+from ..coresys import CoreSysAttributes
+from ..exceptions import HassioError, HostNotSupportedError, HostServiceError
+
+_LOGGER = logging.getLogger(__name__)
+
+MOD_REPLACE = 'replace'
+
+
+class ServiceManager(CoreSysAttributes):
+    """Handle local service information controls."""
+
+    def __init__(self, coresys):
+        """Initialize system center handling."""
+        self.coresys = coresys
+        self._services = set()
+
+    def __iter__(self):
+        """Iterate through services."""
+        return iter(self._services)
+
+    def _check_dbus(self, unit=None):
+        """Check available dbus connection."""
+        if not self.sys_dbus.systemd.is_connected:
+            _LOGGER.error("No systemd dbus connection available")
+            raise HostNotSupportedError()
+
+        if unit and not self.exists(unit):
+            _LOGGER.error("Unit '%s' not found", unit)
+            raise HostServiceError()
+
+    def start(self, unit):
+        """Start a service on host."""
+        self._check_dbus(unit)
+
+        _LOGGER.info("Start local service %s", unit)
+        return self.sys_dbus.systemd.start_unit(unit, MOD_REPLACE)
+
+    def stop(self, unit):
+        """Stop a service on host."""
+        self._check_dbus(unit)
+
+        _LOGGER.info("Stop local service %s", unit)
+        return self.sys_dbus.systemd.stop_unit(unit, MOD_REPLACE)
+
+    def reload(self, unit):
+        """Reload a service on host."""
+        self._check_dbus(unit)
+
+        _LOGGER.info("Reload local service %s", unit)
+        return self.sys_dbus.systemd.reload_unit(unit, MOD_REPLACE)
+
+    def restart(self, unit):
+        """Restart a service on host."""
+        self._check_dbus(unit)
+
+        _LOGGER.info("Restart local service %s", unit)
+        return self.sys_dbus.systemd.restart_unit(unit, MOD_REPLACE)
+
+    def exists(self, unit):
+        """Check if a unit exists and return True."""
+        for service in self._services:
+            if unit == service.name:
+                return True
+        return False
+
+    async def update(self):
+        """Update properties over dbus."""
+        self._check_dbus()
+
+        _LOGGER.info("Update service information")
+        self._services.clear()
+        try:
+            systemd_units = await self.sys_dbus.systemd.list_units()
+            for service_data in systemd_units[0]:
+                if not service_data[0].endswith(".service") or \
+                        service_data[2] != 'loaded':
+                    continue
+                self._services.add(ServiceInfo.read_from(service_data))
+        except (HassioError, IndexError):
+            _LOGGER.warning("Can't update host service information!")
+
+
+@attr.s(frozen=True)
+class ServiceInfo:
+    """Represent a single Service."""
+
+    name = attr.ib(type=str)
+    description = attr.ib(type=str)
+    state = attr.ib(type=str)
+
+    @staticmethod
+    def read_from(unit):
+        """Parse data from dbus into this object."""
+        return ServiceInfo(unit[0], unit[1], unit[3])
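ServiceInfo.read_from() picks indexes 0, 1 and 3 of a systemd ListUnits tuple, i.e. name, description and active-state; a standalone example with a representative unit tuple:

unit = ('cron.service', 'Regular background program processing daemon',
        'loaded', 'active', 'running', '',
        '/org/freedesktop/systemd1/unit/cron_2eservice', 0, '', '/')
info = ServiceInfo.read_from(unit)
assert (info.name, info.state) == ('cron.service', 'active')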
@@ -3,12 +3,14 @@ import asyncio
 import logging
 import shlex

+import async_timeout
+
 _LOGGER = logging.getLogger(__name__)

 COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"


-class DNSForward(object):
+class DNSForward:
     """Manage DNS forwarding to internal DNS."""

     def __init__(self, loop):
@@ -38,5 +40,10 @@ class DNSForward(object):
             return

         self.proc.kill()
-        await self.proc.wait()
+        try:
+            with async_timeout.timeout(5):
+                await self.proc.wait()
+        except asyncio.TimeoutError:
+            _LOGGER.warning("Stop waiting for DNS shutdown")

         _LOGGER.info("Stop DNS forwarding")
@@ -23,8 +23,8 @@ GPIO_DEVICES = Path("/sys/class/gpio")
 RE_TTY = re.compile(r"tty[A-Z]+")


-class Hardware(object):
-    """Represent a interface to procfs, sysfs and udev."""
+class Hardware:
+    """Represent an interface to procfs, sysfs and udev."""

     def __init__(self):
         """Init hardware object."""
@@ -63,6 +63,10 @@ class Hardware(object):
     @property
     def audio_devices(self):
         """Return all available audio interfaces."""
+        if not ASOUND_CARDS.exists():
+            _LOGGER.info("No audio devices found")
+            return {}
+
         try:
             with ASOUND_CARDS.open('r') as cards_file:
                 cards = cards_file.read()
@@ -1,124 +0,0 @@
-"""Host control for HassIO."""
-import asyncio
-import json
-import logging
-
-import async_timeout
-
-from ..const import (
-    SOCKET_HC, ATTR_LAST_VERSION, ATTR_VERSION, ATTR_TYPE, ATTR_FEATURES,
-    ATTR_HOSTNAME, ATTR_OS)
-
-_LOGGER = logging.getLogger(__name__)
-
-TIMEOUT = 15
-UNKNOWN = 'unknown'
-
-FEATURES_SHUTDOWN = 'shutdown'
-FEATURES_REBOOT = 'reboot'
-FEATURES_UPDATE = 'update'
-FEATURES_HOSTNAME = 'hostname'
-FEATURES_NETWORK_INFO = 'network_info'
-FEATURES_NETWORK_CONTROL = 'network_control'
-
-
-class HostControl(object):
-    """Client for host control."""
-
-    def __init__(self, loop):
-        """Initialize HostControl socket client."""
-        self.loop = loop
-        self.active = False
-        self.version = UNKNOWN
-        self.last_version = UNKNOWN
-        self.type = UNKNOWN
-        self.features = []
-        self.hostname = UNKNOWN
-        self.os_info = UNKNOWN
-
-        if SOCKET_HC.is_socket():
-            self.active = True
-
-    async def _send_command(self, command):
-        """Send command to host.
-
-        Is a coroutine.
-        """
-        if not self.active:
-            return
-
-        reader, writer = await asyncio.open_unix_connection(
-            str(SOCKET_HC), loop=self.loop)
-
-        try:
-            # send
-            _LOGGER.info("Send '%s' to HostControl.", command)
-
-            with async_timeout.timeout(TIMEOUT, loop=self.loop):
-                writer.write("{}\n".format(command).encode())
-                data = await reader.readline()
-
-            response = data.decode().rstrip()
-            _LOGGER.info("Receive from HostControl: %s.", response)
-
-            if response == "OK":
-                return True
-            elif response == "ERROR":
-                return False
-            elif response == "WRONG":
-                return None
-            else:
-                try:
-                    return json.loads(response)
-                except json.JSONDecodeError:
-                    _LOGGER.warning("Json parse error from HostControl '%s'.",
-                                    response)
-
-        except asyncio.TimeoutError:
-            _LOGGER.error("Timeout from HostControl!")
-
-        finally:
-            writer.close()
-
-    async def load(self):
-        """Load Info from host.
-
-        Return a coroutine.
-        """
-        info = await self._send_command("info")
-        if not info:
-            return
-
-        self.version = info.get(ATTR_VERSION, UNKNOWN)
-        self.last_version = info.get(ATTR_LAST_VERSION, UNKNOWN)
-        self.type = info.get(ATTR_TYPE, UNKNOWN)
-        self.features = info.get(ATTR_FEATURES, [])
-        self.hostname = info.get(ATTR_HOSTNAME, UNKNOWN)
-        self.os_info = info.get(ATTR_OS, UNKNOWN)
-
-    def reboot(self):
-        """Reboot the host system.
-
-        Return a coroutine.
-        """
-        return self._send_command("reboot")
-
-    def shutdown(self):
-        """Shutdown the host system.
-
-        Return a coroutine.
-        """
-        return self._send_command("shutdown")
-
-    def update(self, version=None):
-        """Update the host system.
-
-        Return a coroutine.
-        """
-        if version:
-            return self._send_command("update {}".format(version))
-        return self._send_command("update")
-
-    def set_hostname(self, hostname):
-        """Update hostname on host."""
-        return self._send_command("hostname {}".format(hostname))
@@ -10,7 +10,7 @@ CALL = 'callback'
 TASK = 'task'


-class Scheduler(object):
+class Scheduler:
     """Schedule task inside HassIO."""

     def __init__(self, loop):
@@ -1,8 +1,8 @@
 """Handle internal services discovery."""

-from .discovery import Discovery  # noqa
 from .mqtt import MQTTService
 from .data import ServicesData
+from .discovery import Discovery
 from ..const import SERVICE_MQTT
 from ..coresys import CoreSysAttributes

@@ -19,7 +19,6 @@ class ServiceManager(CoreSysAttributes):
         """Initialize Services handler."""
         self.coresys = coresys
         self.data = ServicesData()
-        self.discovery = Discovery(coresys)
         self.services_obj = {}

     @property
@@ -37,9 +36,9 @@ class ServiceManager(CoreSysAttributes):
             self.services_obj[slug] = service(self.coresys)

         # Read exists discovery messages
-        self.discovery.load()
+        self.sys_discovery.load()

     def reset(self):
         """Reset available data."""
         self.data.reset_data()
-        self.discovery.load()
+        self.sys_discovery.load()
@@ -36,7 +36,7 @@ class Discovery(CoreSysAttributes):
 
         self._data.clear()
         self._data.extend(messages)
-        self._services.data.save_data()
+        self.sys_services.data.save_data()
 
     def get(self, uuid):
         """Return discovery message."""
@@ -45,7 +45,7 @@ class Discovery(CoreSysAttributes):
     @property
     def _data(self):
         """Return discovery data."""
-        return self._services.data.discovery
+        return self.sys_services.data.discovery
 
     @property
     def list_messages(self):
@@ -56,7 +56,7 @@ class Discovery(CoreSysAttributes):
         """Send a discovery message to Home-Assistant."""
         message = Message(provider, component, platform, config)
 
-        # Allready exists?
+        # Already exists?
         for exists_message in self.message_obj:
             if exists_message == message:
                 _LOGGER.warning("Found douplicate discovery message from %s",
@@ -69,7 +69,7 @@ class Discovery(CoreSysAttributes):
         self.save()
 
         # send event to Home-Assistant
-        self._loop.create_task(self._homeassistant.send_event(
+        self.sys_create_task(self.sys_homeassistant.send_event(
             EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid}))
 
         return message
@@ -80,11 +80,11 @@ class Discovery(CoreSysAttributes):
         self.save()
 
         # send event to Home-Assistant
-        self._loop.create_task(self._homeassistant.send_event(
+        self.sys_create_task(self.sys_homeassistant.send_event(
             EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid}))
 
 
-class Message(object):
+class Message:
     """Represent a single Discovery message."""
 
     def __init__(self, provider, component, platform, config, uuid=None):
@@ -37,7 +37,7 @@ class ServiceInterface(CoreSysAttributes):
 
     def save(self):
         """Save changes."""
-        self._services.data.save_data()
+        self.sys_services.data.save_data()
 
     def get_service_data(self):
         """Return the requested service data."""
@@ -21,7 +21,7 @@ class MQTTService(ServiceInterface):
     @property
     def _data(self):
         """Return data of this service."""
-        return self._services.data.mqtt
+        return self.sys_services.data.mqtt
 
     @property
     def schema(self):
@@ -66,7 +66,7 @@ class MQTTService(ServiceInterface):
             return True
 
         # discover mqtt to homeassistant
-        message = self._services.discovery.send(
+        message = self.sys_discovery.send(
             provider, SERVICE_MQTT, None, self.hass_config)
 
         self._data[ATTR_DISCOVERY_ID] = message.uuid
@@ -81,8 +81,8 @@ class MQTTService(ServiceInterface):
 
         discovery_id = self._data.get(ATTR_DISCOVERY_ID)
         if discovery_id:
-            self._services.discovery.remove(
-                self._services.discovery.get(discovery_id))
+            self.sys_discovery.remove(
+                self.sys_discovery.get(discovery_id))
 
         self._data.clear()
         self.save()
@@ -35,7 +35,7 @@ class SnapshotManager(CoreSysAttributes):
         """Initialize a new snapshot object from name."""
         date_str = utcnow().isoformat()
         slug = create_slug(name, date_str)
-        tar_file = Path(self._config.path_backup, f"{slug}.tar")
+        tar_file = Path(self.sys_config.path_backup, f"{slug}.tar")
 
         # init object
         snapshot = Snapshot(self.coresys, tar_file)
@@ -65,11 +65,11 @@ class SnapshotManager(CoreSysAttributes):
             self.snapshots_obj[snapshot.slug] = snapshot
 
         tasks = [_load_snapshot(tar_file) for tar_file in
-                 self._config.path_backup.glob("*.tar")]
+                 self.sys_config.path_backup.glob("*.tar")]
 
         _LOGGER.info("Found %d snapshot files", len(tasks))
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
 
     def remove(self, snapshot):
         """Remove a snapshot."""
@@ -92,13 +92,13 @@ class SnapshotManager(CoreSysAttributes):
         if not await snapshot.load():
             return None
 
-        # Allready exists?
+        # Already exists?
         if snapshot.slug in self.snapshots_obj:
-            _LOGGER.error("Snapshot %s allready exists!", snapshot.slug)
+            _LOGGER.error("Snapshot %s already exists!", snapshot.slug)
             return None
 
         # Move snapshot to backup
-        tar_origin = Path(self._config.path_backup, f"{snapshot.slug}.tar")
+        tar_origin = Path(self.sys_config.path_backup, f"{snapshot.slug}.tar")
         try:
             snapshot.tarfile.rename(tar_origin)
 
@@ -124,7 +124,7 @@ class SnapshotManager(CoreSysAttributes):
         snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password)
         _LOGGER.info("Full-Snapshot %s start", snapshot.slug)
         try:
-            self._scheduler.suspend = True
+            self.sys_scheduler.suspend = True
             await self.lock.acquire()
 
             async with snapshot:
@@ -146,7 +146,7 @@ class SnapshotManager(CoreSysAttributes):
             return snapshot
 
         finally:
-            self._scheduler.suspend = False
+            self.sys_scheduler.suspend = False
             self.lock.release()
 
     async def do_snapshot_partial(self, name="", addons=None, folders=None,
@@ -162,25 +162,28 @@ class SnapshotManager(CoreSysAttributes):
 
         _LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
         try:
-            self._scheduler.suspend = True
+            self.sys_scheduler.suspend = True
             await self.lock.acquire()
 
             async with snapshot:
                 # Snapshot add-ons
                 addon_list = []
                 for addon_slug in addons:
-                    addon = self._addons.get(addon_slug)
+                    addon = self.sys_addons.get(addon_slug)
                     if addon and addon.is_installed:
                         addon_list.append(addon)
                         continue
-                    _LOGGER.warning("Add-on %s not found", addon_slug)
+                    _LOGGER.warning(
+                        "Add-on %s not found/installed", addon_slug)
 
-                _LOGGER.info("Snapshot %s store Add-ons", snapshot.slug)
-                await snapshot.store_addons(addon_list)
+                if addon_list:
+                    _LOGGER.info("Snapshot %s store Add-ons", snapshot.slug)
+                    await snapshot.store_addons(addon_list)
 
-                # snapshot folders
-                _LOGGER.info("Snapshot %s store folders", snapshot.slug)
-                await snapshot.store_folders(folders)
+                # Snapshot folders
+                if folders:
+                    _LOGGER.info("Snapshot %s store folders", snapshot.slug)
+                    await snapshot.store_folders(folders)
 
         except Exception:  # pylint: disable=broad-except
             _LOGGER.exception("Snapshot %s error", snapshot.slug)
@@ -192,7 +195,7 @@ class SnapshotManager(CoreSysAttributes):
             return snapshot
 
         finally:
-            self._scheduler.suspend = False
+            self.sys_scheduler.suspend = False
             self.lock.release()
 
     async def do_restore_full(self, snapshot, password=None):
@@ -212,21 +215,14 @@ class SnapshotManager(CoreSysAttributes):
 
         _LOGGER.info("Full-Restore %s start", snapshot.slug)
         try:
-            self._scheduler.suspend = True
+            self.sys_scheduler.suspend = True
             await self.lock.acquire()
 
             async with snapshot:
                 tasks = []
 
                 # Stop Home-Assistant / Add-ons
-                tasks.append(self._homeassistant.stop())
-                for addon in self._addons.list_addons:
-                    if addon.is_installed:
-                        tasks.append(addon.stop())
-
-                if tasks:
-                    _LOGGER.info("Restore %s stop tasks", snapshot.slug)
-                    await asyncio.wait(tasks, loop=self._loop)
+                await self.sys_core.shutdown()
 
                 # Restore folders
                 _LOGGER.info("Restore %s run folders", snapshot.slug)
@@ -235,8 +231,8 @@ class SnapshotManager(CoreSysAttributes):
                 # Start homeassistant restore
                 _LOGGER.info("Restore %s run Home-Assistant", snapshot.slug)
                 snapshot.restore_homeassistant()
-                task_hass = self._loop.create_task(
-                    self._homeassistant.update(snapshot.homeassistant_version))
+                task_hass = self.sys_create_task(self.sys_homeassistant.update(
+                    snapshot.homeassistant_version))
 
                 # Restore repositories
                 _LOGGER.info("Restore %s run Repositories", snapshot.slug)
@@ -244,13 +240,13 @@ class SnapshotManager(CoreSysAttributes):
 
                 # Delete delta add-ons
                 tasks.clear()
-                for addon in self._addons.list_installed:
+                for addon in self.sys_addons.list_installed:
                     if addon.slug not in snapshot.addon_list:
                         tasks.append(addon.uninstall())
 
                 if tasks:
                     _LOGGER.info("Restore %s remove add-ons", snapshot.slug)
-                    await asyncio.wait(tasks, loop=self._loop)
+                    await asyncio.wait(tasks)
 
                 # Restore add-ons
                 _LOGGER.info("Restore %s old add-ons", snapshot.slug)
@@ -260,7 +256,7 @@ class SnapshotManager(CoreSysAttributes):
                 _LOGGER.info("Restore %s wait until homeassistant ready",
                              snapshot.slug)
                 await task_hass
-                await self._homeassistant.start()
+                await self.sys_homeassistant.start()
 
         except Exception:  # pylint: disable=broad-except
             _LOGGER.exception("Restore %s error", snapshot.slug)
@@ -271,7 +267,7 @@ class SnapshotManager(CoreSysAttributes):
             return True
 
         finally:
-            self._scheduler.suspend = False
+            self.sys_scheduler.suspend = False
             self.lock.release()
 
     async def do_restore_partial(self, snapshot, homeassistant=False,
@@ -290,12 +286,13 @@ class SnapshotManager(CoreSysAttributes):
 
         _LOGGER.info("Partial-Restore %s start", snapshot.slug)
         try:
-            self._scheduler.suspend = True
+            self.sys_scheduler.suspend = True
             await self.lock.acquire()
 
             async with snapshot:
-                if FOLDER_HOMEASSISTANT in folders:
-                    await self._homeassistant.stop()
+                # Stop Home-Assistant if they will be restored later
+                if homeassistant and FOLDER_HOMEASSISTANT in folders:
+                    await self.sys_homeassistant.stop()
 
                 # Process folders
                 if folders:
@@ -308,14 +305,14 @@ class SnapshotManager(CoreSysAttributes):
                     _LOGGER.info("Restore %s run Home-Assistant",
                                  snapshot.slug)
                     snapshot.restore_homeassistant()
-                    task_hass = self._loop.create_task(
-                        self._homeassistant.update(
+                    task_hass = self.sys_create_task(
+                        self.sys_homeassistant.update(
                             snapshot.homeassistant_version))
 
                 # Process Add-ons
                 addon_list = []
                 for slug in addons:
-                    addon = self._addons.get(slug)
+                    addon = self.sys_addons.get(slug)
                     if addon:
                         addon_list.append(addon)
                         continue
@@ -330,7 +327,7 @@ class SnapshotManager(CoreSysAttributes):
                     _LOGGER.info("Restore %s wait for Home-Assistant",
                                  snapshot.slug)
                     await task_hass
-                    await self._homeassistant.start()
+                    await self.sys_homeassistant.start()
 
         except Exception:  # pylint: disable=broad-except
             _LOGGER.exception("Restore %s error", snapshot.slug)
@@ -341,5 +338,5 @@ class SnapshotManager(CoreSysAttributes):
             return True
 
         finally:
-            self._scheduler.suspend = False
+            self.sys_scheduler.suspend = False
             self.lock.release()
@@ -137,7 +137,7 @@ class Snapshot(CoreSysAttributes):
         self._data[ATTR_CRYPTO] = CRYPTO_AES128
 
     def set_password(self, password):
-        """Set the password for a exists snapshot."""
+        """Set the password for an existing snapshot."""
         if not password:
             return False
 
@@ -151,7 +151,7 @@ class Snapshot(CoreSysAttributes):
 
     def _encrypt_data(self, data):
         """Make data secure."""
-        if not self._key:
+        if not self._key or data is None:
             return data
 
         return b64encode(
@@ -159,7 +159,7 @@ class Snapshot(CoreSysAttributes):
 
     def _decrypt_data(self, data):
         """Make data readable."""
-        if not self._key:
+        if not self._key or data is None:
             return data
 
         return Padding.unpad(
@@ -179,7 +179,7 @@ class Snapshot(CoreSysAttributes):
 
         # read snapshot.json
        try:
-            raw = await self._loop.run_in_executor(None, _load_file)
+            raw = await self.sys_run_in_executor(_load_file)
         except (tarfile.TarError, KeyError) as err:
             _LOGGER.error(
                 "Can't read snapshot tarfile %s: %s", self.tarfile, err)
@@ -204,19 +204,19 @@ class Snapshot(CoreSysAttributes):
 
     async def __aenter__(self):
         """Async context to open a snapshot."""
-        self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))
+        self._tmp = TemporaryDirectory(dir=str(self.sys_config.path_tmp))
 
         # create a snapshot
         if not self.tarfile.is_file():
             return self
 
-        # extract a exists snapshot
+        # extract an existing snapshot
         def _extract_snapshot():
             """Extract a snapshot."""
             with tarfile.open(self.tarfile, "r:") as tar:
                 tar.extractall(path=self._tmp.name)
 
-        await self._loop.run_in_executor(None, _extract_snapshot)
+        await self.sys_run_in_executor(_extract_snapshot)
 
     async def __aexit__(self, exception_type, exception_value, traceback):
         """Async context to close a snapshot."""
@@ -241,7 +241,7 @@ class Snapshot(CoreSysAttributes):
 
         try:
             write_json_file(Path(self._tmp.name, "snapshot.json"), self._data)
-            await self._loop.run_in_executor(None, _create_snapshot)
+            await self.sys_run_in_executor(_create_snapshot)
         except (OSError, json.JSONDecodeError) as err:
             _LOGGER.error("Can't write snapshot: %s", err)
         finally:
@@ -249,10 +249,10 @@ class Snapshot(CoreSysAttributes):
 
     async def store_addons(self, addon_list=None):
         """Add a list of add-ons into snapshot."""
-        addon_list = addon_list or self._addons.list_installed
+        addon_list = addon_list or self.sys_addons.list_installed
 
         async def _addon_save(addon):
-            """Task to store a add-on into snapshot."""
+            """Task to store an add-on into snapshot."""
             addon_file = SecureTarFile(
                 Path(self._tmp.name, f"{addon.slug}.tar.gz"),
                 'w', key=self._key)
@@ -273,19 +273,19 @@ class Snapshot(CoreSysAttributes):
         # Run tasks
         tasks = [_addon_save(addon) for addon in addon_list]
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
 
     async def restore_addons(self, addon_list=None):
         """Restore a list add-on from snapshot."""
         if not addon_list:
             addon_list = []
             for addon_slug in self.addon_list:
-                addon = self._addons.get(addon_slug)
+                addon = self.sys_addons.get(addon_slug)
                 if addon:
                     addon_list.append(addon)
 
         async def _addon_restore(addon):
-            """Task to restore a add-on into snapshot."""
+            """Task to restore an add-on into snapshot."""
             addon_file = SecureTarFile(
                 Path(self._tmp.name, f"{addon.slug}.tar.gz"),
                 'r', key=self._key)
@@ -303,7 +303,7 @@ class Snapshot(CoreSysAttributes):
         # Run tasks
         tasks = [_addon_restore(addon) for addon in addon_list]
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
 
     async def store_folders(self, folder_list=None):
         """Backup hassio data into snapshot."""
@@ -313,7 +313,7 @@ class Snapshot(CoreSysAttributes):
             """Intenal function to snapshot a folder."""
             slug_name = name.replace("/", "_")
             tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
-            origin_dir = Path(self._config.path_hassio, name)
+            origin_dir = Path(self.sys_config.path_hassio, name)
 
             # Check if exsits
             if not origin_dir.is_dir():
@@ -332,10 +332,10 @@ class Snapshot(CoreSysAttributes):
                 _LOGGER.warning("Can't snapshot folder %s: %s", name, err)
 
         # Run tasks
-        tasks = [self._loop.run_in_executor(None, _folder_save, folder)
+        tasks = [self.sys_run_in_executor(_folder_save, folder)
                  for folder in folder_list]
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
 
     async def restore_folders(self, folder_list=None):
         """Backup hassio data into snapshot."""
@@ -345,7 +345,7 @@ class Snapshot(CoreSysAttributes):
             """Intenal function to restore a folder."""
             slug_name = name.replace("/", "_")
             tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
-            origin_dir = Path(self._config.path_hassio, name)
+            origin_dir = Path(self.sys_config.path_hassio, name)
 
             # Check if exists inside snapshot
             if not tar_name.exists():
@@ -366,58 +366,58 @@ class Snapshot(CoreSysAttributes):
                 _LOGGER.warning("Can't restore folder %s: %s", name, err)
 
         # Run tasks
-        tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
+        tasks = [self.sys_run_in_executor(_folder_restore, folder)
                  for folder in folder_list]
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
 
     def store_homeassistant(self):
         """Read all data from homeassistant object."""
-        self.homeassistant[ATTR_VERSION] = self._homeassistant.version
-        self.homeassistant[ATTR_WATCHDOG] = self._homeassistant.watchdog
-        self.homeassistant[ATTR_BOOT] = self._homeassistant.boot
-        self.homeassistant[ATTR_WAIT_BOOT] = self._homeassistant.wait_boot
+        self.homeassistant[ATTR_VERSION] = self.sys_homeassistant.version
+        self.homeassistant[ATTR_WATCHDOG] = self.sys_homeassistant.watchdog
+        self.homeassistant[ATTR_BOOT] = self.sys_homeassistant.boot
+        self.homeassistant[ATTR_WAIT_BOOT] = self.sys_homeassistant.wait_boot
 
         # Custom image
-        if self._homeassistant.is_custom_image:
-            self.homeassistant[ATTR_IMAGE] = self._homeassistant.image
+        if self.sys_homeassistant.is_custom_image:
+            self.homeassistant[ATTR_IMAGE] = self.sys_homeassistant.image
             self.homeassistant[ATTR_LAST_VERSION] = \
-                self._homeassistant.last_version
+                self.sys_homeassistant.last_version
 
         # API/Proxy
-        self.homeassistant[ATTR_PORT] = self._homeassistant.api_port
-        self.homeassistant[ATTR_SSL] = self._homeassistant.api_ssl
+        self.homeassistant[ATTR_PORT] = self.sys_homeassistant.api_port
+        self.homeassistant[ATTR_SSL] = self.sys_homeassistant.api_ssl
         self.homeassistant[ATTR_PASSWORD] = \
-            self._encrypt_data(self._homeassistant.api_password)
+            self._encrypt_data(self.sys_homeassistant.api_password)
 
     def restore_homeassistant(self):
         """Write all data to homeassistant object."""
-        self._homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
-        self._homeassistant.boot = self.homeassistant[ATTR_BOOT]
-        self._homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]
+        self.sys_homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
+        self.sys_homeassistant.boot = self.homeassistant[ATTR_BOOT]
+        self.sys_homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]
 
         # Custom image
         if self.homeassistant.get(ATTR_IMAGE):
-            self._homeassistant.image = self.homeassistant[ATTR_IMAGE]
-            self._homeassistant.last_version = \
+            self.sys_homeassistant.image = self.homeassistant[ATTR_IMAGE]
+            self.sys_homeassistant.last_version = \
                 self.homeassistant[ATTR_LAST_VERSION]
 
         # API/Proxy
-        self._homeassistant.api_port = self.homeassistant[ATTR_PORT]
-        self._homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
-        self._homeassistant.api_password = \
+        self.sys_homeassistant.api_port = self.homeassistant[ATTR_PORT]
+        self.sys_homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
+        self.sys_homeassistant.api_password = \
            self._decrypt_data(self.homeassistant[ATTR_PASSWORD])
 
         # save
-        self._homeassistant.save_data()
+        self.sys_homeassistant.save_data()
 
     def store_repositories(self):
         """Store repository list into snapshot."""
-        self.repositories = self._config.addons_repositories
+        self.repositories = self.sys_config.addons_repositories
 
     def restore_repositories(self):
         """Restore repositories from snapshot.
 
         Return a coroutine.
         """
-        return self._addons.load_repositories(self.repositories)
+        return self.sys_addons.load_repositories(self.repositories)
@@ -25,7 +25,7 @@ def password_for_validating(password):
 
 
 def key_to_iv(key):
-    """Generate a iv from Key."""
+    """Generate an iv from Key."""
     for _ in range(100):
         key = hashlib.sha256(key).digest()
     return key[:16]
@@ -15,8 +15,8 @@ ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]
 
 
 def unique_addons(addons_list):
-    """Validate that a add-on is unique."""
-    single = set([addon[ATTR_SLUG] for addon in addons_list])
+    """Validate that an add-on is unique."""
+    single = set(addon[ATTR_SLUG] for addon in addons_list)
 
     if len(single) != len(addons_list):
         raise vol.Invalid("Invalid addon list on snapshot!")
@@ -39,7 +39,7 @@ SCHEMA_SNAPSHOT = vol.Schema({
     vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
     vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
     vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
-    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
+    vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
     vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
     vol.Optional(ATTR_WAIT_BOOT, default=600):
         vol.All(vol.Coerce(int), vol.Range(min=60)),
@@ -1,8 +1,15 @@
 """HomeAssistant control object."""
+import asyncio
 import logging
+from pathlib import Path
+from tempfile import TemporaryDirectory
+
+import aiohttp
 
 from .coresys import CoreSysAttributes
 from .docker.supervisor import DockerSupervisor
+from .const import URL_HASSIO_APPARMOR
+from .exceptions import HostAppArmorError
 
 _LOGGER = logging.getLogger(__name__)
@@ -23,7 +30,7 @@ class Supervisor(CoreSysAttributes):
 
     @property
     def need_update(self):
-        """Return True if a update is available."""
+        """Return True if an update is available."""
         return self.version != self.last_version
 
     @property
@@ -34,7 +41,7 @@ class Supervisor(CoreSysAttributes):
     @property
     def last_version(self):
         """Return last available version of homeassistant."""
-        return self._updater.version_hassio
+        return self.sys_updater.version_hassio
 
     @property
     def image(self):
@@ -46,17 +53,43 @@ class Supervisor(CoreSysAttributes):
         """Return arch of hass.io containter."""
         return self.instance.arch
 
+    async def update_apparmor(self):
+        """Fetch last version and update profile."""
+        url = URL_HASSIO_APPARMOR
+        try:
+            _LOGGER.info("Fetch AppArmor profile %s", url)
+            async with self.sys_websession.get(url, timeout=10) as request:
+                data = await request.text()
+
+        except (aiohttp.ClientError, asyncio.TimeoutError) as err:
+            _LOGGER.warning("Can't fetch AppArmor profile: %s", err)
+            return
+
+        with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_dir:
+            profile_file = Path(tmp_dir, 'apparmor.txt')
+            try:
+                profile_file.write_text(data)
+            except OSError as err:
+                _LOGGER.error("Can't write temporary profile: %s", err)
+                return
+            try:
+                await self.sys_host.apparmor.load_profile(
+                    "hassio-supervisor", profile_file)
+            except HostAppArmorError:
+                _LOGGER.error("Can't update AppArmor profile!")
+
     async def update(self, version=None):
         """Update HomeAssistant version."""
         version = version or self.last_version
 
-        if version == self._supervisor.version:
+        if version == self.sys_supervisor.version:
            _LOGGER.warning("Version %s is already installed", version)
             return
 
         _LOGGER.info("Update supervisor to version %s", version)
         if await self.instance.install(version):
-            self._loop.call_later(1, self._loop.stop)
+            await self.update_apparmor()
+            self.sys_loop.call_later(1, self.sys_loop.stop)
             return True
 
         _LOGGER.error("Update of hass.io fails!")
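The net effect of this hunk: a supervisor self-update now installs the new image, refreshes the AppArmor profile, and only then schedules a loop stop so the supervisor restarts into the new version. A hedged sketch of a caller; the driver function below is hypothetical, not part of this diff:

    # Hypothetical driver: trigger a supervisor self-update and
    # interpret the result of Supervisor.update().
    async def maybe_update_supervisor(supervisor):
        if not supervisor.need_update:
            return False

        # On success, update() installs the image, runs update_apparmor()
        # and schedules a loop stop; it returns True, else None/False.
        return bool(await supervisor.update())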
hassio/tasks.py
@@ -6,56 +6,61 @@ from .coresys import CoreSysAttributes
 
 _LOGGER = logging.getLogger(__name__)
 
+HASS_WATCHDOG_API = 'HASS_WATCHDOG_API'
+
+RUN_UPDATE_SUPERVISOR = 29100
+RUN_UPDATE_ADDONS = 57600
+RUN_UPDATE_HASSOSCLI = 29100
+
+RUN_RELOAD_ADDONS = 21600
+RUN_RELOAD_SNAPSHOTS = 72000
+RUN_RELOAD_HOST = 72000
+RUN_RELOAD_UPDATER = 21600
+
+RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
+RUN_WATCHDOG_HOMEASSISTANT_API = 300
+
 
 class Tasks(CoreSysAttributes):
     """Handle Tasks inside HassIO."""
 
-    RUN_UPDATE_SUPERVISOR = 29100
-    RUN_UPDATE_ADDONS = 57600
-
-    RUN_RELOAD_ADDONS = 21600
-    RUN_RELOAD_SNAPSHOTS = 72000
-    RUN_RELOAD_HOST_CONTROL = 72000
-    RUN_RELOAD_UPDATER = 21600
-
-    RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
-    RUN_WATCHDOG_HOMEASSISTANT_API = 300
-
     def __init__(self, coresys):
         """Initialize Tasks."""
         self.coresys = coresys
         self.jobs = set()
-        self._data = {}
+        self._cache = {}
 
     async def load(self):
         """Add Tasks to scheduler."""
-        self.jobs.add(self._scheduler.register_task(
-            self._update_addons, self.RUN_UPDATE_ADDONS))
-        self.jobs.add(self._scheduler.register_task(
-            self._update_supervisor, self.RUN_UPDATE_SUPERVISOR))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self._update_addons, RUN_UPDATE_ADDONS))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self._update_supervisor, RUN_UPDATE_SUPERVISOR))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self._update_hassos_cli, RUN_UPDATE_HASSOSCLI))
 
-        self.jobs.add(self._scheduler.register_task(
-            self._addons.reload, self.RUN_RELOAD_ADDONS))
-        self.jobs.add(self._scheduler.register_task(
-            self._updater.reload, self.RUN_RELOAD_UPDATER))
-        self.jobs.add(self._scheduler.register_task(
-            self._snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
-        self.jobs.add(self._scheduler.register_task(
-            self._host_control.load, self.RUN_RELOAD_HOST_CONTROL))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_addons.reload, RUN_RELOAD_ADDONS))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_updater.reload, RUN_RELOAD_UPDATER))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_snapshots.reload, RUN_RELOAD_SNAPSHOTS))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_host.reload, RUN_RELOAD_HOST))
 
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._watchdog_homeassistant_docker,
-            self.RUN_WATCHDOG_HOMEASSISTANT_DOCKER))
-        self.jobs.add(self._scheduler.register_task(
+            RUN_WATCHDOG_HOMEASSISTANT_DOCKER))
+        self.jobs.add(self.sys_scheduler.register_task(
             self._watchdog_homeassistant_api,
-            self.RUN_WATCHDOG_HOMEASSISTANT_API))
+            RUN_WATCHDOG_HOMEASSISTANT_API))
 
         _LOGGER.info("All core tasks are scheduled")
 
     async def _update_addons(self):
-        """Check if a update is available of a addon and update it."""
+        """Check if an update is available for an addon and update it."""
         tasks = []
-        for addon in self._addons.list_addons:
+        for addon in self.sys_addons.list_addons:
             if not addon.is_installed or not addon.auto_update:
                 continue
 
@@ -70,35 +75,36 @@ class Tasks(CoreSysAttributes):
 
         if tasks:
             _LOGGER.info("Addon auto update process %d tasks", len(tasks))
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
 
     async def _update_supervisor(self):
         """Check and run update of supervisor hassio."""
-        if not self._supervisor.need_update:
+        if not self.sys_supervisor.need_update:
             return
 
-        # don't perform a update on beta/dev channel
-        if self._updater.beta_channel:
-            _LOGGER.warning("Ignore Hass.io update on beta upstream!")
+        # don't perform an update on dev channel
+        if self.sys_dev:
+            _LOGGER.warning("Ignore Hass.io update on dev channel!")
             return
 
         _LOGGER.info("Found new Hass.io version")
-        await self._supervisor.update()
+        await self.sys_supervisor.update()
 
     async def _watchdog_homeassistant_docker(self):
         """Check running state of docker and start if they is close."""
         # if Home-Assistant is active
-        if not await self._homeassistant.is_initialize() or \
-                not self._homeassistant.watchdog:
+        if not await self.sys_homeassistant.is_initialize() or \
+                not self.sys_homeassistant.watchdog or \
+                self.sys_homeassistant.error_state:
             return
 
         # if Home-Assistant is running
-        if self._homeassistant.in_progress or \
-                await self._homeassistant.is_running():
+        if self.sys_homeassistant.in_progress or \
+                await self.sys_homeassistant.is_running():
             return
 
         _LOGGER.warning("Watchdog found a problem with Home-Assistant docker!")
-        await self._homeassistant.start()
+        await self.sys_homeassistant.start()
@@ -106,25 +112,42 @@ class Tasks(CoreSysAttributes):
         Try 2 times to call API before we restart Home-Assistant. Maybe we had
         a delay in our system.
         """
-        retry_scan = self._data.get('HASS_WATCHDOG_API', 0)
-
         # If Home-Assistant is active
-        if not await self._homeassistant.is_initialize() or \
-                not self._homeassistant.watchdog:
+        if not await self.sys_homeassistant.is_initialize() or \
+                not self.sys_homeassistant.watchdog or \
+                self.sys_homeassistant.error_state:
             return
 
+        # Init cache data
+        retry_scan = self._cache.get(HASS_WATCHDOG_API, 0)
+
         # If Home-Assistant API is up
-        if self._homeassistant.in_progress or \
-                await self._homeassistant.check_api_state():
+        if self.sys_homeassistant.in_progress or \
+                await self.sys_homeassistant.check_api_state():
             return
 
         # Look like we run into a problem
         retry_scan += 1
         if retry_scan == 1:
-            self._data['HASS_WATCHDOG_API'] = retry_scan
+            self._cache[HASS_WATCHDOG_API] = retry_scan
             _LOGGER.warning("Watchdog miss API response from Home-Assistant")
             return
 
         _LOGGER.error("Watchdog found a problem with Home-Assistant API!")
-        await self._homeassistant.restart()
-        self._data['HASS_WATCHDOG_API'] = 0
+        try:
+            await self.sys_homeassistant.restart()
+        finally:
+            self._cache[HASS_WATCHDOG_API] = 0
+
+    async def _update_hassos_cli(self):
+        """Check and run update of HassOS CLI."""
+        if not self.sys_hassos.need_cli_update:
+            return
+
+        # don't perform an update on dev channel
+        if self.sys_dev:
+            _LOGGER.warning("Ignore HassOS CLI update on dev channel!")
+            return
+
+        _LOGGER.info("Found new HassOS CLI version")
+        await self.sys_hassos.update_cli()
@@ -1,19 +1,20 @@
 """Fetch last versions from webserver."""
 import asyncio
+from contextlib import suppress
 from datetime import timedelta
+import json
 import logging
 
 import aiohttp
-import async_timeout
 
 from .const import (
     URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
-    ATTR_BETA_CHANNEL)
+    ATTR_CHANNEL, ATTR_HASSOS, ATTR_HASSOS_CLI)
 from .coresys import CoreSysAttributes
 from .utils import AsyncThrottle
 from .utils.json import JsonConfig
 from .validate import SCHEMA_UPDATER_CONFIG
+from .exceptions import HassioUpdaterError
 
 _LOGGER = logging.getLogger(__name__)
@@ -26,12 +27,15 @@ class Updater(JsonConfig, CoreSysAttributes):
         super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
         self.coresys = coresys
 
-    def load(self):
-        """Update internal data.
+    async def load(self):
+        """Update internal data."""
+        with suppress(HassioUpdaterError):
+            await self.fetch_data()
 
-        Return a coroutine.
-        """
-        return self.reload()
+    async def reload(self):
+        """Update internal data."""
+        with suppress(HassioUpdaterError):
+            await self.fetch_data()
 
     @property
     def version_homeassistant(self):
@@ -44,49 +48,68 @@ class Updater(JsonConfig, CoreSysAttributes):
         return self._data.get(ATTR_HASSIO)
 
     @property
-    def upstream(self):
-        """Return Upstream branch for version."""
-        if self.beta_channel:
-            return 'dev'
-        return 'master'
+    def version_hassos(self):
+        """Return last version of hassos."""
+        return self._data.get(ATTR_HASSOS)
 
     @property
-    def beta_channel(self):
-        """Return True if we run in beta upstream."""
-        return self._data[ATTR_BETA_CHANNEL]
+    def version_hassos_cli(self):
+        """Return last version of hassos cli."""
+        return self._data.get(ATTR_HASSOS_CLI)
 
-    @beta_channel.setter
-    def beta_channel(self, value):
-        """Set beta upstream mode."""
-        self._data[ATTR_BETA_CHANNEL] = bool(value)
+    @property
+    def channel(self):
+        """Return upstream channel of hassio instance."""
+        return self._data[ATTR_CHANNEL]
+
+    @channel.setter
+    def channel(self, value):
+        """Set upstream mode."""
+        self._data[ATTR_CHANNEL] = value
 
     @AsyncThrottle(timedelta(seconds=60))
-    async def reload(self):
+    async def fetch_data(self):
         """Fetch current versions from github.
 
         Is a coroutine.
         """
-        url = URL_HASSIO_VERSION.format(self.upstream)
+        url = URL_HASSIO_VERSION.format(channel=self.channel)
+        machine = self.sys_machine or 'default'
+        board = self.sys_hassos.board
 
         try:
             _LOGGER.info("Fetch update data from %s", url)
-            with async_timeout.timeout(10, loop=self._loop):
-                async with self._websession.get(url) as request:
-                    data = await request.json(content_type=None)
+            async with self.sys_websession.get(url, timeout=10) as request:
+                data = await request.json(content_type=None)
 
-        except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
+        except (aiohttp.ClientError, asyncio.TimeoutError) as err:
             _LOGGER.warning("Can't fetch versions from %s: %s", url, err)
-            return
+            raise HassioUpdaterError() from None
 
         except json.JSONDecodeError as err:
             _LOGGER.warning("Can't parse versions from %s: %s", url, err)
-            return
+            raise HassioUpdaterError() from None
 
         # data valid?
-        if not data:
+        if not data or data.get(ATTR_CHANNEL) != self.channel:
             _LOGGER.warning("Invalid data from %s", url)
-            return
+            raise HassioUpdaterError() from None
 
-        # update versions
-        self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
-        self._data[ATTR_HASSIO] = data.get('hassio')
-        self.save_data()
+        try:
+            # update supervisor version
+            self._data[ATTR_HASSIO] = data['supervisor']
+
+            # update Home Assistant version
+            self._data[ATTR_HOMEASSISTANT] = data['homeassistant'][machine]
+
+            # update hassos version
+            if self.sys_hassos.available and board:
+                self._data[ATTR_HASSOS] = data['hassos'][board]
+                self._data[ATTR_HASSOS_CLI] = data['hassos-cli']
+
+        except KeyError as err:
+            _LOGGER.warning("Can't process version data: %s", err)
+            raise HassioUpdaterError() from None
+
+        else:
+            self.save_data()
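Read together, the keys accessed in fetch_data() imply a version file shaped roughly as follows. Every value below is made up purely for illustration; only the key structure is derived from the code above:

    # Shape only - illustrative values, not real release data.
    EXAMPLE_VERSION_DATA = {
        'channel': 'stable',
        'supervisor': '128',
        'homeassistant': {
            'default': '0.74.2',
            'raspberrypi3': '0.74.2',
        },
        'hassos': {'rpi3': '1.1'},
        'hassos-cli': '5',
    }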
@@ -1,7 +1,9 @@
 """Tools file for HassIO."""
 from datetime import datetime
 import logging
+import re
 
 _LOGGER = logging.getLogger(__name__)
+RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
 
 
@@ -10,7 +12,22 @@ def convert_to_ascii(raw):
     return RE_STRING.sub("", raw.decode())
 
 
-class AsyncThrottle(object):
+def process_lock(method):
+    """Wrap function with only run once."""
+    async def wrap_api(api, *args, **kwargs):
+        """Return api wrapper."""
+        if api.lock.locked():
+            _LOGGER.error(
+                "Can't excute %s while a task is in progress", method.__name__)
+            return False
+
+        async with api.lock:
+            return await method(api, *args, **kwargs)
+
+    return wrap_api
+
+
+class AsyncThrottle:
     """
     Decorator that prevents a function from being called more than once every
     time period.

hassio/utils/apparmor.py (new file)
@@ -0,0 +1,66 @@
"""Some functions around apparmor profiles."""
import logging
import re

from ..exceptions import AppArmorFileError, AppArmorInvalidError

_LOGGER = logging.getLogger(__name__)

RE_PROFILE = re.compile(r"^profile ([^ ]+).*$")


def get_profile_name(profile_file):
    """Read the profile name from file."""
    profiles = set()

    try:
        with profile_file.open('r') as profile_data:
            for line in profile_data:
                match = RE_PROFILE.match(line)
                if not match:
                    continue
                profiles.add(match.group(1))
    except OSError as err:
        _LOGGER.error("Can't read apparmor profile: %s", err)
        raise AppArmorFileError()

    if len(profiles) != 1:
        _LOGGER.error("To many profiles inside file: %s", profiles)
        raise AppArmorInvalidError()

    return profiles.pop()


def validate_profile(profile_name, profile_file):
    """Check if profile from file is valid with profile name."""
    if profile_name == get_profile_name(profile_file):
        return True
    return False


def adjust_profile(profile_name, profile_file, profile_new):
    """Fix the profile name."""
    org_profile = get_profile_name(profile_file)
    profile_data = []

    # Process old data
    try:
        with profile_file.open('r') as profile:
            for line in profile:
                match = RE_PROFILE.match(line)
                if not match:
                    profile_data.append(line)
                else:
                    profile_data.append(
                        line.replace(org_profile, profile_name))
    except OSError as err:
        _LOGGER.error("Can't adjust origin profile: %s", err)
        raise AppArmorFileError()

    # Write into new file
    try:
        with profile_new.open('w') as profile:
            profile.writelines(profile_data)
    except OSError as err:
        _LOGGER.error("Can't write new profile: %s", err)
        raise AppArmorFileError()
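A small usage sketch for the helpers above; the file paths and profile name are examples only, and the import path is assumed:

    from pathlib import Path

    # Assumed import path for the module added above.
    from hassio.utils.apparmor import adjust_profile, get_profile_name

    fetched = Path('/tmp/apparmor.txt')         # profile as downloaded
    rewritten = Path('/tmp/hassio-supervisor')  # output with fixed name

    # Raises AppArmorInvalidError unless exactly one profile is declared.
    original_name = get_profile_name(fetched)

    # Rewrite the declared profile name while keeping the rules intact.
    adjust_profile('hassio-supervisor', fetched, rewritten)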
@@ -1,18 +1,14 @@
 """Tools file for HassIO."""
-import asyncio
 from datetime import datetime, timedelta, timezone
 import logging
 import re
 
-import aiohttp
-import async_timeout
 import pytz
 
 UTC = pytz.utc
 
 _LOGGER = logging.getLogger(__name__)
 
-FREEGEOIP_URL = "https://freegeoip.io/json/"
-
 # Copyright (c) Django Software Foundation and individual contributors.
 # All rights reserved.
@@ -25,23 +21,6 @@ DATETIME_RE = re.compile(
 )
 
 
-async def fetch_timezone(websession):
-    """Read timezone from freegeoip."""
-    data = {}
-    try:
-        with async_timeout.timeout(10, loop=websession.loop):
-            async with websession.get(FREEGEOIP_URL) as request:
-                data = await request.json()
-
-    except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
-        _LOGGER.warning("Can't fetch freegeoip data: %s", err)
-
-    except ValueError as err:
-        _LOGGER.warning("Error on parse freegeoip data: %s", err)
-
-    return data.get('time_zone', 'UTC')
-
-
 # Copyright (c) Django Software Foundation and individual contributors.
 # All rights reserved.
 # https://github.com/django/django/blob/master/LICENSE
hassio/utils/gdbus.py (new file)
@@ -0,0 +1,285 @@
"""DBus implementation with glib."""
import asyncio
import logging
import json
import shlex
import re
from signal import SIGINT
import xml.etree.ElementTree as ET

from ..exceptions import DBusFatalError, DBusParseError

_LOGGER = logging.getLogger(__name__)

# Use to convert GVariant into json
RE_GVARIANT_TYPE = re.compile(
    r"(?:boolean|byte|int16|uint16|int32|uint32|handle|int64|uint64|double|"
    r"string|objectpath|signature) ")
RE_GVARIANT_VARIANT = re.compile(
    r"(?<=(?: |{|\[))<((?:'|\").*?(?:'|\")|\d+(?:\.\d+)?)>(?=(?:|]|}|,))")
RE_GVARIANT_STRING = re.compile(r"(?<=(?: |{|\[|\())'(.*?)'(?=(?:|]|}|,|\)))")
RE_GVARIANT_TUPLE_O = re.compile(r"\"[^\"]*?\"|(\()")
RE_GVARIANT_TUPLE_C = re.compile(r"\"[^\"]*?\"|(,?\))")

RE_MONITOR_OUTPUT = re.compile(r".+?: (?P<signal>[^ ].+) (?P<data>.*)")

# Commands for dbus
INTROSPECT = ("gdbus introspect --system --dest {bus} "
              "--object-path {object} --xml")
CALL = ("gdbus call --system --dest {bus} --object-path {object} "
        "--method {method} {args}")
MONITOR = ("gdbus monitor --system --dest {bus}")

DBUS_METHOD_GETALL = 'org.freedesktop.DBus.Properties.GetAll'


class DBus:
    """DBus handler."""

    def __init__(self, bus_name, object_path):
        """Initialize dbus object."""
        self.bus_name = bus_name
        self.object_path = object_path
        self.methods = set()
        self.signals = set()

    @staticmethod
    async def connect(bus_name, object_path):
        """Read object data."""
        self = DBus(bus_name, object_path)
        await self._init_proxy()  # pylint: disable=protected-access

        _LOGGER.info("Connect to dbus: %s - %s", bus_name, object_path)
        return self

    async def _init_proxy(self):
        """Read interface data."""
        command = shlex.split(INTROSPECT.format(
            bus=self.bus_name,
            object=self.object_path
        ))

        # Ask data
        _LOGGER.info("Introspect %s on %s", self.bus_name, self.object_path)
        data = await self._send(command)

        # Parse XML
        try:
            xml = ET.fromstring(data)
        except ET.ParseError as err:
            _LOGGER.error("Can't parse introspect data: %s", err)
            raise DBusParseError() from None

        # Read available methods
        _LOGGER.debug("data: %s", data)
        for interface in xml.findall("./interface"):
            interface_name = interface.get('name')

            # Methods
            for method in interface.findall("./method"):
                method_name = method.get('name')
                self.methods.add(f"{interface_name}.{method_name}")

            # Signals
            for signal in interface.findall("./signal"):
                signal_name = signal.get('name')
                self.signals.add(f"{interface_name}.{signal_name}")

    @staticmethod
    def parse_gvariant(raw):
        """Parse GVariant input to python."""
        raw = RE_GVARIANT_TYPE.sub("", raw)
        raw = RE_GVARIANT_VARIANT.sub(r"\1", raw)
        raw = RE_GVARIANT_STRING.sub(r'"\1"', raw)
        raw = RE_GVARIANT_TUPLE_O.sub(
            lambda x: x.group(0) if not x.group(1) else "[", raw)
        raw = RE_GVARIANT_TUPLE_C.sub(
            lambda x: x.group(0) if not x.group(1) else "]", raw)

        # No data
        if raw.startswith("[]"):
            return []

        try:
            return json.loads(raw)
        except json.JSONDecodeError as err:
            _LOGGER.error("Can't parse '%s': %s", raw, err)
            raise DBusParseError() from None

    @staticmethod
    def gvariant_args(args):
        """Convert args into gvariant."""
        gvariant = ""
        for arg in args:
            if isinstance(arg, bool):
                gvariant += " {}".format(str(arg).lower())
            elif isinstance(arg, (int, float)):
                gvariant += f" {arg}"
            elif isinstance(arg, str):
                gvariant += f" \"{arg}\""
            else:
                gvariant += " {}".format(str(arg))

        return gvariant.lstrip()

    async def call_dbus(self, method, *args):
        """Call a dbus method."""
        command = shlex.split(CALL.format(
            bus=self.bus_name,
            object=self.object_path,
            method=method,
            args=self.gvariant_args(args)
        ))

        # Run command
        _LOGGER.info("Call %s on %s", method, self.object_path)
        data = await self._send(command)

        # Parse and return data
        return self.parse_gvariant(data)

    async def get_properties(self, interface):
        """Read all properties from interface."""
        try:
            return (await self.call_dbus(DBUS_METHOD_GETALL, interface))[0]
        except IndexError:
            _LOGGER.error("No attributes returned for %s", interface)
            raise DBusFatalError from None

    async def _send(self, command):
        """Send command over dbus."""
        # Run command
        _LOGGER.debug("Send dbus command: %s", command)
        try:
            proc = await asyncio.create_subprocess_exec(
                *command,
                stdin=asyncio.subprocess.DEVNULL,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE
            )

            data, error = await proc.communicate()
        except OSError as err:
            _LOGGER.error("DBus fatal error: %s", err)
            raise DBusFatalError() from None

        # Success?
        if proc.returncode != 0:
            _LOGGER.error("DBus return error: %s", error)
            raise DBusFatalError()

        # End
        return data.decode()

    def attach_signals(self, filters=None):
        """Generate a signals wrapper."""
        return DBusSignalWrapper(self, filters)

    async def wait_signal(self, signal):
        """Wait for single event."""
        monitor = DBusSignalWrapper(self, [signal])
        async with monitor as signals:
            async for signal in signals:
                return signal

    def __getattr__(self, name):
        """Mapping to dbus method."""
        return getattr(DBusCallWrapper(self, self.bus_name), name)


class DBusCallWrapper:
    """Wrapper a DBus interface for a call."""

    def __init__(self, dbus, interface):
        """Initialize wrapper."""
        self.dbus = dbus
        self.interface = interface

    def __call__(self):
        """Should never be called."""
        _LOGGER.error("DBus method %s not exists!", self.interface)
        raise DBusFatalError()

    def __getattr__(self, name):
        """Mapping to dbus method."""
        interface = f"{self.interface}.{name}"

        if interface not in self.dbus.methods:
            return DBusCallWrapper(self.dbus, interface)

        def _method_wrapper(*args):
            """Wrap method.

            Return a coroutine
            """
            return self.dbus.call_dbus(interface, *args)

        return _method_wrapper


class DBusSignalWrapper:
    """Process Signals."""

    def __init__(self, dbus, signals=None):
        """Initialize dbus signal wrapper."""
        self.dbus = dbus
        self._signals = signals
        self._proc = None

    async def __aenter__(self):
        """Start monitor events."""
        _LOGGER.info("Start dbus monitor on %s", self.dbus.bus_name)
        command = shlex.split(MONITOR.format(
            bus=self.dbus.bus_name
        ))
        self._proc = await asyncio.create_subprocess_exec(
            *command,
            stdin=asyncio.subprocess.DEVNULL,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )

        return self

    async def __aexit__(self, exception_type, exception_value, traceback):
        """Stop monitor events."""
        _LOGGER.info("Stop dbus monitor on %s", self.dbus.bus_name)
        self._proc.send_signal(SIGINT)
        await self._proc.communicate()

    def __aiter__(self):
        """Start Iteratation."""
        return self

    async def __anext__(self):
        """Get next data."""
        if not self._proc:
            raise StopAsyncIteration()

        # Read signals
        while True:
            try:
                data = await self._proc.stdout.readline()
            except asyncio.TimeoutError:
                raise StopAsyncIteration() from None

            # Program close
            if not data:
                raise StopAsyncIteration()

            # Extract metadata
            match = RE_MONITOR_OUTPUT.match(data.decode())
            if not match:
                continue
            signal = match.group('signal')
            data = match.group('data')

            # Filter signals?
            if self._signals and signal not in self._signals:
                _LOGGER.debug("Skip event %s - %s", signal, data)
                continue

            try:
                return self.dbus.parse_gvariant(data)
            except DBusParseError:
                raise StopAsyncIteration() from None
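A short usage sketch for the wrapper above; the bus, interface and unit names are examples, and the import path is assumed:

    import asyncio

    from hassio.utils.gdbus import DBus  # assumed import path


    async def main():
        # connect() introspects the object and records its methods/signals.
        systemd = await DBus.connect(
            'org.freedesktop.systemd1', '/org/freedesktop/systemd1')

        # Attribute access maps onto 'gdbus call' via DBusCallWrapper.
        await systemd.Manager.StartUnit('ssh.service', 'replace')

        # Properties go through org.freedesktop.DBus.Properties.GetAll.
        props = await systemd.get_properties('org.freedesktop.systemd1.Manager')
        print(props.get('Version'))


    asyncio.get_event_loop().run_until_complete(main())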
@@ -21,7 +21,7 @@ def read_json_file(jsonfile):
         return json.loads(cfile.read())
 
 
-class JsonConfig(object):
+class JsonConfig:
     """Hass core object for handle it."""
 
     def __init__(self, json_file, schema):
@@ -45,7 +45,7 @@ class JsonConfig(object):
         if self._file.is_file():
             try:
                 self._data = read_json_file(self._file)
-            except (OSError, json.JSONDecodeError):
+            except (OSError, json.JSONDecodeError, UnicodeDecodeError):
                 _LOGGER.warning("Can't read %s", self._file)
                 self._data = {}
 
@@ -12,7 +12,7 @@ MOD_READ = 'r'
 MOD_WRITE = 'w'
 
 
-class SecureTarFile(object):
+class SecureTarFile:
     """Handle encrypted files for tarfile library."""
 
     def __init__(self, name, mode, key=None, gzip=True):
@@ -81,7 +81,7 @@ class SecureTarFile(object):
 
 
 def _generate_iv(key, salt):
-    """Generate a iv from data."""
+    """Generate an iv from data."""
     temp_iv = key + salt
     for _ in range(100):
         temp_iv = hashlib.sha256(temp_iv).digest()
 
Some files were not shown because too many files have changed in this diff.