Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-15 20:19:21 +00:00.
Compare commits
556 Commits
Author | SHA1 | Date
(556 commit rows: the author, avatar, and date cells were lost in extraction; only the SHA1 column survived, running from d87a85ceb5 through f6375f1bd6.)
.github/move.yml (vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
# Configuration for move-issues - https://github.com/dessant/move-issues

# Delete the command comment. Ignored when the comment also contains other content
deleteCommand: true
# Close the source issue after moving
closeSourceIssue: true
# Lock the source issue after moving
lockSourceIssue: false
# Set custom aliases for targets
# aliases:
#   r: repo
#   or: owner/repo
.github/release-drafter.yml (vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
template: |
  ## What's Changed

  $CHANGES
.travis.yml (12 changes)
@@ -1,12 +1,6 @@
sudo: false
matrix:
  fast_finish: true
  include:
    - python: "3.6"

cache:
  directories:
    - $HOME/.cache/pip
sudo: true
dist: xenial
install: pip install -U tox
language: python
python: 3.7
script: tox
API.md (399 changes)
@@ -4,7 +4,7 @@

Interface for Home Assistant to control things from supervisor.

On error:
On error / Code 400:

```json
{
@@ -13,7 +13,7 @@ On error:
}
```

On success:
On success / Code 200:

```json
{
@@ -22,6 +22,8 @@ On success:
}
```

For access to API you need set the `X-HASSIO-KEY` they will be available for Add-ons/HomeAssistant with envoriment `HASSIO_TOKEN`.
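In plain terms: every call carries the token from the `HASSIO_TOKEN` environment variable in an `X-HASSIO-KEY` header. As an illustration, a minimal sketch of such a call from inside an add-on container; the `requests` dependency and the `http://hassio` base address are assumptions here, not something this document specifies:

```python
# Minimal sketch: call the supervisor API from inside an add-on.
# Assumes `requests` is installed and the supervisor answers at
# http://hassio (the hostname is an assumption; adjust for your setup).
import os

import requests

SUPERVISOR_URL = "http://hassio"  # assumed endpoint
TOKEN = os.environ["HASSIO_TOKEN"]  # injected into add-on containers


def api_get(path):
    """GET a supervisor endpoint and return the parsed JSON envelope."""
    response = requests.get(
        f"{SUPERVISOR_URL}{path}",
        headers={"X-HASSIO-KEY": TOKEN},
        timeout=10,
    )
    response.raise_for_status()
    return response.json()


if __name__ == "__main__":
    # /supervisor/ping is the simplest health check in this API.
    print(api_get("/supervisor/ping"))
```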
### Hass.io

- GET `/supervisor/ping`
@@ -34,8 +36,9 @@ The addons from `addons` are only installed one.
    "version": "INSTALL_VERSION",
    "last_version": "LAST_VERSION",
    "arch": "armhf|aarch64|i386|amd64",
    "beta_channel": "true|false",
    "channel": "stable|beta|dev",
    "timezone": "TIMEZONE",
    "wait_boot": "int",
    "addons": [
        {
            "name": "xy bla",
@@ -44,6 +47,7 @@ The addons from `addons` are only installed one.
            "repository": "12345678|null",
            "version": "LAST_VERSION",
            "installed": "INSTALL_VERSION",
            "icon": "bool",
            "logo": "bool",
            "state": "started|stopped",
        }
@@ -68,8 +72,9 @@ Optional:

```json
{
    "beta_channel": "true|false",
    "channel": "stable|beta|dev",
    "timezone": "TIMEZONE",
    "wait_boot": "int",
    "addons_repositories": [
        "REPO_URL"
    ]
@@ -84,44 +89,20 @@ Reload addons/version.

Output is the raw docker log.

### Security

- GET `/security/info`

- GET `/supervisor/stats`
```json
{
    "initialize": "bool",
    "totp": "bool"
    "cpu_percent": 0.0,
    "memory_usage": 283123,
    "memory_limit": 329392,
    "network_tx": 0,
    "network_rx": 0,
    "blk_read": 0,
    "blk_write": 0
}
```

- POST `/security/options`

```json
{
    "password": "xy"
}
```

- POST `/security/totp`

```json
{
    "password": "xy"
}
```

Return QR-Code

- POST `/security/session`
```json
{
    "password": "xy",
    "totp": "null|123456"
}
```

### Backup/Snapshot
### Snapshot

- GET `/snapshots`

@@ -131,7 +112,9 @@ Return QR-Code
        {
            "slug": "SLUG",
            "date": "ISO",
            "name": "Custom name"
            "name": "Custom name",
            "type": "full|partial",
            "protected": "bool"
        }
    ]
}
@@ -139,11 +122,28 @@ Return QR-Code

- POST `/snapshots/reload`

- POST `/snapshots/new/upload`

return:
```json
{
    "slug": ""
}
```

- POST `/snapshots/new/full`

```json
{
    "name": "Optional"
    "name": "Optional",
    "password": "Optional"
}
```

return:
```json
{
    "slug": ""
}
```
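To make the flow concrete, a sketch that drives the full-snapshot endpoint above and returns the response body carrying the new slug; the transport details (`requests`, the `http://hassio` base address, `HASSIO_TOKEN`) are the same assumptions as in the earlier sketch:

```python
# Sketch: create an optionally password-protected full snapshot.
# The endpoint and payload fields come from this document; the URL,
# token handling, and `requests` dependency are assumptions.
import os

import requests


def create_full_snapshot(name=None, password=None):
    payload = {}
    if name:
        payload["name"] = name
    if password:
        payload["password"] = password
    response = requests.post(
        "http://hassio/snapshots/new/full",  # assumed supervisor address
        headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
        json=payload,
        timeout=300,  # full snapshots can take a while
    )
    response.raise_for_status()
    # Per the docs above, the return body carries {"slug": "..."}.
    return response.json()
```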
@@ -153,7 +153,15 @@ Return QR-Code
{
    "name": "Optional",
    "addons": ["ADDON_SLUG"],
    "folders": ["FOLDER_NAME"]
    "folders": ["FOLDER_NAME"],
    "password": "Optional"
}
```

return:
```json
{
    "slug": ""
}
```

@@ -168,15 +176,14 @@ Return QR-Code
    "name": "custom snapshot name / description",
    "date": "ISO",
    "size": "SIZE_IN_MB",
    "homeassistant": {
        "version": "INSTALLED_HASS_VERSION",
        "devices": []
    },
    "protected": "bool",
    "homeassistant": "version",
    "addons": [
        {
            "slug": "ADDON_SLUG",
            "name": "NAME",
            "version": "INSTALLED_VERSION"
            "version": "INSTALLED_VERSION",
            "size": "SIZE_IN_MB"
        }
    ],
    "repositories": ["URL"],
@@ -185,36 +192,47 @@ Return QR-Code
```

- POST `/snapshots/{slug}/remove`

- GET `/snapshots/{slug}/download`

- POST `/snapshots/{slug}/restore/full`

```json
{
    "password": "Optional"
}
```

- POST `/snapshots/{slug}/restore/partial`

```json
{
    "homeassistant": "bool",
    "addons": ["ADDON_SLUG"],
    "folders": ["FOLDER_NAME"]
    "folders": ["FOLDER_NAME"],
    "password": "Optional"
}
```

### Host

- POST `/host/reload`

- POST `/host/shutdown`

- POST `/host/reboot`

- GET `/host/info`

```json
{
    "type": "",
    "version": "",
    "last_version": "",
    "features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"],
    "hostname": "",
    "os": "",
    "audio": {
        "input": "0,0",
        "output": "0,0"
    }
    "hostname": "hostname|null",
    "features": ["shutdown", "reboot", "hostname", "services", "hassos"],
    "operating_system": "HassOS XY|Ubuntu 16.4|null",
    "kernel": "4.15.7|null",
    "chassis": "specific|null",
    "deployment": "stable|beta|dev|null",
    "cpe": "xy|null",
}
```

@@ -222,22 +240,67 @@ Return QR-Code

```json
{
    "audio_input": "0,0",
    "audio_output": "0,0"
    "hostname": "",
}
```

- POST `/host/update`
- POST `/host/reload`

Optional:
#### Services

- GET `/host/services`
```json
{
    "version": "VERSION"
    "services": [
        {
            "name": "xy.service",
            "description": "XY ...",
            "state": "active|"
        }
    ]
}
```

- GET `/host/hardware`
- POST `/host/service/{unit}/stop`

- POST `/host/service/{unit}/start`

- POST `/host/service/{unit}/reload`
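The service endpoints above expose host systemd units. As a usage sketch, listing the units and restarting one by chaining the documented stop and start calls; the transport boilerplate is assumed as before:

```python
# Sketch: list host services and bounce one unit via stop + start.
# Endpoints come from this document; `requests`, the http://hassio
# base URL, and HASSIO_TOKEN handling are assumptions.
import os

import requests

BASE = "http://hassio"  # assumed supervisor address
HEADERS = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}


def list_host_services():
    """Return the parsed body of GET /host/services."""
    response = requests.get(f"{BASE}/host/services", headers=HEADERS, timeout=10)
    response.raise_for_status()
    return response.json()


def restart_unit(unit):
    """Restart a unit by chaining the documented stop/start endpoints."""
    for action in ("stop", "start"):
        response = requests.post(
            f"{BASE}/host/service/{unit}/{action}", headers=HEADERS, timeout=30)
        response.raise_for_status()
```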
### HassOS

- GET `/hassos/info`
```json
{
    "version": "2.3",
    "version_cli": "7",
    "version_latest": "2.4",
    "version_cli_latest": "8",
    "board": "ova|rpi"
}
```

- POST `/hassos/update`
```json
{
    "version": "optional"
}
```

- POST `/hassos/update/cli`
```json
{
    "version": "optional"
}
```

- POST `/hassos/config/sync`

Load host configs from a USB stick.

### Hardware

- GET `/hardware/info`
```json
{
    "serial": ["/dev/xy"],
@@ -256,21 +319,18 @@ Optional:
}
```

### Network

- GET `/network/info`

- GET `/hardware/audio`
```json
{
    "hostname": ""
    "audio": {
        "input": {
            "0,0": "Mic"
        },
        "output": {
            "1,0": "Jack",
            "1,1": "HDMI"
        }
    }
}
```

- POST `/network/options`

```json
{
    "hostname": "",
}
```

@@ -282,13 +342,14 @@ Optional:
{
    "version": "INSTALL_VERSION",
    "last_version": "LAST_VERSION",
    "devices": [""],
    "machine": "Image machine type",
    "image": "str",
    "custom": "bool -> if custom image",
    "boot": "bool",
    "port": 8123,
    "ssl": "bool",
    "watchdog": "bool"
    "watchdog": "bool",
    "startup_time": 600
}
```

@@ -315,13 +376,14 @@ Output is the raw Docker log.

```json
{
    "devices": [],
    "image": "Optional|null",
    "last_version": "Optional for custom image|null",
    "port": "port for access hass",
    "ssl": "bool",
    "password": "",
    "watchdog": "bool"
    "refresh_token": "",
    "watchdog": "bool",
    "startup_time": 600
}
```

@@ -331,6 +393,23 @@ Image with `null` and last_version with `null` reset this options.

Proxy to real home-assistant instance.

- GET `/homeassistant/websocket`

Proxy to real websocket instance.

- GET `/homeassistant/stats`
```json
{
    "cpu_percent": 0.0,
    "memory_usage": 283123,
    "memory_limit": 329392,
    "network_tx": 0,
    "network_rx": 0,
    "blk_read": 0,
    "blk_write": 0
}
```

### RESTful for API addons

- GET `/addons`
@@ -350,15 +429,9 @@ Get all available addons.
    "installed": "none|INSTALL_VERSION",
    "detached": "bool",
    "build": "bool",
    "privileged": ["NET_ADMIN", "SYS_ADMIN"],
    "devices": ["/dev/xy"],
    "url": "null|url",
    "logo": "bool",
    "audio": "bool",
    "gpio": "bool",
    "stdin": "bool",
    "hassio_api": "bool",
    "homeassistant_api": "bool"
    "icon": "bool",
    "logo": "bool"
    }
],
"repositories": [
@@ -379,7 +452,9 @@ Get all available addons.
```json
{
    "name": "xy bla",
    "slug": "xdssd_xybla",
    "description": "description",
    "long_description": "null|markdown",
    "auto_update": "bool",
    "url": "null|url of addon",
    "detached": "bool",
@@ -392,22 +467,36 @@ Get all available addons.
    "options": "{}",
    "network": "{}|null",
    "host_network": "bool",
    "host_ipc": "bool",
    "host_dbus": "bool",
    "privileged": ["NET_ADMIN", "SYS_ADMIN"],
    "apparmor": "disable|default|profile",
    "devices": ["/dev/xy"],
    "auto_uart": "bool",
    "icon": "bool",
    "logo": "bool",
    "changelog": "bool",
    "hassio_api": "bool",
    "homeassistant_api": "bool",
    "stdin": "bool",
    "webui": "null|http(s)://[HOST]:port/xy/zx",
    "gpio": "bool",
    "devicetree": "bool",
    "docker_api": "bool",
    "audio": "bool",
    "audio_input": "null|0,0",
    "audio_output": "null|0,0"
    "audio_output": "null|0,0",
    "services": "null|['mqtt']",
    "discovery": "null|['component/platform']"
}
```

- GET `/addons/{addon}/icon`

- GET `/addons/{addon}/logo`

- GET `/addons/{addon}/changelog`

- POST `/addons/{addon}/options`

```json
@@ -423,7 +512,7 @@ Get all available addons.
}
```

For reset custom network/audio settings, set it `null`.
Reset custom network/audio/options, set it `null`.

- POST `/addons/{addon}/start`

@@ -449,45 +538,105 @@ Only supported for local build addons

Write data to add-on stdin

## Host Control

Communicate over UNIX socket with a host daemon.

- commands

```
# info
-> {'type', 'version', 'last_version', 'features', 'hostname'}
# reboot
# shutdown
# host-update [v]

# hostname xy

# network info
-> {}
# network wlan ssd xy
# network wlan password xy
# network int ip xy
# network int netmask xy
# network int route xy
- GET `/addons/{addon}/stats`
```json
{
    "cpu_percent": 0.0,
    "memory_usage": 283123,
    "memory_limit": 329392,
    "network_tx": 0,
    "network_rx": 0,
    "blk_read": 0,
    "blk_write": 0
}
```

Features:
### Service discovery

- shutdown
- reboot
- update
- hostname
- network_info
- network_control

Answer:
```
{}|OK|ERROR|WRONG
- GET `/services/discovery`
```json
{
    "discovery": [
        {
            "provider": "name",
            "uuid": "uuid",
            "component": "component",
            "platform": "null|platform",
            "config": {}
        }
    ]
}
```

- {}: json
- OK: call was successfully
- ERROR: error on call
- WRONG: not supported
- GET `/services/discovery/{UUID}`
```json
{
    "provider": "name",
    "uuid": "uuid",
    "component": "component",
    "platform": "null|platform",
    "config": {}
}
```

- POST `/services/discovery`
```json
{
    "component": "component",
    "platform": "null|platform",
    "config": {}
}
```

return:
```json
{
    "uuid": "uuid"
}
```

- DEL `/services/discovery/{UUID}`
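Putting the three discovery endpoints together, a sketch of a provider registering a payload and cleaning it up again; only the endpoints and field names come from this document, while the transport details are assumptions as before:

```python
# Sketch: register a discovery record and later delete it by uuid.
# POST returns {"uuid": "..."}; DEL here means an HTTP DELETE.
import os

import requests

BASE = "http://hassio"  # assumed supervisor address
HEADERS = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}


def register_discovery(component, config, platform=None):
    """POST a discovery record; the response body carries its uuid."""
    body = {"component": component, "platform": platform, "config": config}
    response = requests.post(
        f"{BASE}/services/discovery", headers=HEADERS, json=body, timeout=10)
    response.raise_for_status()
    return response.json()


def remove_discovery(uuid):
    """Delete an existing discovery record by its uuid."""
    response = requests.delete(
        f"{BASE}/services/discovery/{uuid}", headers=HEADERS, timeout=10)
    response.raise_for_status()
```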
- GET `/services`
```json
{
    "services": [
        {
            "slug": "name",
            "available": "bool",
            "provider": "null|name|list"
        }
    ]
}
```

#### MQTT

This service performs an auto discovery to Home-Assistant.

- GET `/services/mqtt`
```json
{
    "provider": "name",
    "host": "xy",
    "port": "8883",
    "ssl": "bool",
    "username": "optional",
    "password": "optional",
    "protocol": "3.1.1"
}
```

- POST `/services/mqtt`
```json
{
    "host": "xy",
    "port": "8883",
    "ssl": "bool|optional",
    "username": "optional",
    "password": "optional",
    "protocol": "3.1.1"
}
```

- DEL `/services/mqtt`
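As a usage sketch, an MQTT add-on could announce its broker with the POST endpoint above so Home Assistant gets auto-configured; the hostname and credentials below are hypothetical, and the transport boilerplate is assumed as in the earlier sketches:

```python
# Sketch: an MQTT provider add-on announcing its broker to the
# supervisor. Field names come from this document; the broker
# hostname and credentials are hypothetical placeholders.
import os

import requests


def announce_mqtt_broker():
    payload = {
        "host": "addon-mqtt-broker",  # hypothetical broker hostname
        "port": "8883",
        "ssl": True,
        "username": "homeassistant",  # hypothetical credentials
        "password": "secret",
        "protocol": "3.1.1",
    }
    response = requests.post(
        "http://hassio/services/mqtt",  # assumed supervisor address
        headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
        json=payload,
        timeout=10,
    )
    response.raise_for_status()
```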
Dockerfile (29 changes)
@@ -1,21 +1,22 @@
ARG BUILD_FROM
FROM $BUILD_FROM

# add env
ENV LANG C.UTF-8
# Setup base
COPY requirements.txt /usr/src/
RUN apk add --no-cache \
        git \
        socat \
        glib \
        libstdc++ \
        eudev-libs \
    && apk add --no-cache --virtual .build-dependencies \
        make \
        g++ \
    && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
    && apk del .build-dependencies \
    && rm -f /usr/src/requirements.txt

# setup base
RUN apk add --no-cache python3 python3-dev \
        libressl libressl-dev \
        libffi libffi-dev \
        musl musl-dev \
        gcc libstdc++ \
        git socat \
    && pip3 install --no-cache-dir --upgrade pip \
    && pip3 install --no-cache-dir --upgrade cryptography jwcrypto \
    && apk del python3-dev libressl-dev libffi-dev musl-dev gcc

# install HassIO
# Install HassIO
COPY . /usr/src/hassio
RUN pip3 install --no-cache-dir /usr/src/hassio \
    && rm -rf /usr/src/hassio
README.md (10 changes)
@@ -1,8 +1,12 @@
# Hass.io

### First private cloud solution for home automation.
## First private cloud solution for home automation

Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.
Hass.io is a Docker-based system for managing your Home Assistant installation
and related applications. The system is controlled via Home Assistant which
communicates with the Supervisor. The Supervisor provides an API to manage the
installation. This includes changing network settings or installing
and updating software.

![]()

@@ -11,4 +15,4 @@ Hass.io is a Docker based system for managing your Home Assistant installation a

## Installation

Installation instructions can be found at [https://home-assistant.io/hassio](https://home-assistant.io/hassio).
Installation instructions can be found at <https://home-assistant.io/hassio>.
hassio/__main__.py
@@ -4,15 +4,24 @@ from concurrent.futures import ThreadPoolExecutor
import logging
import sys

import hassio.bootstrap as bootstrap
import hassio.core as core
from hassio import bootstrap

_LOGGER = logging.getLogger(__name__)


def attempt_use_uvloop():
    """Attempt to use uvloop."""
    try:
        import uvloop
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    except ImportError:
        pass


# pylint: disable=invalid-name
if __name__ == "__main__":
    bootstrap.initialize_logging()
    attempt_use_uvloop()
    loop = asyncio.get_event_loop()

    if not bootstrap.check_environment():
@@ -23,15 +32,14 @@ if __name__ == "__main__":
    loop.set_default_executor(executor)

    _LOGGER.info("Initialize Hassio setup")
    config = bootstrap.initialize_system_data()
    hassio = core.HassIO(loop, config)
    coresys = bootstrap.initialize_coresys(loop)

    bootstrap.migrate_system_env(config)
    bootstrap.migrate_system_env(coresys)

    _LOGGER.info("Setup HassIO")
    loop.run_until_complete(hassio.setup())
    loop.run_until_complete(coresys.core.setup())

    loop.call_soon_threadsafe(loop.create_task, hassio.start())
    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)

    try:
@@ -39,7 +47,7 @@ if __name__ == "__main__":
        loop.run_forever()
    finally:
        _LOGGER.info("Stopping HassIO")
        loop.run_until_complete(hassio.stop())
        loop.run_until_complete(coresys.core.stop())
        executor.shutdown(wait=False)
        loop.close()
hassio/addons/__init__.py
@@ -4,47 +4,59 @@ import logging

from .addon import Addon
from .repository import Repository
from .data import Data
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
from .data import AddonsData
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))


class AddonManager(object):
class AddonManager(CoreSysAttributes):
    """Manage addons inside HassIO."""

    def __init__(self, config, loop, docker):
    def __init__(self, coresys):
        """Initialize docker base wrapper."""
        self.loop = loop
        self.config = config
        self.docker = docker
        self.data = Data(config)
        self.addons = {}
        self.repositories = {}
        self.coresys = coresys
        self.data = AddonsData(coresys)
        self.addons_obj = {}
        self.repositories_obj = {}

    @property
    def list_addons(self):
        """Return a list of all addons."""
        return list(self.addons.values())
        return list(self.addons_obj.values())

    @property
    def list_installed(self):
        """Return a list of installed addons."""
        return [addon for addon in self.addons_obj.values()
                if addon.is_installed]

    @property
    def list_repositories(self):
        """Return list of addon repositories."""
        return list(self.repositories.values())
        return list(self.repositories_obj.values())

    def get(self, addon_slug):
        """Return a adddon from slug."""
        return self.addons.get(addon_slug)
        """Return an add-on from slug."""
        return self.addons_obj.get(addon_slug)

    async def prepare(self):
    def from_uuid(self, uuid):
        """Return an add-on from uuid."""
        for addon in self.list_addons:
            if addon.is_installed and uuid == addon.uuid:
                return addon
        return None

    async def load(self):
        """Startup addon management."""
        self.data.reload()

        # init hassio built-in repositories
        repositories = \
            set(self.config.addons_repositories) | BUILTIN_REPOSITORIES
            set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES

        # init custom repositories & load addons
        await self.load_repositories(repositories)
@@ -52,9 +64,9 @@ class AddonManager(object):
    async def reload(self):
        """Update addons from repo and reload list."""
        tasks = [repository.update() for repository in
                 self.repositories.values()]
                 self.repositories_obj.values()]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks)

        # read data from repositories
        self.data.reload()
@@ -65,29 +77,29 @@ class AddonManager(object):
    async def load_repositories(self, list_repositories):
        """Add a new custom repository."""
        new_rep = set(list_repositories)
        old_rep = set(self.repositories)
        old_rep = set(self.repositories_obj)

        # add new repository
        async def _add_repository(url):
            """Helper function to async add repository."""
            repository = Repository(self.config, self.loop, self.data, url)
            repository = Repository(self.coresys, url)
            if not await repository.load():
                _LOGGER.error("Can't load from repository %s", url)
                return
            self.repositories[url] = repository
            self.repositories_obj[url] = repository

            # don't add built-in repository to config
            if url not in BUILTIN_REPOSITORIES:
                self.config.add_addon_repository(url)
                self.sys_config.add_addon_repository(url)

        tasks = [_add_repository(url) for url in new_rep - old_rep]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks)

        # del new repository
        for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
            self.repositories.pop(url).remove()
            self.config.drop_addon_repository(url)
            self.repositories_obj.pop(url).remove()
            self.sys_config.drop_addon_repository(url)

        # update data
        self.data.reload()
@@ -98,8 +110,8 @@ class AddonManager(object):
        all_addons = set(self.data.system) | set(self.data.cache)

        # calc diff
        add_addons = all_addons - set(self.addons)
        del_addons = set(self.addons) - all_addons
        add_addons = all_addons - set(self.addons_obj)
        del_addons = set(self.addons_obj) - all_addons

        _LOGGER.info("Load addons: %d all - %d new - %d remove",
                     len(all_addons), len(add_addons), len(del_addons))
@@ -107,27 +119,40 @@ class AddonManager(object):
        # new addons
        tasks = []
        for addon_slug in add_addons:
            addon = Addon(
                self.config, self.loop, self.docker, self.data, addon_slug)
            addon = Addon(self.coresys, addon_slug)

            tasks.append(addon.load())
            self.addons[addon_slug] = addon
            self.addons_obj[addon_slug] = addon

        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks)

        # remove
        for addon_slug in del_addons:
            self.addons.pop(addon_slug)
            self.addons_obj.pop(addon_slug)

    async def auto_boot(self, stage):
    async def boot(self, stage):
        """Boot addons with mode auto."""
        tasks = []
        for addon in self.addons.values():
        for addon in self.addons_obj.values():
            if addon.is_installed and addon.boot == BOOT_AUTO and \
                    addon.startup == stage:
                tasks.append(addon.start())

        _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
            await asyncio.wait(tasks)
            await asyncio.sleep(self.sys_config.wait_boot)

    async def shutdown(self, stage):
        """Shutdown addons."""
        tasks = []
        for addon in self.addons_obj.values():
            if addon.is_installed and \
                    await addon.state() == STATE_STARTED and \
                    addon.startup == stage:
                tasks.append(addon.stop())

        _LOGGER.info("Shutdown %s stop %d addons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks)
hassio/addons/addon.py
@@ -1,4 +1,5 @@
"""Init file for HassIO addons."""
from contextlib import suppress
from copy import deepcopy
import logging
import json
@@ -12,19 +13,25 @@ import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import (
    validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME)
    validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME, RE_SERVICE)
from .utils import check_installed, remove_data
from ..const import (
    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
    ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
    ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP, ATTR_UUID,
    STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
    ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
    ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY)
from .util import check_installed
from ..dock.addon import DockerAddon
from ..tools import write_json_file, read_json_file
    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES,
    ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API, SECURITY_PROFILE,
    SECURITY_DISABLE, SECURITY_DEFAULT)
from ..coresys import CoreSysAttributes
from ..docker.addon import DockerAddon
from ..utils.json import write_json_file, read_json_file
from ..utils.apparmor import adjust_profile
from ..exceptions import HostAppArmorError

_LOGGER = logging.getLogger(__name__)

@@ -33,22 +40,20 @@ RE_WEBUI = re.compile(
    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")


class Addon(object):
class Addon(CoreSysAttributes):
    """Hold data for addon inside HassIO."""

    def __init__(self, config, loop, docker, data, slug):
    def __init__(self, coresys, slug):
        """Initialize data holder."""
        self.loop = loop
        self.config = config
        self.data = data
        self._id = slug
        self.coresys = coresys
        self.instance = DockerAddon(coresys, slug)

        self.docker = DockerAddon(config, loop, docker, self)
        self._id = slug

    async def load(self):
        """Async initialize of object."""
        if self.is_installed:
            await self.docker.attach()
            await self.instance.attach()

    @property
    def slug(self):
@@ -58,90 +63,96 @@ class Addon(object):
    @property
    def _mesh(self):
        """Return addon data from system or cache."""
        return self.data.system.get(self._id, self.data.cache.get(self._id))
        return self._data.system.get(self._id, self._data.cache.get(self._id))

    @property
    def _data(self):
        """Return addons data storage."""
        return self.sys_addons.data

    @property
    def is_installed(self):
        """Return True if a addon is installed."""
        return self._id in self.data.system
        """Return True if an addon is installed."""
        return self._id in self._data.system

    @property
    def is_detached(self):
        """Return True if addon is detached."""
        return self._id not in self.data.cache
        return self._id not in self._data.cache

    @property
    def version_installed(self):
        """Return installed version."""
        return self.data.user.get(self._id, {}).get(ATTR_VERSION)
        return self._data.user.get(self._id, {}).get(ATTR_VERSION)

    def _set_install(self, version):
        """Set addon as installed."""
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id] = {
        self._data.system[self._id] = deepcopy(self._data.cache[self._id])
        self._data.user[self._id] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: version,
        }
        self.data.save()
        self._data.save_data()

    def _set_uninstall(self):
        """Set addon as uninstalled."""
        self.data.system.pop(self._id, None)
        self.data.user.pop(self._id, None)
        self.data.save()
        self._data.system.pop(self._id, None)
        self._data.user.pop(self._id, None)
        self._data.save_data()

    def _set_update(self, version):
        """Update version of addon."""
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id][ATTR_VERSION] = version
        self.data.save()
        self._data.system[self._id] = deepcopy(self._data.cache[self._id])
        self._data.user[self._id][ATTR_VERSION] = version
        self._data.save_data()

    def _restore_data(self, user, system):
        """Restore data to addon."""
        self.data.user[self._id] = deepcopy(user)
        self.data.system[self._id] = deepcopy(system)
        self.data.save()
        self._data.user[self._id] = deepcopy(user)
        self._data.system[self._id] = deepcopy(system)
        self._data.save_data()

    @property
    def options(self):
        """Return options with local changes."""
        if self.is_installed:
            return {
                **self.data.system[self._id][ATTR_OPTIONS],
                **self.data.user[self._id][ATTR_OPTIONS]
                **self._data.system[self._id][ATTR_OPTIONS],
                **self._data.user[self._id][ATTR_OPTIONS]
            }
        return self.data.cache[self._id][ATTR_OPTIONS]
        return self._data.cache[self._id][ATTR_OPTIONS]

    @options.setter
    def options(self, value):
        """Store user addon options."""
        self.data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
        self.data.save()
        if value is None:
            self._data.user[self._id][ATTR_OPTIONS] = {}
        else:
            self._data.user[self._id][ATTR_OPTIONS] = deepcopy(value)

    @property
    def boot(self):
        """Return boot config with prio local settings."""
        if ATTR_BOOT in self.data.user.get(self._id, {}):
            return self.data.user[self._id][ATTR_BOOT]
        if ATTR_BOOT in self._data.user.get(self._id, {}):
            return self._data.user[self._id][ATTR_BOOT]
        return self._mesh[ATTR_BOOT]

    @boot.setter
    def boot(self, value):
        """Store user boot options."""
        self.data.user[self._id][ATTR_BOOT] = value
        self.data.save()
        self._data.user[self._id][ATTR_BOOT] = value

    @property
    def auto_update(self):
        """Return if auto update is enable."""
        if ATTR_AUTO_UPDATE in self.data.user.get(self._id, {}):
            return self.data.user[self._id][ATTR_AUTO_UPDATE]
        if ATTR_AUTO_UPDATE in self._data.user.get(self._id, {}):
            return self._data.user[self._id][ATTR_AUTO_UPDATE]
        return None

    @auto_update.setter
    def auto_update(self, value):
        """Set auto update."""
        self.data.user[self._id][ATTR_AUTO_UPDATE] = value
        self.data.save()
        self._data.user[self._id][ATTR_AUTO_UPDATE] = value

    @property
    def name(self):
@@ -153,11 +164,31 @@ class Addon(object):
        """Return timeout of addon for docker stop."""
        return self._mesh[ATTR_TIMEOUT]

    @property
    def uuid(self):
        """Return an API token for this add-on."""
        if self.is_installed:
            return self._data.user[self._id][ATTR_UUID]
        return None

    @property
    def description(self):
        """Return description of addon."""
        return self._mesh[ATTR_DESCRIPTON]

    @property
    def long_description(self):
        """Return README.md as long_description."""
        readme = Path(self.path_location, 'README.md')

        # If readme not exists
        if not readme.exists():
            return None

        # Return data
        with readme.open('r') as readme_file:
            return readme_file.read()

    @property
    def repository(self):
        """Return repository of addon."""
@@ -166,8 +197,8 @@ class Addon(object):
    @property
    def last_version(self):
        """Return version of addon."""
        if self._id in self.data.cache:
            return self.data.cache[self._id][ATTR_VERSION]
        if self._id in self._data.cache:
            return self._data.cache[self._id][ATTR_VERSION]
        return self.version_installed

    @property
@@ -175,6 +206,26 @@ class Addon(object):
        """Return startup type of addon."""
        return self._mesh.get(ATTR_STARTUP)

    @property
    def services(self):
        """Return dict of services with rights."""
        raw_services = self._mesh.get(ATTR_SERVICES)
        if not raw_services:
            return None

        formated_services = {}
        for data in raw_services:
            service = RE_SERVICE.match(data)
            formated_services[service.group('service')] = \
                service.group('rights') or 'ro'

        return formated_services

    @property
    def discovery(self):
        """Return list of discoverable components/platforms."""
        return self._mesh.get(ATTR_DISCOVERY)

    @property
    def ports(self):
        """Return ports of addon."""
@@ -182,24 +233,22 @@ class Addon(object):
            return None

        if not self.is_installed or \
                ATTR_NETWORK not in self.data.user[self._id]:
                ATTR_NETWORK not in self._data.user[self._id]:
            return self._mesh[ATTR_PORTS]
        return self.data.user[self._id][ATTR_NETWORK]
        return self._data.user[self._id][ATTR_NETWORK]

    @ports.setter
    def ports(self, value):
        """Set custom ports of addon."""
        if value is None:
            self.data.user[self._id].pop(ATTR_NETWORK, None)
            self._data.user[self._id].pop(ATTR_NETWORK, None)
        else:
            new_ports = {}
            for container_port, host_port in value.items():
                if container_port in self._mesh.get(ATTR_PORTS, {}):
                    new_ports[container_port] = host_port

            self.data.user[self._id][ATTR_NETWORK] = new_ports

        self.data.save()
            self._data.user[self._id][ATTR_NETWORK] = new_ports

    @property
    def webui(self):
@@ -218,7 +267,7 @@ class Addon(object):
        if self.ports is None:
            port = t_port
        else:
            port = self.ports.get("{}/tcp".format(t_port), t_port)
            port = self.ports.get(f"{t_port}/tcp", t_port)

        # for interface config or port lists
        if isinstance(port, (tuple, list)):
@@ -230,18 +279,33 @@ class Addon(object):
        else:
            proto = s_prefix

        return "{}://[HOST]:{}{}".format(proto, port, s_suffix)
        return f"{proto}://[HOST]:{port}{s_suffix}"

    @property
    def host_network(self):
        """Return True if addon run on host network."""
        return self._mesh[ATTR_HOST_NETWORK]

    @property
    def host_ipc(self):
        """Return True if addon run on host IPC namespace."""
        return self._mesh[ATTR_HOST_IPC]

    @property
    def host_dbus(self):
        """Return True if addon run on host DBUS."""
        return self._mesh[ATTR_HOST_DBUS]

    @property
    def devices(self):
        """Return devices of addon."""
        return self._mesh.get(ATTR_DEVICES)

    @property
    def auto_uart(self):
        """Return True if we should map all uart device."""
        return self._mesh.get(ATTR_AUTO_UART)

    @property
    def tmpfs(self):
        """Return tmpfs of addon."""
@@ -257,11 +321,25 @@ class Addon(object):
        """Return list of privilege."""
        return self._mesh.get(ATTR_PRIVILEGED)

    @property
    def apparmor(self):
        """Return True if apparmor is enabled."""
        if not self._mesh.get(ATTR_APPARMOR):
            return SECURITY_DISABLE
        elif self.sys_host.apparmor.exists(self.slug):
            return SECURITY_PROFILE
        return SECURITY_DEFAULT

    @property
    def legacy(self):
        """Return if the add-on don't support hass labels."""
        return self._mesh.get(ATTR_LEGACY)

    @property
    def with_docker_api(self):
        """Return if the add-on need read-only docker API access."""
        return self._mesh.get(ATTR_DOCKER_API)

    @property
    def access_hassio_api(self):
        """Return True if the add-on access to hassio api."""
@@ -282,6 +360,11 @@ class Addon(object):
        """Return True if the add-on access to gpio interface."""
        return self._mesh[ATTR_GPIO]

    @property
    def with_devicetree(self):
        """Return True if the add-on read access to devicetree."""
        return self._mesh[ATTR_DEVICETREE]

    @property
    def with_audio(self):
        """Return True if the add-on access to audio."""
@@ -293,50 +376,57 @@ class Addon(object):
        if not self.with_audio:
            return None

        setting = self.config.audio_output
        if self.is_installed and ATTR_AUDIO_OUTPUT in self.data.user[self._id]:
            setting = self.data.user[self._id][ATTR_AUDIO_OUTPUT]
        return setting
        if self.is_installed and \
                ATTR_AUDIO_OUTPUT in self._data.user[self._id]:
            return self._data.user[self._id][ATTR_AUDIO_OUTPUT]
        return self.sys_host.alsa.default.output

    @audio_output.setter
    def audio_output(self, value):
        """Set/remove custom audio output settings."""
        """Set/reset audio output settings."""
        if value is None:
            self.data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
            self._data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
        else:
            self.data.user[self._id][ATTR_AUDIO_OUTPUT] = value
        self.data.save()
            self._data.user[self._id][ATTR_AUDIO_OUTPUT] = value

    @property
    def audio_input(self):
        """Return ALSA config for input or None."""
        if not self.with_audio:
            return
            return None

        setting = self.config.audio_input
        if self.is_installed and ATTR_AUDIO_INPUT in self.data.user[self._id]:
            setting = self.data.user[self._id][ATTR_AUDIO_INPUT]
        return setting
        if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]:
            return self._data.user[self._id][ATTR_AUDIO_INPUT]
        return self.sys_host.alsa.default.input

    @audio_input.setter
    def audio_input(self, value):
        """Set/remove custom audio input settings."""
        """Set/reset audio input settings."""
        if value is None:
            self.data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
            self._data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
        else:
            self.data.user[self._id][ATTR_AUDIO_INPUT] = value
            self.data.save()
            self._data.user[self._id][ATTR_AUDIO_INPUT] = value

    @property
    def url(self):
        """Return url of addon."""
        return self._mesh.get(ATTR_URL)

    @property
    def with_icon(self):
        """Return True if an icon exists."""
        return self.path_icon.exists()

    @property
    def with_logo(self):
        """Return True if a logo exists."""
        return self.path_logo.exists()

    @property
    def with_changelog(self):
        """Return True if a changelog exists."""
        return self.path_changelog.exists()

    @property
    def supported_arch(self):
        """Return list of supported arch."""
@@ -349,11 +439,11 @@ class Addon(object):

        # Repository with dockerhub images
        if ATTR_IMAGE in addon_data:
            return addon_data[ATTR_IMAGE].format(arch=self.config.arch)
            return addon_data[ATTR_IMAGE].format(arch=self.sys_arch)

        # local build
        return "{}/{}-addon-{}".format(
            addon_data[ATTR_REPOSITORY], self.config.arch,
            addon_data[ATTR_REPOSITORY], self.sys_arch,
            addon_data[ATTR_SLUG])

    @property
@@ -374,12 +464,12 @@ class Addon(object):
    @property
    def path_data(self):
        """Return addon data path inside supervisor."""
        return Path(self.config.path_addons_data, self._id)
        return Path(self.sys_config.path_addons_data, self._id)

    @property
    def path_extern_data(self):
        """Return addon data path external for docker."""
        return PurePath(self.config.path_extern_addons_data, self._id)
        return PurePath(self.sys_config.path_extern_addons_data, self._id)

    @property
    def path_options(self):
@@ -391,11 +481,40 @@ class Addon(object):
        """Return path to this addon."""
        return Path(self._mesh[ATTR_LOCATON])

    @property
    def path_icon(self):
        """Return path to addon icon."""
        return Path(self.path_location, 'icon.png')

    @property
    def path_logo(self):
        """Return path to addon logo."""
        return Path(self.path_location, 'logo.png')

    @property
    def path_changelog(self):
        """Return path to addon changelog."""
        return Path(self.path_location, 'CHANGELOG.md')

    @property
    def path_apparmor(self):
        """Return path to custom AppArmor profile."""
        return Path(self.path_location, 'apparmor.txt')

    @property
    def path_asound(self):
        """Return path to asound config."""
        return Path(self.sys_config.path_tmp, f"{self.slug}_asound")

    @property
    def path_extern_asound(self):
        """Return path to asound config for docker."""
        return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound")

    def save_data(self):
        """Save data of addon."""
        self.sys_addons.data.save_data()

    def write_options(self):
        """Return True if addon options is written to data."""
        schema = self.schema
@@ -403,13 +522,52 @@ class Addon(object):

        try:
            schema(options)
            return write_json_file(self.path_options, options)
            write_json_file(self.path_options, options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s have wrong options -> %s", self._id,
            _LOGGER.error("Addon %s have wrong options: %s", self._id,
                          humanize_error(options, ex))
        except (OSError, json.JSONDecodeError) as err:
            _LOGGER.error("Addon %s can't write options: %s", self._id, err)
        else:
            return True

        return False

    def write_asound(self):
        """Write asound config to file and return True on success."""
        asound_config = self.sys_host.alsa.asound(
            alsa_input=self.audio_input, alsa_output=self.audio_output)

        try:
            with self.path_asound.open('w') as config_file:
                config_file.write(asound_config)
        except OSError as err:
            _LOGGER.error("Addon %s can't write asound: %s", self._id, err)
            return False

        return True

    async def _install_apparmor(self):
        """Install or Update AppArmor profile for Add-on."""
        exists_local = self.sys_host.apparmor.exists(self.slug)
        exists_addon = self.path_apparmor.exists()

        # Nothing to do
        if not exists_local and not exists_addon:
            return

        # Need removed
        if exists_local and not exists_addon:
            await self.sys_host.apparmor.remove_profile(self.slug)
            return

        # Need install/update
        with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
            profile_file = Path(tmp_folder, 'apparmor.txt')

            adjust_profile(self.slug, self.path_apparmor, profile_file)
            await self.sys_host.apparmor.load_profile(self.slug, profile_file)

    @property
    def schema(self):
        """Create a schema for addon options."""
@@ -425,8 +583,8 @@ class Addon(object):
            return True

        # load next schema
        new_raw_schema = self.data.cache[self._id][ATTR_SCHEMA]
        default_options = self.data.cache[self._id][ATTR_OPTIONS]
        new_raw_schema = self._data.cache[self._id][ATTR_SCHEMA]
        default_options = self._data.cache[self._id][ATTR_OPTIONS]

        # if disabled
        if isinstance(new_raw_schema, bool):
@@ -434,7 +592,7 @@ class Addon(object):

        # merge options
        options = {
            **self.data.user[self._id][ATTR_OPTIONS],
            **self._data.user[self._id][ATTR_OPTIONS],
            **default_options,
        }

@@ -450,10 +608,10 @@ class Addon(object):
            return True

    async def install(self):
        """Install a addon."""
        if self.config.arch not in self.supported_arch:
        """Install an addon."""
        if self.sys_arch not in self.supported_arch:
            _LOGGER.error(
                "Addon %s not supported on %s", self._id, self.config.arch)
                "Addon %s not supported on %s", self._id, self.sys_arch)
            return False
|
||||
|
||||
if self.is_installed:
|
||||
@@ -465,7 +623,10 @@ class Addon(object):
|
||||
"Create Home-Assistant addon data folder %s", self.path_data)
|
||||
self.path_data.mkdir()
|
||||
|
||||
if not await self.docker.install(self.last_version):
|
||||
# Setup/Fix AppArmor profile
|
||||
await self._install_apparmor()
|
||||
|
||||
if not await self.instance.install(self.last_version):
|
||||
return False
|
||||
|
||||
self._set_install(self.last_version)
|
||||
@@ -473,14 +634,24 @@ class Addon(object):
|
||||
|
||||
@check_installed
|
||||
async def uninstall(self):
|
||||
"""Remove a addon."""
|
||||
if not await self.docker.remove():
|
||||
"""Remove an addon."""
|
||||
if not await self.instance.remove():
|
||||
return False
|
||||
|
||||
if self.path_data.is_dir():
|
||||
_LOGGER.info(
|
||||
"Remove Home-Assistant addon data folder %s", self.path_data)
|
||||
shutil.rmtree(str(self.path_data))
|
||||
await remove_data(self.path_data)
|
||||
|
||||
# Cleanup audio settings
|
||||
if self.path_asound.exists():
|
||||
with suppress(OSError):
|
||||
self.path_asound.unlink()
|
||||
|
||||
# Cleanup apparmor profile
|
||||
if self.sys_host.apparmor.exists(self.slug):
|
||||
with suppress(HostAppArmorError):
|
||||
await self.sys_host.apparmor.remove_profile(self.slug)
|
||||
|
||||
self._set_uninstall()
|
||||
return True
|
||||
@@ -490,17 +661,22 @@ class Addon(object):
|
||||
if not self.is_installed:
|
||||
return STATE_NONE
|
||||
|
||||
if await self.docker.is_running():
|
||||
if await self.instance.is_running():
|
||||
return STATE_STARTED
|
||||
return STATE_STOPPED
|
||||
|
||||
@check_installed
|
||||
def start(self):
|
||||
"""Set options and start addon.
|
||||
async def start(self):
|
||||
"""Set options and start addon."""
|
||||
# Options
|
||||
if not self.write_options():
|
||||
return False
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.run()
|
||||
# Sound
|
||||
if self.with_audio and not self.write_asound():
|
||||
return False
|
||||
|
||||
return await self.instance.run()
|
||||
|
||||
@check_installed
|
||||
def stop(self):
|
||||
@@ -508,7 +684,7 @@ class Addon(object):
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.stop()
|
||||
return self.instance.stop()
|
||||
|
||||
@check_installed
|
||||
async def update(self):
|
||||
@@ -516,26 +692,26 @@ class Addon(object):
|
||||
last_state = await self.state()
|
||||
|
||||
if self.last_version == self.version_installed:
|
||||
_LOGGER.warning(
|
||||
"No update available for Addon %s", self._id)
|
||||
_LOGGER.warning("No update available for Addon %s", self._id)
|
||||
return False
|
||||
|
||||
if not await self.docker.update(self.last_version):
|
||||
if not await self.instance.update(self.last_version):
|
||||
return False
|
||||
self._set_update(self.last_version)
|
||||
|
||||
# Setup/Fix AppArmor profile
|
||||
await self._install_apparmor()
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await self.docker.run()
|
||||
await self.start()
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
def restart(self):
|
||||
"""Restart addon.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.restart()
|
||||
async def restart(self):
|
||||
"""Restart addon."""
|
||||
await self.stop()
|
||||
return await self.start()
|
||||
|
||||
@check_installed
|
||||
def logs(self):
|
||||
@@ -543,7 +719,15 @@ class Addon(object):
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.logs()
|
||||
return self.instance.logs()
|
||||
|
||||
@check_installed
|
||||
def stats(self):
|
||||
"""Return stats of container.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.stats()
|
||||
|
||||
@check_installed
|
||||
async def rebuild(self):
|
||||
@@ -555,15 +739,15 @@ class Addon(object):
|
||||
return False
|
||||
|
||||
# remove docker container but not addon config
|
||||
if not await self.docker.remove():
|
||||
if not await self.instance.remove():
|
||||
return False
|
||||
|
||||
if not await self.docker.install(self.version_installed):
|
||||
if not await self.instance.install(self.version_installed):
|
||||
return False
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await self.docker.run()
|
||||
await self.start()
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
@@ -576,103 +760,126 @@ class Addon(object):
|
||||
_LOGGER.error("Add-on don't support write to stdin!")
|
||||
return False
|
||||
|
||||
return await self.docker.write_stdin(data)
|
||||
return await self.instance.write_stdin(data)
|
||||
|
||||
@check_installed
|
||||
async def snapshot(self, tar_file):
|
||||
"""Snapshot a state of a addon."""
|
||||
with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
|
||||
"""Snapshot state of an addon."""
|
||||
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
|
||||
# store local image
|
||||
if self.need_build and not await \
|
||||
self.docker.export_image(Path(temp, "image.tar")):
|
||||
self.instance.export_image(Path(temp, 'image.tar')):
|
||||
return False
|
||||
|
||||
data = {
|
||||
ATTR_USER: self.data.user.get(self._id, {}),
|
||||
ATTR_SYSTEM: self.data.system.get(self._id, {}),
|
||||
ATTR_USER: self._data.user.get(self._id, {}),
|
||||
ATTR_SYSTEM: self._data.system.get(self._id, {}),
|
||||
ATTR_VERSION: self.version_installed,
|
||||
ATTR_STATE: await self.state(),
|
||||
}
|
||||
|
||||
# store local configs/state
|
||||
if not write_json_file(Path(temp, "addon.json"), data):
|
||||
_LOGGER.error("Can't write addon.json for %s", self._id)
|
||||
try:
|
||||
write_json_file(Path(temp, 'addon.json'), data)
|
||||
except (OSError, json.JSONDecodeError) as err:
|
||||
_LOGGER.error("Can't save meta for %s: %s", self._id, err)
|
||||
return False
|
||||
|
||||
# Store AppArmor Profile
|
||||
if self.sys_host.apparmor.exists(self.slug):
|
||||
profile = Path(temp, 'apparmor.txt')
|
||||
try:
|
||||
self.sys_host.apparmor.backup_profile(self.slug, profile)
|
||||
except HostAppArmorError:
|
||||
_LOGGER.error("Can't backup AppArmor profile")
|
||||
return False
|
||||
|
||||
# write into tarfile
|
||||
def _create_tar():
|
||||
def _write_tarfile():
|
||||
"""Write tar inside loop."""
|
||||
with tarfile.open(tar_file, "w:gz",
|
||||
compresslevel=1) as snapshot:
|
||||
with tar_file as snapshot:
|
||||
snapshot.add(temp, arcname=".")
|
||||
snapshot.add(self.path_data, arcname="data")
|
||||
|
||||
try:
|
||||
_LOGGER.info("Build snapshot for addon %s", self._id)
|
||||
await self.loop.run_in_executor(None, _create_tar)
|
||||
except tarfile.TarError as err:
|
||||
_LOGGER.error("Can't write tarfile %s -> %s", tar_file, err)
|
||||
await self.sys_run_in_executor(_write_tarfile)
|
||||
except (tarfile.TarError, OSError) as err:
|
||||
_LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
|
||||
return False
|
||||
|
||||
_LOGGER.info("Finish snapshot for addon %s", self._id)
|
||||
return True
|
||||
|
||||
async def restore(self, tar_file):
|
||||
"""Restore a state of a addon."""
|
||||
with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
|
||||
"""Restore state of an addon."""
|
||||
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
|
||||
# extract snapshot
|
||||
def _extract_tar():
|
||||
def _extract_tarfile():
|
||||
"""Extract tar snapshot."""
|
||||
with tarfile.open(tar_file, "r:gz") as snapshot:
|
||||
with tar_file as snapshot:
|
||||
snapshot.extractall(path=Path(temp))
|
||||
|
||||
try:
|
||||
await self.loop.run_in_executor(None, _extract_tar)
|
||||
await self.sys_run_in_executor(_extract_tarfile)
|
||||
except tarfile.TarError as err:
|
||||
_LOGGER.error("Can't read tarfile %s -> %s", tar_file, err)
|
||||
_LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
|
||||
return False
|
||||
|
||||
# read snapshot data
|
||||
try:
|
||||
data = read_json_file(Path(temp, "addon.json"))
|
||||
data = read_json_file(Path(temp, 'addon.json'))
|
||||
except (OSError, json.JSONDecodeError) as err:
|
||||
_LOGGER.error("Can't read addon.json -> %s", err)
|
||||
_LOGGER.error("Can't read addon.json: %s", err)
|
||||
|
||||
# validate
|
||||
try:
|
||||
data = SCHEMA_ADDON_SNAPSHOT(data)
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error("Can't validate %s, snapshot data -> %s",
|
||||
_LOGGER.error("Can't validate %s, snapshot data: %s",
|
||||
self._id, humanize_error(data, err))
|
||||
return False
|
||||
|
||||
# restore data / reload addon
|
||||
_LOGGER.info("Restore config for addon %s", self._id)
|
||||
self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM])
|
||||
|
||||
# check version / restore image
|
||||
version = data[ATTR_VERSION]
|
||||
if version != self.docker.version:
|
||||
image_file = Path(temp, "image.tar")
|
||||
if not await self.instance.exists():
|
||||
_LOGGER.info("Restore image for addon %s", self._id)
|
||||
|
||||
image_file = Path(temp, 'image.tar')
|
||||
if image_file.is_file():
|
||||
await self.docker.import_image(image_file, version)
|
||||
await self.instance.import_image(image_file, version)
|
||||
else:
|
||||
if await self.docker.install(version):
|
||||
await self.docker.cleanup()
|
||||
if await self.instance.install(version):
|
||||
await self.instance.cleanup()
|
||||
else:
|
||||
await self.docker.stop()
|
||||
await self.instance.stop()
|
||||
|
||||
# restore data
|
||||
def _restore_data():
|
||||
"""Restore data."""
|
||||
if self.path_data.is_dir():
|
||||
shutil.rmtree(str(self.path_data), ignore_errors=True)
|
||||
shutil.copytree(str(Path(temp, "data")), str(self.path_data))
|
||||
|
||||
try:
|
||||
_LOGGER.info("Restore data for addon %s", self._id)
|
||||
await self.loop.run_in_executor(None, _restore_data)
|
||||
if self.path_data.is_dir():
|
||||
await remove_data(self.path_data)
|
||||
try:
|
||||
await self.sys_run_in_executor(_restore_data)
|
||||
except shutil.Error as err:
|
||||
_LOGGER.error("Can't restore origin data -> %s", err)
|
||||
_LOGGER.error("Can't restore origin data: %s", err)
|
||||
return False
|
||||
|
||||
# Restore AppArmor
|
||||
profile_file = Path(temp, 'apparmor.txt')
|
||||
if profile_file.exists():
|
||||
try:
|
||||
await self.sys_host.apparmor.load_profile(
|
||||
self.slug, profile_file)
|
||||
except HostAppArmorError:
|
||||
_LOGGER.error("Can't restore AppArmor profile")
|
||||
return False
|
||||
|
||||
# run addon
|
||||
|
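
The snapshot and restore changes above move blocking tar work off the event loop and let a preconfigured tar object be used as a context manager. A minimal sketch of the executor pattern, assuming a plain asyncio loop and illustrative paths rather than the Supervisor's own helpers:

    import asyncio
    import tarfile
    from pathlib import Path

    def _write_tarfile(source: Path, tar_path: Path) -> None:
        """Blocking tar write; must never run directly on the event loop."""
        with tarfile.open(str(tar_path), "w:gz") as snapshot:
            snapshot.add(str(source), arcname=".")

    async def snapshot_folder(source: Path, tar_path: Path) -> bool:
        loop = asyncio.get_event_loop()
        try:
            # None selects the loop's default ThreadPoolExecutor
            await loop.run_in_executor(None, _write_tarfile, source, tar_path)
        except (tarfile.TarError, OSError) as err:
            print(f"Can't write tarfile {tar_path}: {err}")
            return False
        return True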
@@ -1,30 +1,37 @@
 """HassIO addons build environment."""
 from pathlib import Path

-from .validate import SCHEMA_BUILD_CONFIG
+from .validate import SCHEMA_BUILD_CONFIG, BASE_IMAGE
 from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON
-from ..tools import JsonConfig
+from ..coresys import CoreSysAttributes
+from ..utils.json import JsonConfig


-class AddonBuild(JsonConfig):
+class AddonBuild(JsonConfig, CoreSysAttributes):
     """Handle build options for addons."""

-    def __init__(self, config, addon):
+    def __init__(self, coresys, slug):
         """Initialize addon builder."""
-        self.config = config
-        self.addon = addon
+        self.coresys = coresys
+        self._id = slug

         super().__init__(
-            Path(addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
+            Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)

-    def save(self):
+    def save_data(self):
         """Ignore save function."""
         pass

+    @property
+    def addon(self):
+        """Return addon of build data."""
+        return self.sys_addons.get(self._id)
+
     @property
     def base_image(self):
         """Base images for this addon."""
-        return self._data[ATTR_BUILD_FROM][self.config.arch]
+        return self._data[ATTR_BUILD_FROM].get(
+            self.sys_arch, BASE_IMAGE[self.sys_arch])

     @property
     def squash(self):
@@ -40,21 +47,21 @@ class AddonBuild(JsonConfig):
         """Create a dict with docker build arguments."""
         args = {
             'path': str(self.addon.path_location),
-            'tag': "{}:{}".format(self.addon.image, version),
+            'tag': f"{self.addon.image}:{version}",
             'pull': True,
             'forcerm': True,
             'squash': self.squash,
             'labels': {
                 'io.hass.version': version,
-                'io.hass.arch': self.config.arch,
+                'io.hass.arch': self.sys_arch,
                 'io.hass.type': META_ADDON,
-                'io.hass.name': self.addon.name,
-                'io.hass.description': self.addon.description,
+                'io.hass.name': self._fix_label('name'),
+                'io.hass.description': self._fix_label('description'),
             },
             'buildargs': {
                 'BUILD_FROM': self.base_image,
                 'BUILD_VERSION': version,
-                'BUILD_ARCH': self.config.arch,
+                'BUILD_ARCH': self.sys_arch,
                 **self.additional_args,
             }
         }
@@ -63,3 +70,8 @@ class AddonBuild(JsonConfig):
             args['labels']['io.hass.url'] = self.addon.url

         return args
+
+    def _fix_label(self, label_name):
+        """Remove characters they are not supported."""
+        label = getattr(self.addon, label_name, "")
+        return label.replace("'", "")
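
The argument dict built here lines up with the keyword signature of docker-py's image build API. A sketch of how such a dict might be consumed; the client setup is an assumption for illustration, and 'squash' requires an experimental Docker daemon:

    import docker

    def build_addon_image(build_args: dict):
        """Feed prepared build arguments to the Docker daemon."""
        client = docker.DockerClient(base_url="unix://var/run/docker.sock")
        # docker-py accepts path/tag/pull/forcerm/squash/labels/buildargs
        image, logs = client.images.build(**build_args)
        for entry in logs:
            print(entry)
        return image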
@@ -1,12 +1,12 @@
 {
     "local": {
-        "name": "Local Add-Ons",
+        "name": "Local add-ons",
         "url": "https://home-assistant.io/hassio",
-        "maintainer": "By our self"
+        "maintainer": "you"
     },
     "core": {
-        "name": "Built-in Add-Ons",
+        "name": "Official add-ons",
         "url": "https://home-assistant.io/addons",
-        "maintainer": "Home Assistant authors"
+        "maintainer": "Home Assistant"
     }
 }
@@ -1,5 +1,4 @@
 """Init file for HassIO addons."""
-import copy
 import logging
 import json
 from pathlib import Path
@@ -7,24 +6,25 @@ from pathlib import Path
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from .util import extract_hash_from_path
+from .utils import extract_hash_from_path
 from .validate import (
-    SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG)
+    SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG)
 from ..const import (
-    FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
+    FILE_HASSIO_ADDONS, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
-from ..tools import JsonConfig, read_json_file
+from ..coresys import CoreSysAttributes
+from ..utils.json import JsonConfig, read_json_file

 _LOGGER = logging.getLogger(__name__)


-class Data(JsonConfig):
+class AddonsData(JsonConfig, CoreSysAttributes):
     """Hold data for addons inside HassIO."""

-    def __init__(self, config):
+    def __init__(self, coresys):
         """Initialize data holder."""
-        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDON_FILE)
-        self.config = config
+        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
+        self.coresys = coresys
         self._repositories = {}
         self._cache = {}

@@ -55,23 +55,20 @@ class Data(JsonConfig):

         # read core repository
         self._read_addons_folder(
-            self.config.path_addons_core, REPOSITORY_CORE)
+            self.sys_config.path_addons_core, REPOSITORY_CORE)

         # read local repository
         self._read_addons_folder(
-            self.config.path_addons_local, REPOSITORY_LOCAL)
+            self.sys_config.path_addons_local, REPOSITORY_LOCAL)

         # add built-in repositories information
         self._set_builtin_repositories()

         # read custom git repositories
-        for repository_element in self.config.path_addons_git.iterdir():
+        for repository_element in self.sys_config.path_addons_git.iterdir():
             if repository_element.is_dir():
                 self._read_git_repository(repository_element)

-        # update local data
-        self._merge_config()
-
     def _read_git_repository(self, path):
         """Process a custom repository folder."""
         slug = extract_hash_from_path(path)
@@ -83,7 +80,7 @@ class Data(JsonConfig):
                 read_json_file(repository_file)
             )

-        except (OSError, json.JSONDecodeError):
+        except (OSError, json.JSONDecodeError, UnicodeDecodeError):
             _LOGGER.warning("Can't read repository information from %s",
                             repository_file)
             return
@@ -118,7 +115,7 @@ class Data(JsonConfig):
             _LOGGER.warning("Can't read %s", addon)

         except vol.Invalid as ex:
-            _LOGGER.warning("Can't read %s -> %s", addon,
+            _LOGGER.warning("Can't read %s: %s", addon,
                             humanize_error(addon_config, ex))

     def _set_builtin_repositories(self):
@@ -127,7 +124,7 @@ class Data(JsonConfig):
             builtin_file = Path(__file__).parent.joinpath('built-in.json')
             builtin_data = read_json_file(builtin_file)
         except (OSError, json.JSONDecodeError) as err:
-            _LOGGER.warning("Can't read built-in.json -> %s", err)
+            _LOGGER.warning("Can't read built-in json: %s", err)
             return

         # core repository
@@ -137,25 +134,3 @@ class Data(JsonConfig):
         # local repository
         self._repositories[REPOSITORY_LOCAL] = \
             builtin_data[REPOSITORY_LOCAL]
-
-    def _merge_config(self):
-        """Update local config if they have update.
-
-        It need to be the same version as the local version is for merge.
-        """
-        have_change = False
-
-        for addon in set(self.system):
-            # detached
-            if addon not in self._cache:
-                continue
-
-            cache = self._cache[addon]
-            data = self.system[addon]
-            if data[ATTR_VERSION] == cache[ATTR_VERSION]:
-                if data != cache:
-                    self.system[addon] = copy.deepcopy(cache)
-                    have_change = True
-
-        if have_change:
-            self.save()
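
The repository reader above leans on voluptuous plus humanize_error for readable validation failures. A small sketch of that pattern, with a toy schema standing in for SCHEMA_REPOSITORY_CONFIG:

    import voluptuous as vol
    from voluptuous.humanize import humanize_error

    # Toy stand-in for SCHEMA_REPOSITORY_CONFIG
    REPO_SCHEMA = vol.Schema({
        vol.Required('name'): str,
        vol.Optional('maintainer'): str,
    })

    def read_repository(config: dict):
        """Return validated config, or None with a readable warning."""
        try:
            return REPO_SCHEMA(config)
        except vol.Invalid as err:
            print(f"Can't read repository: {humanize_error(config, err)}")
            return None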
@@ -7,104 +7,149 @@ import shutil

 import git

-from .util import get_hash_from_repository
-from ..const import URL_HASSIO_ADDONS
+from .utils import get_hash_from_repository
+from ..const import URL_HASSIO_ADDONS, ATTR_URL, ATTR_BRANCH
+from ..coresys import CoreSysAttributes
+from ..validate import RE_REPOSITORY

 _LOGGER = logging.getLogger(__name__)


-class GitRepo(object):
+class GitRepo(CoreSysAttributes):
     """Manage addons git repo."""

-    def __init__(self, config, loop, path, url):
+    def __init__(self, coresys, path, url):
         """Initialize git base wrapper."""
-        self.config = config
-        self.loop = loop
+        self.coresys = coresys
         self.repo = None
         self.path = path
-        self.url = url
-        self._lock = asyncio.Lock(loop=loop)
+        self.lock = asyncio.Lock(loop=coresys.loop)
+
+        self._data = RE_REPOSITORY.match(url).groupdict()
+
+    @property
+    def url(self):
+        """Return repository URL."""
+        return self._data[ATTR_URL]
+
+    @property
+    def branch(self):
+        """Return repository branch."""
+        return self._data[ATTR_BRANCH]

     async def load(self):
         """Init git addon repo."""
         if not self.path.is_dir():
             return await self.clone()

-        async with self._lock:
+        async with self.lock:
             try:
                 _LOGGER.info("Load addon %s repository", self.path)
-                self.repo = await self.loop.run_in_executor(
-                    None, git.Repo, str(self.path))
+                self.repo = await self.sys_run_in_executor(
+                    git.Repo, str(self.path))

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                     git.GitCommandError) as err:
                 _LOGGER.error("Can't load %s repo: %s.", self.path, err)
+                self._remove()
                 return False

         return True

     async def clone(self):
         """Clone git addon repo."""
-        async with self._lock:
+        async with self.lock:
+            git_args = {
+                attribute: value
+                for attribute, value in (
+                    ('recursive', True),
+                    ('branch', self.branch),
+                    ('depth', 1),
+                    ('shallow-submodules', True)
+                ) if value is not None
+            }
+
             try:
                 _LOGGER.info("Clone addon %s repository", self.url)
-                self.repo = await self.loop.run_in_executor(
-                    None, ft.partial(
-                        git.Repo.clone_from, self.url, str(self.path),
-                        recursive=True))
+                self.repo = await self.sys_run_in_executor(ft.partial(
+                    git.Repo.clone_from, self.url, str(self.path),
+                    **git_args
+                ))

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                     git.GitCommandError) as err:
                 _LOGGER.error("Can't clone %s repo: %s.", self.url, err)
+                self._remove()
                 return False

         return True

     async def pull(self):
         """Pull git addon repo."""
-        if self._lock.locked():
+        if self.lock.locked():
             _LOGGER.warning("It is already a task in progress.")
             return False

-        async with self._lock:
+        async with self.lock:
+            _LOGGER.info("Update addon %s repository", self.url)
+            branch = self.repo.active_branch.name
+
             try:
-                _LOGGER.info("Pull addon %s repository", self.url)
-                await self.loop.run_in_executor(
-                    None, self.repo.remotes.origin.pull)
+                # Download data
+                await self.sys_run_in_executor(ft.partial(
+                    self.repo.remotes.origin.fetch, **{
+                        'update-shallow': True,
+                        'depth': 1,
+                    }))
+
+                # Jump on top of that
+                await self.sys_run_in_executor(ft.partial(
+                    self.repo.git.reset, f"origin/{branch}", hard=True))
+
+                # Cleanup old data
+                await self.sys_run_in_executor(ft.partial(
+                    self.repo.git.clean, "-xdf"))

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                     git.GitCommandError) as err:
-                _LOGGER.error("Can't pull %s repo: %s.", self.url, err)
+                _LOGGER.error("Can't update %s repo: %s.", self.url, err)
                 return False

         return True


-class GitRepoHassIO(GitRepo):
-    """HassIO addons repository."""
-
-    def __init__(self, config, loop):
-        """Initialize git hassio addon repository."""
-        super().__init__(
-            config, loop, config.path_addons_core, URL_HASSIO_ADDONS)
-
-
-class GitRepoCustom(GitRepo):
-    """Custom addons repository."""
-
-    def __init__(self, config, loop, url):
-        """Initialize git hassio addon repository."""
-        path = Path(config.path_addons_git, get_hash_from_repository(url))
-
-        super().__init__(config, loop, path, url)
-
-    def remove(self):
-        """Remove a custom addon."""
-        if self.path.is_dir():
-            _LOGGER.info("Remove custom addon repository %s", self.url)
+    def _remove(self):
+        """Remove a repository."""
+        if not self.path.is_dir():
+            return

         def log_err(funct, path, _):
             """Log error."""
             _LOGGER.warning("Can't remove %s", path)

         shutil.rmtree(str(self.path), onerror=log_err)


+class GitRepoHassIO(GitRepo):
+    """HassIO addons repository."""
+
+    def __init__(self, coresys):
+        """Initialize git hassio addon repository."""
+        super().__init__(
+            coresys, coresys.config.path_addons_core, URL_HASSIO_ADDONS)
+
+
+class GitRepoCustom(GitRepo):
+    """Custom addons repository."""
+
+    def __init__(self, coresys, url):
+        """Initialize git hassio addon repository."""
+        path = Path(
+            coresys.config.path_addons_git,
+            get_hash_from_repository(url))
+
+        super().__init__(coresys, path, url)
+
+    def remove(self):
+        """Remove a custom repository."""
+        _LOGGER.info("Remove custom addon repository %s", self.url)
+        self._remove()
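
The rewritten pull() swaps a plain origin.pull for a shallow fetch, a hard reset, and a clean, which keeps the checkout small and discards local edits. The same sequence in isolation with GitPython; the repo object and branch name are assumed given:

    import git

    def shallow_update(repo: git.Repo, branch: str) -> None:
        """Fetch one commit deep, reset onto it, drop untracked files."""
        repo.remotes.origin.fetch(**{'update-shallow': True, 'depth': 1})
        repo.git.reset(f"origin/{branch}", hard=True)
        repo.git.clean("-xdf")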
@@ -1,18 +1,19 @@
 """Represent a HassIO repository."""
 from .git import GitRepoHassIO, GitRepoCustom
-from .util import get_hash_from_repository
+from .utils import get_hash_from_repository
 from ..const import (
     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)
+from ..coresys import CoreSysAttributes

 UNKNOWN = 'unknown'


-class Repository(object):
+class Repository(CoreSysAttributes):
     """Repository in HassIO."""

-    def __init__(self, config, loop, data, repository):
+    def __init__(self, coresys, repository):
         """Initialize repository object."""
-        self.data = data
+        self.coresys = coresys
         self.source = None
         self.git = None

@@ -20,16 +21,16 @@ class Repository(object):
             self._id = repository
         elif repository == REPOSITORY_CORE:
             self._id = repository
-            self.git = GitRepoHassIO(config, loop)
+            self.git = GitRepoHassIO(coresys)
         else:
             self._id = get_hash_from_repository(repository)
-            self.git = GitRepoCustom(config, loop, repository)
+            self.git = GitRepoCustom(coresys, repository)
             self.source = repository

     @property
     def _mesh(self):
         """Return data struct repository."""
-        return self.data.repositories.get(self._id, {})
+        return self.sys_addons.data.repositories.get(self._id, {})

     @property
     def slug(self):
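
Repository, like the other classes in this changeset, now reaches shared state through CoreSysAttributes instead of stored config/loop/data references. A simplified sketch of how such a mixin can resolve sys_* names against one shared object; this illustrates the idea only and is not the Supervisor's exact implementation:

    class CoreSysAttributes:
        """Resolve sys_* attribute access against a shared coresys object."""

        coresys = None

        def __getattr__(self, name):
            # sys_config -> coresys.config, sys_addons -> coresys.addons, ...
            if name.startswith('sys_') and hasattr(self.coresys, name[4:]):
                return getattr(self.coresys, name[4:])
            raise AttributeError(f"Attribute {name} does not exist!")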
@@ -1,4 +1,5 @@
 """Util addons functions."""
+import asyncio
 import hashlib
 import logging
 import re
@@ -33,3 +34,20 @@ def check_installed(method):
         return await method(addon, *args, **kwargs)

     return wrap_check
+
+
+async def remove_data(folder):
+    """Remove folder and reset privileged."""
+    try:
+        proc = await asyncio.create_subprocess_exec(
+            "rm", "-rf", str(folder),
+            stdout=asyncio.subprocess.DEVNULL
+        )
+
+        _, error_msg = await proc.communicate()
+    except OSError as err:
+        error_msg = str(err)
+
+    if proc.returncode == 0:
+        return
+    _LOGGER.error("Can't remove Add-on Data: %s", error_msg)
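
remove_data shells out to rm -rf so that file ownership and privileges inside the folder cannot trip Python's own unlink calls. Note that as written, an OSError raised by create_subprocess_exec leaves proc unbound, so the returncode check would fail; a guarded variant of the same idea:

    import asyncio
    import logging

    _LOGGER = logging.getLogger(__name__)

    async def remove_data_safe(folder) -> None:
        """Variant of remove_data that also survives a failed exec."""
        try:
            proc = await asyncio.create_subprocess_exec(
                "rm", "-rf", str(folder),
                stdout=asyncio.subprocess.DEVNULL)
            _, error_msg = await proc.communicate()
            if proc.returncode == 0:
                return
        except OSError as err:
            error_msg = str(err)
        _LOGGER.error("Can't remove Add-on Data: %s", error_msg)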
@@ -1,6 +1,7 @@
 """Validate addons options schema."""
 import logging
 import re
+import uuid

 import voluptuous as vol

@@ -12,16 +13,20 @@ from ..const import (
     ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
     ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
     ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
-    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
-    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT,
+    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK, ATTR_UUID,
+    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
     ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
-    ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY)
-from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL
+    ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
+    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY,
+    ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API)
+from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_DEVICE

 _LOGGER = logging.getLogger(__name__)


 RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$")
+RE_SERVICE = re.compile(r"^(?P<service>mqtt)(?::(?P<rights>rw|:ro))?$")
+RE_DISCOVERY = re.compile(r"^(?P<component>\w*)(?:/(?P<platform>\w*>))?$")

 V_STR = 'str'
 V_INT = 'int'
@@ -55,7 +60,11 @@ STARTUP_ALL = [
 PRIVILEGED_ALL = [
     "NET_ADMIN",
     "SYS_ADMIN",
-    "SYS_RAWIO"
+    "SYS_RAWIO",
+    "IPC_LOCK",
+    "SYS_TIME",
+    "SYS_NICE",
+    "SYS_RESOURCE"
 ]

 BASE_IMAGE = {
@@ -91,18 +100,26 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
     vol.Optional(ATTR_WEBUI):
         vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
     vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
     vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
+    vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
     vol.Optional(ATTR_TMPFS):
         vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
-    vol.Optional(ATTR_MAP, default=[]): [vol.Match(RE_VOLUME)],
+    vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
     vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
     vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
+    vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
     vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
     vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
+    vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
     vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
     vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
     vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
     vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
+    vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
+    vol.Optional(ATTR_DISCOVERY): [vol.Match(RE_DISCOVERY)],
     vol.Required(ATTR_OPTIONS): dict,
     vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
         vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
@@ -114,9 +131,10 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
             vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
         }))
     }), False),
-    vol.Optional(ATTR_IMAGE): vol.Match(r"^[\w{}]+/[\-\w{}]+$"),
+    vol.Optional(ATTR_IMAGE):
+        vol.Match(r"^([a-zA-Z.:\d{}]+/)*?([\w{}]+)/([\-\w{}]+)$"),
     vol.Optional(ATTR_TIMEOUT, default=10):
-        vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
+        vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
 }, extra=vol.REMOVE_EXTRA)


@@ -134,23 +152,25 @@ SCHEMA_BUILD_CONFIG = vol.Schema({
         vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
     }),
     vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
-    vol.Optional(ATTR_ARGS, default={}): vol.Schema({
+    vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
         vol.Coerce(str): vol.Coerce(str)
     }),
-})
+}, extra=vol.REMOVE_EXTRA)


 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_USER = vol.Schema({
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-    vol.Optional(ATTR_OPTIONS, default={}): dict,
+    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
+        vol.Match(r"^[0-9a-f]{32}$"),
+    vol.Optional(ATTR_OPTIONS, default=dict): dict,
     vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
     vol.Optional(ATTR_BOOT):
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
-    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
-    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
-})
+    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
+    vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
+}, extra=vol.REMOVE_EXTRA)


 SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
@@ -159,11 +179,11 @@ SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
 })


-SCHEMA_ADDON_FILE = vol.Schema({
-    vol.Optional(ATTR_USER, default={}): {
+SCHEMA_ADDONS_FILE = vol.Schema({
+    vol.Optional(ATTR_USER, default=dict): {
         vol.Coerce(str): SCHEMA_ADDON_USER,
     },
-    vol.Optional(ATTR_SYSTEM, default={}): {
+    vol.Optional(ATTR_SYSTEM, default=dict): {
         vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
     }
 })
@@ -174,7 +194,7 @@ SCHEMA_ADDON_SNAPSHOT = vol.Schema({
     vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
     vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-})
+}, extra=vol.REMOVE_EXTRA)


 def validate_options(raw_schema):
@@ -202,8 +222,7 @@ def validate_options(raw_schema):
                 # normal value
                 options[key] = _single_validate(typ, value, key)
         except (IndexError, KeyError):
-            raise vol.Invalid(
-                "Type error for {}.".format(key)) from None
+            raise vol.Invalid(f"Type error for {key}") from None

     _check_missing_options(raw_schema, options, 'root')
     return options
@@ -212,11 +231,12 @@ def validate_options(raw_schema):


 # pylint: disable=no-value-for-parameter
+# pylint: disable=inconsistent-return-statements
 def _single_validate(typ, value, key):
     """Validate a single element."""
     # if required argument
     if value is None:
-        raise vol.Invalid("Missing required option '{}'.".format(key))
+        raise vol.Invalid(f"Missing required option '{key}'")

     # parse extend data from type
     match = RE_SCHEMA_ELEMENT.match(typ)
@@ -245,7 +265,7 @@ def _single_validate(typ, value, key):
     elif typ.startswith(V_MATCH):
         return vol.Match(match.group('match'))(str(value))

-    raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
+    raise vol.Invalid(f"Fatal error for {key} type {typ}")


 def _nested_validate_list(typ, data_list, key):
@@ -291,5 +311,4 @@ def _check_missing_options(origin, exists, root):
         if isinstance(origin[miss_opt], str) and \
                 origin[miss_opt].endswith("?"):
             continue
-        raise vol.Invalid(
-            "Missing option {} in {}".format(miss_opt, root))
+        raise vol.Invalid(f"Missing option {miss_opt} in {root}")
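
validate_options turns the add-on's string-based schema DSL ('str', 'int', 'port', 'bool', ...) into a callable that voluptuous can wrap. A sketch of how a caller might assemble and run it, assuming a factory-style validate_options as above; the sample schema and options are illustrative:

    import voluptuous as vol

    raw_schema = {'ssl': 'bool', 'port': 'port'}   # add-on config.json style
    options = {'ssl': True, 'port': 8123}          # user-supplied values

    validator = vol.Schema(vol.All(dict, validate_options(raw_schema)))
    print(validator(options))                      # coerced, checked options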
@@ -5,166 +5,254 @@ from pathlib import Path
 from aiohttp import web

 from .addons import APIAddons
+from .discovery import APIDiscovery
 from .homeassistant import APIHomeAssistant
+from .hardware import APIHardware
 from .host import APIHost
-from .network import APINetwork
+from .hassos import APIHassOS
+from .proxy import APIProxy
 from .supervisor import APISupervisor
-from .security import APISecurity
 from .snapshots import APISnapshots
+from .services import APIServices
+from .security import SecurityMiddleware
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)


-class RestAPI(object):
+class RestAPI(CoreSysAttributes):
     """Handle rest api for hassio."""

-    def __init__(self, config, loop):
+    def __init__(self, coresys):
         """Initialize docker base wrapper."""
-        self.config = config
-        self.loop = loop
-        self.webapp = web.Application(loop=self.loop)
+        self.coresys = coresys
+        self.security = SecurityMiddleware(coresys)
+        self.webapp = web.Application(
+            middlewares=[self.security.token_validation], loop=coresys.loop)

         # service stuff
-        self._handler = None
-        self.server = None
+        self._runner = web.AppRunner(self.webapp)
+        self._site = None

-    def register_host(self, host_control, hardware):
+    async def load(self):
+        """Register REST API Calls."""
+        self._register_supervisor()
+        self._register_host()
+        self._register_hassos()
+        self._register_hardware()
+        self._register_homeassistant()
+        self._register_proxy()
+        self._register_panel()
+        self._register_addons()
+        self._register_snapshots()
+        self._register_discovery()
+        self._register_services()
+
+    def _register_host(self):
         """Register hostcontrol function."""
-        api_host = APIHost(self.config, self.loop, host_control, hardware)
+        api_host = APIHost()
+        api_host.coresys = self.coresys

-        self.webapp.router.add_get('/host/info', api_host.info)
-        self.webapp.router.add_get('/host/hardware', api_host.hardware)
-        self.webapp.router.add_post('/host/reboot', api_host.reboot)
-        self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
-        self.webapp.router.add_post('/host/update', api_host.update)
-        self.webapp.router.add_post('/host/options', api_host.options)
+        self.webapp.add_routes([
+            web.get('/host/info', api_host.info),
+            web.post('/host/reboot', api_host.reboot),
+            web.post('/host/shutdown', api_host.shutdown),
+            web.post('/host/reload', api_host.reload),
+            web.post('/host/options', api_host.options),
+            web.get('/host/services', api_host.services),
+            web.post('/host/services/{service}/stop', api_host.service_stop),
+            web.post('/host/services/{service}/start', api_host.service_start),
+            web.post(
+                '/host/services/{service}/restart', api_host.service_restart),
+            web.post(
+                '/host/services/{service}/reload', api_host.service_reload),
+        ])

-    def register_network(self, host_control):
-        """Register network function."""
-        api_net = APINetwork(self.config, self.loop, host_control)
+    def _register_hassos(self):
+        """Register hassos function."""
+        api_hassos = APIHassOS()
+        api_hassos.coresys = self.coresys

-        self.webapp.router.add_get('/network/info', api_net.info)
-        self.webapp.router.add_post('/network/options', api_net.options)
+        self.webapp.add_routes([
+            web.get('/hassos/info', api_hassos.info),
+            web.post('/hassos/update', api_hassos.update),
+            web.post('/hassos/update/cli', api_hassos.update_cli),
+            web.post('/hassos/config/sync', api_hassos.config_sync),
+        ])

-    def register_supervisor(self, supervisor, snapshots, addons, host_control,
-                            updater):
+    def _register_hardware(self):
+        """Register hardware function."""
+        api_hardware = APIHardware()
+        api_hardware.coresys = self.coresys
+
+        self.webapp.add_routes([
+            web.get('/hardware/info', api_hardware.info),
+            web.get('/hardware/audio', api_hardware.audio),
+        ])
+
+    def _register_supervisor(self):
         """Register supervisor function."""
-        api_supervisor = APISupervisor(
-            self.config, self.loop, supervisor, snapshots, addons,
-            host_control, updater)
+        api_supervisor = APISupervisor()
+        api_supervisor.coresys = self.coresys

-        self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
-        self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
-        self.webapp.router.add_post(
-            '/supervisor/update', api_supervisor.update)
-        self.webapp.router.add_post(
-            '/supervisor/reload', api_supervisor.reload)
-        self.webapp.router.add_post(
-            '/supervisor/options', api_supervisor.options)
-        self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)
+        self.webapp.add_routes([
+            web.get('/supervisor/ping', api_supervisor.ping),
+            web.get('/supervisor/info', api_supervisor.info),
+            web.get('/supervisor/stats', api_supervisor.stats),
+            web.get('/supervisor/logs', api_supervisor.logs),
+            web.post('/supervisor/update', api_supervisor.update),
+            web.post('/supervisor/reload', api_supervisor.reload),
+            web.post('/supervisor/options', api_supervisor.options),
+        ])

-    def register_homeassistant(self, dock_homeassistant):
+    def _register_homeassistant(self):
         """Register homeassistant function."""
-        api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)
+        api_hass = APIHomeAssistant()
+        api_hass.coresys = self.coresys

-        self.webapp.router.add_get('/homeassistant/info', api_hass.info)
-        self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
-        self.webapp.router.add_post('/homeassistant/options', api_hass.options)
-        self.webapp.router.add_post('/homeassistant/update', api_hass.update)
-        self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
-        self.webapp.router.add_post('/homeassistant/stop', api_hass.stop)
-        self.webapp.router.add_post('/homeassistant/start', api_hass.start)
-        self.webapp.router.add_post('/homeassistant/check', api_hass.check)
-        self.webapp.router.add_post(
-            '/homeassistant/api/{path:.+}', api_hass.api)
-        self.webapp.router.add_get(
-            '/homeassistant/api/{path:.+}', api_hass.api)
+        self.webapp.add_routes([
+            web.get('/homeassistant/info', api_hass.info),
+            web.get('/homeassistant/logs', api_hass.logs),
+            web.get('/homeassistant/stats', api_hass.stats),
+            web.post('/homeassistant/options', api_hass.options),
+            web.post('/homeassistant/update', api_hass.update),
+            web.post('/homeassistant/restart', api_hass.restart),
+            web.post('/homeassistant/stop', api_hass.stop),
+            web.post('/homeassistant/start', api_hass.start),
+            web.post('/homeassistant/check', api_hass.check),
+        ])

-    def register_addons(self, addons):
+    def _register_proxy(self):
+        """Register HomeAssistant API Proxy."""
+        api_proxy = APIProxy()
+        api_proxy.coresys = self.coresys
+
+        self.webapp.add_routes([
+            web.get('/homeassistant/api/websocket', api_proxy.websocket),
+            web.get('/homeassistant/websocket', api_proxy.websocket),
+            web.get('/homeassistant/api/stream', api_proxy.stream),
+            web.post('/homeassistant/api/{path:.+}', api_proxy.api),
+            web.get('/homeassistant/api/{path:.+}', api_proxy.api),
+            web.get('/homeassistant/api/', api_proxy.api),
+        ])
+
+    def _register_addons(self):
         """Register homeassistant function."""
-        api_addons = APIAddons(self.config, self.loop, addons)
+        api_addons = APIAddons()
+        api_addons.coresys = self.coresys

-        self.webapp.router.add_get('/addons', api_addons.list)
-        self.webapp.router.add_post('/addons/reload', api_addons.reload)
-        self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
-        self.webapp.router.add_post(
-            '/addons/{addon}/install', api_addons.install)
-        self.webapp.router.add_post(
-            '/addons/{addon}/uninstall', api_addons.uninstall)
-        self.webapp.router.add_post('/addons/{addon}/start', api_addons.start)
-        self.webapp.router.add_post('/addons/{addon}/stop', api_addons.stop)
-        self.webapp.router.add_post(
-            '/addons/{addon}/restart', api_addons.restart)
-        self.webapp.router.add_post(
-            '/addons/{addon}/update', api_addons.update)
-        self.webapp.router.add_post(
-            '/addons/{addon}/options', api_addons.options)
-        self.webapp.router.add_post(
-            '/addons/{addon}/rebuild', api_addons.rebuild)
-        self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
-        self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
-        self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)
+        self.webapp.add_routes([
+            web.get('/addons', api_addons.list),
+            web.post('/addons/reload', api_addons.reload),
+            web.get('/addons/{addon}/info', api_addons.info),
+            web.post('/addons/{addon}/install', api_addons.install),
+            web.post('/addons/{addon}/uninstall', api_addons.uninstall),
+            web.post('/addons/{addon}/start', api_addons.start),
+            web.post('/addons/{addon}/stop', api_addons.stop),
+            web.post('/addons/{addon}/restart', api_addons.restart),
+            web.post('/addons/{addon}/update', api_addons.update),
+            web.post('/addons/{addon}/options', api_addons.options),
+            web.post('/addons/{addon}/rebuild', api_addons.rebuild),
+            web.get('/addons/{addon}/logs', api_addons.logs),
+            web.get('/addons/{addon}/icon', api_addons.icon),
+            web.get('/addons/{addon}/logo', api_addons.logo),
+            web.get('/addons/{addon}/changelog', api_addons.changelog),
+            web.post('/addons/{addon}/stdin', api_addons.stdin),
+            web.get('/addons/{addon}/stats', api_addons.stats),
+        ])

-    def register_security(self):
-        """Register security function."""
-        api_security = APISecurity(self.config, self.loop)
-
-        self.webapp.router.add_get('/security/info', api_security.info)
-        self.webapp.router.add_post('/security/options', api_security.options)
-        self.webapp.router.add_post('/security/totp', api_security.totp)
-        self.webapp.router.add_post('/security/session', api_security.session)
-
-    def register_snapshots(self, snapshots):
+    def _register_snapshots(self):
         """Register snapshots function."""
-        api_snapshots = APISnapshots(self.config, self.loop, snapshots)
+        api_snapshots = APISnapshots()
+        api_snapshots.coresys = self.coresys

-        self.webapp.router.add_get('/snapshots', api_snapshots.list)
-        self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)
-        self.webapp.router.add_post(
-            '/snapshots/new/full', api_snapshots.snapshot_full)
-        self.webapp.router.add_post(
-            '/snapshots/new/partial', api_snapshots.snapshot_partial)
-        self.webapp.router.add_get(
-            '/snapshots/{snapshot}/info', api_snapshots.info)
-        self.webapp.router.add_post(
-            '/snapshots/{snapshot}/remove', api_snapshots.remove)
-        self.webapp.router.add_post(
-            '/snapshots/{snapshot}/restore/full', api_snapshots.restore_full)
-        self.webapp.router.add_post(
-            '/snapshots/{snapshot}/restore/partial',
-            api_snapshots.restore_partial)
+        self.webapp.add_routes([
+            web.get('/snapshots', api_snapshots.list),
+            web.post('/snapshots/reload', api_snapshots.reload),
+            web.post('/snapshots/new/full', api_snapshots.snapshot_full),
+            web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
+            web.post('/snapshots/new/upload', api_snapshots.upload),
+            web.get('/snapshots/{snapshot}/info', api_snapshots.info),
+            web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
+            web.post('/snapshots/{snapshot}/restore/full',
+                     api_snapshots.restore_full),
+            web.post('/snapshots/{snapshot}/restore/partial',
+                     api_snapshots.restore_partial),
+            web.get('/snapshots/{snapshot}/download', api_snapshots.download),
+        ])

-    def register_panel(self):
+    def _register_services(self):
+        api_services = APIServices()
+        api_services.coresys = self.coresys
+
+        self.webapp.add_routes([
+            web.get('/services', api_services.list),
+            web.get('/services/{service}', api_services.get_service),
+            web.post('/services/{service}', api_services.set_service),
+            web.delete('/services/{service}', api_services.del_service),
+        ])
+
+    def _register_discovery(self):
+        api_discovery = APIDiscovery()
+        api_discovery.coresys = self.coresys
+
+        self.webapp.add_routes([
+            web.get('/services/discovery', api_discovery.list),
+            web.get('/services/discovery/{uuid}', api_discovery.get_discovery),
+            web.delete('/services/discovery/{uuid}',
+                       api_discovery.del_discovery),
+            web.post('/services/discovery', api_discovery.set_discovery),
+        ])
+
+    def _register_panel(self):
         """Register panel for homeassistant."""
-        panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html')
+        panel_dir = Path(__file__).parent.joinpath("panel")

-        def get_panel(request):
-            """Return file response with panel."""
-            return web.FileResponse(panel)
+        def create_response(panel_file):
+            """Create a function to generate a response."""
+            path = panel_dir.joinpath(f"{panel_file!s}.html")
+            return lambda request: web.FileResponse(path)

-        self.webapp.router.add_get('/panel', get_panel)
+        # This route is for backwards compatibility with HA < 0.58
+        self.webapp.add_routes([
+            web.get('/panel', create_response('hassio-main-es5'))])
+
+        # This route is for backwards compatibility with HA 0.58 - 0.61
+        self.webapp.add_routes([
+            web.get('/panel_es5', create_response('hassio-main-es5')),
+            web.get('/panel_latest', create_response('hassio-main-latest')),
+        ])
+
+        # This route is for backwards compatibility with HA 0.62 - 0.70
+        self.webapp.add_routes([
+            web.get('/app-es5/index.html', create_response('index')),
+            web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
+        ])
+
+        # This route is for HA > 0.70
+        self.webapp.add_routes([web.static('/app', panel_dir)])

     async def start(self):
         """Run rest api webserver."""
-        self._handler = self.webapp.make_handler(loop=self.loop)
+        await self._runner.setup()
+        self._site = web.TCPSite(
+            self._runner, host="0.0.0.0", port=80, shutdown_timeout=5)

         try:
-            self.server = await self.loop.create_server(
-                self._handler, "0.0.0.0", "80")
+            await self._site.start()
         except OSError as err:
             _LOGGER.fatal(
                 "Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
+        else:
+            _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)

     async def stop(self):
         """Stop rest api webserver."""
-        if self.server:
-            self.server.close()
-            await self.server.wait_closed()
-        await self.webapp.shutdown()
+        if not self._site:
+            return

-        if self._handler:
-            await self._handler.shutdown(60)
-        await self.webapp.cleanup()
+        # Shutdown running API
+        await self._site.stop()
+        await self._runner.cleanup()
+
+        _LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)
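
start() and stop() above replace the deprecated make_handler/create_server flow with aiohttp's AppRunner and TCPSite. The same lifecycle in isolation; the port is illustrative:

    from aiohttp import web

    async def run_site(app: web.Application) -> web.TCPSite:
        """Bring an aiohttp application up without loop.create_server."""
        runner = web.AppRunner(app)
        await runner.setup()
        site = web.TCPSite(runner, host="0.0.0.0", port=8080)
        await site.start()
        return site  # later: await site.stop(); await runner.cleanup()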
@@ -5,7 +5,7 @@ import logging
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from .util import api_process, api_process_raw, api_validate
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
from ..const import (
|
||||
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
|
||||
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
|
||||
@@ -14,8 +14,13 @@ from ..const import (
|
||||
ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
|
||||
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
|
||||
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
|
||||
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
|
||||
from ..validate import DOCKER_PORTS
|
||||
ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
|
||||
ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
|
||||
ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
|
||||
ATTR_DISCOVERY, ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API,
|
||||
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..validate import DOCKER_PORTS, ALSA_DEVICE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -28,23 +33,19 @@ SCHEMA_OPTIONS = vol.Schema({
|
||||
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
|
||||
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
|
||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
|
||||
})
|
||||
|
||||
|
||||
class APIAddons(object):
|
||||
class APIAddons(CoreSysAttributes):
|
||||
"""Handle rest api for addons functions."""
|
||||
|
||||
def __init__(self, config, loop, addons):
|
||||
"""Initialize homeassistant rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.addons = addons
|
||||
|
||||
def _extract_addon(self, request, check_installed=True):
|
||||
"""Return addon and if not exists trow a exception."""
|
||||
addon = self.addons.get(request.match_info.get('addon'))
|
||||
"""Return addon, throw an exception it it doesn't exist."""
|
||||
addon = self.sys_addons.get(request.match_info.get('addon'))
|
||||
if not addon:
|
||||
raise RuntimeError("Addon not exists")
|
||||
raise RuntimeError("Addon does not exist")
|
||||
|
||||
if check_installed and not addon.is_installed:
|
||||
raise RuntimeError("Addon is not installed")
|
||||
@@ -56,14 +57,14 @@ class APIAddons(object):
         """Return a simplified device list."""
         dev_list = addon.devices
         if not dev_list:
-            return
+            return None
         return [row.split(':')[0] for row in dev_list]

     @api_process
     async def list(self, request):
         """Return all addons / repositories."""
         data_addons = []
-        for addon in self.addons.list_addons:
+        for addon in self.sys_addons.list_addons:
             data_addons.append({
                 ATTR_NAME: addon.name,
                 ATTR_SLUG: addon.slug,
@@ -74,19 +75,13 @@ class APIAddons(object):
                 ATTR_DETACHED: addon.is_detached,
                 ATTR_REPOSITORY: addon.repository,
                 ATTR_BUILD: addon.need_build,
-                ATTR_PRIVILEGED: addon.privileged,
-                ATTR_DEVICES: self._pretty_devices(addon),
                 ATTR_URL: addon.url,
                 ATTR_ICON: addon.with_icon,
                 ATTR_LOGO: addon.with_logo,
-                ATTR_STDIN: addon.with_stdin,
-                ATTR_HASSIO_API: addon.access_hassio_api,
-                ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
-                ATTR_AUDIO: addon.with_audio,
-                ATTR_GPIO: addon.with_gpio,
             })

         data_repositories = []
-        for repository in self.addons.list_repositories:
+        for repository in self.sys_addons.list_repositories:
             data_repositories.append({
                 ATTR_SLUG: repository.slug,
                 ATTR_NAME: repository.name,
@@ -103,7 +98,7 @@ class APIAddons(object):
     @api_process
     async def reload(self, request):
         """Reload all addons data."""
-        await asyncio.shield(self.addons.reload(), loop=self.loop)
+        await asyncio.shield(self.sys_addons.reload())
         return True

     @api_process
@@ -113,7 +108,9 @@ class APIAddons(object):

         return {
             ATTR_NAME: addon.name,
             ATTR_SLUG: addon.slug,
             ATTR_DESCRIPTON: addon.description,
+            ATTR_LONG_DESCRIPTION: addon.long_description,
             ATTR_VERSION: addon.version_installed,
+            ATTR_AUTO_UPDATE: addon.auto_update,
             ATTR_REPOSITORY: addon.repository,
@@ -126,17 +123,26 @@ class APIAddons(object):
             ATTR_BUILD: addon.need_build,
             ATTR_NETWORK: addon.ports,
             ATTR_HOST_NETWORK: addon.host_network,
+            ATTR_HOST_IPC: addon.host_ipc,
+            ATTR_HOST_DBUS: addon.host_dbus,
             ATTR_PRIVILEGED: addon.privileged,
+            ATTR_APPARMOR: addon.apparmor,
             ATTR_DEVICES: self._pretty_devices(addon),
             ATTR_ICON: addon.with_icon,
             ATTR_LOGO: addon.with_logo,
+            ATTR_CHANGELOG: addon.with_changelog,
             ATTR_WEBUI: addon.webui,
             ATTR_STDIN: addon.with_stdin,
             ATTR_HASSIO_API: addon.access_hassio_api,
             ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
             ATTR_GPIO: addon.with_gpio,
+            ATTR_DEVICETREE: addon.with_devicetree,
+            ATTR_DOCKER_API: addon.with_docker_api,
             ATTR_AUDIO: addon.with_audio,
             ATTR_AUDIO_INPUT: addon.audio_input,
             ATTR_AUDIO_OUTPUT: addon.audio_output,
+            ATTR_SERVICES: addon.services,
+            ATTR_DISCOVERY: addon.discovery,
         }

     @api_process
@@ -145,7 +151,7 @@ class APIAddons(object):
         addon = self._extract_addon(request)

         addon_schema = SCHEMA_OPTIONS.extend({
-            vol.Optional(ATTR_OPTIONS): addon.schema,
+            vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
         })

         body = await api_validate(addon_schema, request)
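The options handler builds its validation schema per request: the static SCHEMA_OPTIONS is extended with the addon's own options schema, and wrapping that in vol.Any(None, ...) lets a client send null to reset the options. A simplified, self-contained illustration of the pattern (the keys below are made up for the example, not taken from the supervisor):

import voluptuous as vol

SCHEMA_OPTIONS = vol.Schema({
    vol.Optional('boot'): vol.In(['auto', 'manual']),
})

# Per request, extend the base schema with the addon's own schema so the
# payload is validated against both; None explicitly resets the options.
addon_schema = SCHEMA_OPTIONS.extend({
    vol.Optional('options'): vol.Any(None, vol.Schema({'port': int})),
})

print(addon_schema({'boot': 'auto', 'options': {'port': 8123}}))
print(addon_schema({'options': None}))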
@@ -163,19 +169,39 @@ class APIAddons(object):
         if ATTR_AUDIO_OUTPUT in body:
             addon.audio_output = body[ATTR_AUDIO_OUTPUT]

+        addon.save_data()
         return True

     @api_process
+    async def stats(self, request):
+        """Return resource information."""
+        addon = self._extract_addon(request)
+        stats = await addon.stats()
+
+        if not stats:
+            raise RuntimeError("No stats available")
+
+        return {
+            ATTR_CPU_PERCENT: stats.cpu_percent,
+            ATTR_MEMORY_USAGE: stats.memory_usage,
+            ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_NETWORK_RX: stats.network_rx,
+            ATTR_NETWORK_TX: stats.network_tx,
+            ATTR_BLK_READ: stats.blk_read,
+            ATTR_BLK_WRITE: stats.blk_write,
+        }
+
+    @api_process
     def install(self, request):
         """Install addon."""
         addon = self._extract_addon(request, check_installed=False)
-        return asyncio.shield(addon.install(), loop=self.loop)
+        return asyncio.shield(addon.install())

     @api_process
     def uninstall(self, request):
         """Uninstall addon."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.uninstall(), loop=self.loop)
+        return asyncio.shield(addon.uninstall())

     @api_process
     def start(self, request):
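Throughout these handlers the diff also drops the deprecated loop= keyword from asyncio.shield(); the running loop is picked up implicitly on modern asyncio. The shield itself is the important part: if the HTTP request is aborted, the wrapped job (install, update, rebuild) keeps running. A small demonstration of that behavior, independent of the supervisor:

import asyncio


async def long_job():
    # stand-in for addon.install() / addon.update()
    await asyncio.sleep(1)
    print('job finished although the caller was cancelled')


async def handler():
    await asyncio.shield(long_job())


async def main():
    task = asyncio.ensure_future(handler())
    await asyncio.sleep(0.1)
    task.cancel()             # simulates a client aborting the API call
    await asyncio.sleep(1.5)  # the shielded job still runs to completion

asyncio.run(main())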
@@ -189,13 +215,13 @@ class APIAddons(object):
         except vol.Invalid as ex:
             raise RuntimeError(humanize_error(options, ex)) from None

-        return asyncio.shield(addon.start(), loop=self.loop)
+        return asyncio.shield(addon.start())

     @api_process
     def stop(self, request):
         """Stop addon."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.stop(), loop=self.loop)
+        return asyncio.shield(addon.stop())

     @api_process
     def update(self, request):
@@ -205,13 +231,13 @@ class APIAddons(object):
         if addon.last_version == addon.version_installed:
             raise RuntimeError("No update available!")

-        return asyncio.shield(addon.update(), loop=self.loop)
+        return asyncio.shield(addon.update())

     @api_process
     def restart(self, request):
         """Restart addon."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.restart(), loop=self.loop)
+        return asyncio.shield(addon.restart())

     @api_process
     def rebuild(self, request):
@@ -220,7 +246,7 @@ class APIAddons(object):
         if not addon.need_build:
             raise RuntimeError("Only local build addons are supported")

-        return asyncio.shield(addon.rebuild(), loop=self.loop)
+        return asyncio.shield(addon.rebuild())

     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
@@ -228,22 +254,42 @@ class APIAddons(object):
         addon = self._extract_addon(request)
         return addon.logs()

+    @api_process_raw(CONTENT_TYPE_PNG)
+    async def icon(self, request):
+        """Return icon from addon."""
+        addon = self._extract_addon(request, check_installed=False)
+        if not addon.with_icon:
+            raise RuntimeError("No icon found!")
+
+        with addon.path_icon.open('rb') as png:
+            return png.read()
+
     @api_process_raw(CONTENT_TYPE_PNG)
     async def logo(self, request):
         """Return logo from addon."""
         addon = self._extract_addon(request, check_installed=False)
         if not addon.with_logo:
-            raise RuntimeError("No image found!")
+            raise RuntimeError("No logo found!")

         with addon.path_logo.open('rb') as png:
             return png.read()

+    @api_process_raw(CONTENT_TYPE_TEXT)
+    async def changelog(self, request):
+        """Return changelog from addon."""
+        addon = self._extract_addon(request, check_installed=False)
+        if not addon.with_changelog:
+            raise RuntimeError("No changelog found!")
+
+        with addon.path_changelog.open('r') as changelog:
+            return changelog.read()
+
     @api_process
     async def stdin(self, request):
         """Write to stdin of addon."""
         addon = self._extract_addon(request)
         if not addon.with_stdin:
-            raise RuntimeError("STDIN not supported by addons")
+            raise RuntimeError("STDIN not supported by addon")

         data = await request.read()
-        return await asyncio.shield(addon.write_stdin(data), loop=self.loop)
+        return await asyncio.shield(addon.write_stdin(data))
hassio/api/discovery.py (new file, 72 lines)
@@ -0,0 +1,72 @@
"""Init file for HassIO network rest api."""
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from .utils import api_process, api_validate
|
||||
from ..const import (
|
||||
ATTR_PROVIDER, ATTR_UUID, ATTR_COMPONENT, ATTR_PLATFORM, ATTR_CONFIG,
|
||||
ATTR_DISCOVERY, REQUEST_FROM)
|
||||
from ..coresys import CoreSysAttributes
|
||||
|
||||
|
||||
SCHEMA_DISCOVERY = vol.Schema({
|
||||
vol.Required(ATTR_COMPONENT): vol.Coerce(str),
|
||||
vol.Optional(ATTR_PLATFORM): vol.Any(None, vol.Coerce(str)),
|
||||
vol.Optional(ATTR_CONFIG): vol.Any(None, dict),
|
||||
})
|
||||
|
||||
|
||||
class APIDiscovery(CoreSysAttributes):
|
||||
"""Handle rest api for discovery functions."""
|
||||
|
||||
def _extract_message(self, request):
|
||||
"""Extract discovery message from URL."""
|
||||
message = self.sys_discovery.get(request.match_info.get('uuid'))
|
||||
if not message:
|
||||
raise RuntimeError("Discovery message not found")
|
||||
return message
|
||||
|
||||
@api_process
|
||||
async def list(self, request):
|
||||
"""Show register services."""
|
||||
discovery = []
|
||||
for message in self.sys_discovery.list_messages:
|
||||
discovery.append({
|
||||
ATTR_PROVIDER: message.provider,
|
||||
ATTR_UUID: message.uuid,
|
||||
ATTR_COMPONENT: message.component,
|
||||
ATTR_PLATFORM: message.platform,
|
||||
ATTR_CONFIG: message.config,
|
||||
})
|
||||
|
||||
return {ATTR_DISCOVERY: discovery}
|
||||
|
||||
@api_process
|
||||
async def set_discovery(self, request):
|
||||
"""Write data into a discovery pipeline."""
|
||||
body = await api_validate(SCHEMA_DISCOVERY, request)
|
||||
message = self.sys_discovery.send(
|
||||
provider=request[REQUEST_FROM], **body)
|
||||
|
||||
return {ATTR_UUID: message.uuid}
|
||||
|
||||
@api_process
|
||||
async def get_discovery(self, request):
|
||||
"""Read data into a discovery message."""
|
||||
message = self._extract_message(request)
|
||||
|
||||
return {
|
||||
ATTR_PROVIDER: message.provider,
|
||||
ATTR_UUID: message.uuid,
|
||||
ATTR_COMPONENT: message.component,
|
||||
ATTR_PLATFORM: message.platform,
|
||||
ATTR_CONFIG: message.config,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def del_discovery(self, request):
|
||||
"""Delete data into a discovery message."""
|
||||
message = self._extract_message(request)
|
||||
|
||||
self.sys_discovery.remove(message)
|
||||
return True
|
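A hypothetical client-side view of these endpoints: an addon posts a discovery message, and Home Assistant later reads it back by UUID. The /discovery URL layout, the hassio hostname, and the X-HASSIO-KEY header are assumptions for illustration; only the payload shape is taken from SCHEMA_DISCOVERY above.

import asyncio
import aiohttp


async def announce_mqtt(token):
    headers = {'X-HASSIO-KEY': token}  # assumed auth header
    payload = {'component': 'mqtt', 'config': {'host': '172.30.32.1'}}
    async with aiohttp.ClientSession(headers=headers) as session:
        # register a discovery message (assumed endpoint)
        async with session.post('http://hassio/discovery', json=payload) as resp:
            uuid = (await resp.json())['data']['uuid']
        # read it back, as Home Assistant would
        async with session.get(f'http://hassio/discovery/{uuid}') as resp:
            return await resp.json()

print(asyncio.run(announce_mqtt('my-hassio-token')))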
hassio/api/hardware.py (new file, 34 lines)
@@ -0,0 +1,34 @@
"""Init file for HassIO hardware rest api."""
|
||||
import logging
|
||||
|
||||
from .utils import api_process
|
||||
from ..const import (
|
||||
ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
|
||||
from ..coresys import CoreSysAttributes
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APIHardware(CoreSysAttributes):
|
||||
"""Handle rest api for hardware functions."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Show hardware info."""
|
||||
return {
|
||||
ATTR_SERIAL: list(self.sys_hardware.serial_devices),
|
||||
ATTR_INPUT: list(self.sys_hardware.input_devices),
|
||||
ATTR_DISK: list(self.sys_hardware.disk_devices),
|
||||
ATTR_GPIO: list(self.sys_hardware.gpio_devices),
|
||||
ATTR_AUDIO: self.sys_hardware.audio_devices,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def audio(self, request):
|
||||
"""Show ALSA audio devices."""
|
||||
return {
|
||||
ATTR_AUDIO: {
|
||||
ATTR_INPUT: self.sys_host.alsa.input_devices,
|
||||
ATTR_OUTPUT: self.sys_host.alsa.output_devices,
|
||||
}
|
||||
}
|
hassio/api/hassos.py (new file, 53 lines)
@@ -0,0 +1,53 @@
"""Init file for Hass.io hassos rest api."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from .utils import api_process, api_validate
|
||||
from ..const import (
|
||||
ATTR_VERSION, ATTR_BOARD, ATTR_VERSION_LATEST, ATTR_VERSION_CLI,
|
||||
ATTR_VERSION_CLI_LATEST)
|
||||
from ..coresys import CoreSysAttributes
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
|
||||
|
||||
class APIHassOS(CoreSysAttributes):
|
||||
"""Handle rest api for hassos functions."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return hassos information."""
|
||||
return {
|
||||
ATTR_VERSION: self.sys_hassos.version,
|
||||
ATTR_VERSION_CLI: self.sys_hassos.version_cli,
|
||||
ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
|
||||
ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
|
||||
ATTR_BOARD: self.sys_hassos.board,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update HassOS."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)
|
||||
|
||||
await asyncio.shield(self.sys_hassos.update(version))
|
||||
|
||||
@api_process
|
||||
async def update_cli(self, request):
|
||||
"""Update HassOS CLI."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)
|
||||
|
||||
await asyncio.shield(self.sys_hassos.update_cli(version))
|
||||
|
||||
@api_process
|
||||
def config_sync(self, request):
|
||||
"""Trigger config reload on HassOS."""
|
||||
return asyncio.shield(self.sys_hassos.config_sync())
|
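Every handler in these API classes is wrapped in @api_process from .utils, whose implementation is not part of this diff. Judging only by how it is used here, it awaits the handler, turns RuntimeError into an error response, and wraps return values in a JSON envelope. A hedged sketch of such a wrapper; the envelope fields and status code are assumptions, not the real hassio/api/utils.py:

from functools import wraps
from aiohttp import web


def api_process(method):
    """Wrap an API handler into an ok/error JSON envelope (sketch)."""
    @wraps(method)
    async def wrapper(api, request):
        try:
            # works for coroutines and for plain handlers that
            # return an awaitable such as asyncio.shield(...)
            result = await method(api, request)
        except RuntimeError as err:
            return web.json_response(
                {'result': 'error', 'message': str(err)}, status=400)
        if result is None or result is True:
            result = {}
        return web.json_response({'result': 'ok', 'data': result})
    return wrapper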
@@ -2,34 +2,36 @@
 import asyncio
 import logging

-import aiohttp
-from aiohttp import web
-from aiohttp.web_exceptions import HTTPBadGateway
-from aiohttp.hdrs import CONTENT_TYPE
-import async_timeout
 import voluptuous as vol

-from .util import api_process, api_process_raw, api_validate
+from .utils import api_process, api_process_raw, api_validate
 from ..const import (
-    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
-    ATTR_BOOT, ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG,
-    CONTENT_TYPE_BINARY, HEADER_HA_ACCESS)
-from ..validate import HASS_DEVICES, NETWORK_PORT
+    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
+    ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
+    ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
+    ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, ATTR_MACHINE,
+    ATTR_REFRESH_TOKEN, CONTENT_TYPE_BINARY)
+from ..coresys import CoreSysAttributes
+from ..validate import NETWORK_PORT, DOCKER_IMAGE

 _LOGGER = logging.getLogger(__name__)


 # pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema({
-    vol.Optional(ATTR_DEVICES): HASS_DEVICES,
     vol.Optional(ATTR_BOOT): vol.Boolean(),
-    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
-    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
+    vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
         vol.Any(None, vol.Coerce(str)),
+    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
+        vol.Any(None, DOCKER_IMAGE),
     vol.Optional(ATTR_PORT): NETWORK_PORT,
     vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
     vol.Optional(ATTR_SSL): vol.Boolean(),
     vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
+    vol.Optional(ATTR_WAIT_BOOT):
+        vol.All(vol.Coerce(int), vol.Range(min=60)),
+    # Required once we enforce user system
+    vol.Optional(ATTR_REFRESH_TOKEN): str,
 })

 SCHEMA_VERSION = vol.Schema({
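The two custom-image keys above are tied together with vol.Inclusive: both must be supplied or neither, which is why the options handler further down can treat them as a pair. A stand-alone demonstration of the marker, with simplified keys:

import voluptuous as vol

schema = vol.Schema({
    vol.Inclusive('image', 'custom_hass'): vol.Any(None, str),
    vol.Inclusive('last_version', 'custom_hass'): vol.Any(None, str),
})

print(schema({'image': 'me/homeassistant', 'last_version': '0.70.0'}))
try:
    schema({'image': 'me/homeassistant'})  # missing its group partner
except vol.Invalid as err:
    print('rejected:', err)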
@@ -37,67 +39,23 @@ SCHEMA_VERSION = vol.Schema({
 })


-class APIHomeAssistant(object):
+class APIHomeAssistant(CoreSysAttributes):
     """Handle rest api for homeassistant functions."""

-    def __init__(self, config, loop, homeassistant):
-        """Initialize homeassistant rest api part."""
-        self.config = config
-        self.loop = loop
-        self.homeassistant = homeassistant
-
-    async def homeassistant_proxy(self, path, request):
-        """Return a client request with proxy origin for Home-Assistant."""
-        url = "{}/api/{}".format(self.homeassistant.api_url, path)
-
-        try:
-            data = None
-            headers = {}
-            method = getattr(
-                self.homeassistant.websession, request.method.lower())
-
-            # read data
-            with async_timeout.timeout(10, loop=self.loop):
-                data = await request.read()
-
-            if data:
-                headers.update({CONTENT_TYPE: request.content_type})
-
-            # need api password?
-            if self.homeassistant.api_password:
-                headers = {HEADER_HA_ACCESS: self.homeassistant.api_password}
-
-            # reset headers
-            if not headers:
-                headers = None
-
-            client = await method(
-                url, data=data, headers=headers, timeout=300
-            )
-
-            return client
-
-        except aiohttp.ClientError as err:
-            _LOGGER.error("Client error on api %s request %s.", path, err)
-
-        except asyncio.TimeoutError:
-            _LOGGER.error("Client timeout error on api request %s.", path)
-
-        raise HTTPBadGateway()
-
     @api_process
     async def info(self, request):
         """Return host information."""
         return {
-            ATTR_VERSION: self.homeassistant.version,
-            ATTR_LAST_VERSION: self.homeassistant.last_version,
-            ATTR_IMAGE: self.homeassistant.image,
-            ATTR_DEVICES: self.homeassistant.devices,
-            ATTR_CUSTOM: self.homeassistant.is_custom_image,
-            ATTR_BOOT: self.homeassistant.boot,
-            ATTR_PORT: self.homeassistant.api_port,
-            ATTR_SSL: self.homeassistant.api_ssl,
-            ATTR_WATCHDOG: self.homeassistant.watchdog,
+            ATTR_VERSION: self.sys_homeassistant.version,
+            ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
+            ATTR_MACHINE: self.sys_homeassistant.machine,
+            ATTR_IMAGE: self.sys_homeassistant.image,
+            ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
+            ATTR_BOOT: self.sys_homeassistant.boot,
+            ATTR_PORT: self.sys_homeassistant.api_port,
+            ATTR_SSL: self.sys_homeassistant.api_ssl,
+            ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
+            ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
         }

     @api_process
@@ -105,78 +63,83 @@ class APIHomeAssistant(object):
         """Set homeassistant options."""
         body = await api_validate(SCHEMA_OPTIONS, request)

-        if ATTR_DEVICES in body:
-            self.homeassistant.devices = body[ATTR_DEVICES]
-
-        if ATTR_IMAGE in body:
-            self.homeassistant.set_custom(
-                body[ATTR_IMAGE], body[ATTR_LAST_VERSION])
+        if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
+            self.sys_homeassistant.image = body[ATTR_IMAGE]
+            self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]

         if ATTR_BOOT in body:
-            self.homeassistant.boot = body[ATTR_BOOT]
+            self.sys_homeassistant.boot = body[ATTR_BOOT]

         if ATTR_PORT in body:
-            self.homeassistant.api_port = body[ATTR_PORT]
+            self.sys_homeassistant.api_port = body[ATTR_PORT]

         if ATTR_PASSWORD in body:
-            self.homeassistant.api_password = body[ATTR_PASSWORD]
+            self.sys_homeassistant.api_password = body[ATTR_PASSWORD]

         if ATTR_SSL in body:
-            self.homeassistant.api_ssl = body[ATTR_SSL]
+            self.sys_homeassistant.api_ssl = body[ATTR_SSL]

         if ATTR_WATCHDOG in body:
-            self.homeassistant.watchdog = body[ATTR_WATCHDOG]
+            self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]

-        return True
+        if ATTR_WAIT_BOOT in body:
+            self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
+
+        if ATTR_REFRESH_TOKEN in body:
+            self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]
+
+        self.sys_homeassistant.save_data()
+
+    @api_process
+    async def stats(self, request):
+        """Return resource information."""
+        stats = await self.sys_homeassistant.stats()
+        if not stats:
+            raise RuntimeError("No stats available")
+
+        return {
+            ATTR_CPU_PERCENT: stats.cpu_percent,
+            ATTR_MEMORY_USAGE: stats.memory_usage,
+            ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_NETWORK_RX: stats.network_rx,
+            ATTR_NETWORK_TX: stats.network_tx,
+            ATTR_BLK_READ: stats.blk_read,
+            ATTR_BLK_WRITE: stats.blk_write,
+        }

     @api_process
     async def update(self, request):
         """Update homeassistant."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.homeassistant.last_version)
+        version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)

-        if version == self.homeassistant.version:
-            raise RuntimeError("Version {} is already in use".format(version))
-
-        return await asyncio.shield(
-            self.homeassistant.update(version), loop=self.loop)
+        await asyncio.shield(self.sys_homeassistant.update(version))

     @api_process
     def stop(self, request):
         """Stop homeassistant."""
-        return asyncio.shield(self.homeassistant.stop(), loop=self.loop)
+        return asyncio.shield(self.sys_homeassistant.stop())

     @api_process
     def start(self, request):
         """Start homeassistant."""
-        return asyncio.shield(self.homeassistant.run(), loop=self.loop)
+        return asyncio.shield(self.sys_homeassistant.start())

     @api_process
     def restart(self, request):
         """Restart homeassistant."""
-        return asyncio.shield(self.homeassistant.restart(), loop=self.loop)
+        return asyncio.shield(self.sys_homeassistant.restart())

     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
         """Return homeassistant docker logs."""
-        return self.homeassistant.logs()
+        return self.sys_homeassistant.logs()

     @api_process
     async def check(self, request):
         """Check config of homeassistant."""
-        code, message = await self.homeassistant.check_config()
-        if not code:
-            raise RuntimeError(message)
+        result = await self.sys_homeassistant.check_config()
+        if not result.valid:
+            raise RuntimeError(result.log)

         return True
-
-    async def api(self, request):
-        """Proxy API request to Home-Assistant."""
-        path = request.match_info.get('path')
-
-        client = await self.homeassistant_proxy(path, request)
-        return web.Response(
-            body=await client.read(),
-            status=client.status,
-            content_type=client.content_type
-        )
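The check() handler now consumes a structured result object instead of the old (code, message) tuple. The diff only shows that the object exposes .valid and .log; a hedged sketch of such a container, where the class name and defaults are assumptions:

from typing import NamedTuple, Optional


class ConfigResult(NamedTuple):
    """Outcome of a Home Assistant configuration check (sketch)."""
    valid: bool
    log: Optional[str] = None


def raise_if_invalid(result: ConfigResult) -> bool:
    # mirrors the new check() logic: surface the log on failure
    if not result.valid:
        raise RuntimeError(result.log)
    return True


print(raise_if_invalid(ConfigResult(valid=True)))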
@@ -4,88 +4,98 @@ import logging

 import voluptuous as vol

-from .util import api_process_hostcontrol, api_process, api_validate
+from .utils import api_process, api_validate
 from ..const import (
-    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
-    ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
-    ATTR_AUDIO_OUTPUT, ATTR_GPIO)
-from ..validate import ALSA_CHANNEL
+    ATTR_HOSTNAME, ATTR_FEATURES, ATTR_KERNEL, ATTR_OPERATING_SYSTEM,
+    ATTR_CHASSIS, ATTR_DEPLOYMENT, ATTR_STATE, ATTR_NAME, ATTR_DESCRIPTON,
+    ATTR_SERVICES, ATTR_CPE)
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)

-SCHEMA_VERSION = vol.Schema({
-    vol.Optional(ATTR_VERSION): vol.Coerce(str),
-})
+SERVICE = 'service'

 SCHEMA_OPTIONS = vol.Schema({
-    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
-    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
+    vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
 })


-class APIHost(object):
+class APIHost(CoreSysAttributes):
     """Handle rest api for host functions."""

-    def __init__(self, config, loop, host_control, hardware):
-        """Initialize host rest api part."""
-        self.config = config
-        self.loop = loop
-        self.host_control = host_control
-        self.local_hw = hardware
-
     @api_process
     async def info(self, request):
         """Return host information."""
         return {
-            ATTR_TYPE: self.host_control.type,
-            ATTR_VERSION: self.host_control.version,
-            ATTR_LAST_VERSION: self.host_control.last_version,
-            ATTR_FEATURES: self.host_control.features,
-            ATTR_HOSTNAME: self.host_control.hostname,
-            ATTR_OS: self.host_control.os_info,
+            ATTR_CHASSIS: self.sys_host.info.chassis,
+            ATTR_CPE: self.sys_host.info.cpe,
+            ATTR_FEATURES: self.sys_host.supperted_features,
+            ATTR_HOSTNAME: self.sys_host.info.hostname,
+            ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
+            ATTR_DEPLOYMENT: self.sys_host.info.deployment,
+            ATTR_KERNEL: self.sys_host.info.kernel,
         }

     @api_process
     async def options(self, request):
-        """Process host options."""
+        """Edit host settings."""
         body = await api_validate(SCHEMA_OPTIONS, request)

-        if ATTR_AUDIO_OUTPUT in body:
-            self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
-        if ATTR_AUDIO_INPUT in body:
-            self.config.audio_input = body[ATTR_AUDIO_INPUT]
-
-        return True
-
-    @api_process_hostcontrol
-    def reboot(self, request):
-        """Reboot host."""
-        return self.host_control.reboot()
-
-    @api_process_hostcontrol
-    def shutdown(self, request):
-        """Poweroff host."""
-        return self.host_control.shutdown()
-
-    @api_process_hostcontrol
-    async def update(self, request):
-        """Update host OS."""
-        body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.host_control.last_version)
-
-        if version == self.host_control.version:
-            raise RuntimeError("Version {} is already in use".format(version))
-
-        return await asyncio.shield(
-            self.host_control.update(version=version), loop=self.loop)
+        # hostname
+        if ATTR_HOSTNAME in body:
+            await asyncio.shield(
+                self.sys_host.control.set_hostname(body[ATTR_HOSTNAME]))

     @api_process
-    async def hardware(self, request):
-        """Return local hardware infos."""
+    def reboot(self, request):
+        """Reboot host."""
+        return asyncio.shield(self.sys_host.control.reboot())
+
+    @api_process
+    def shutdown(self, request):
+        """Poweroff host."""
+        return asyncio.shield(self.sys_host.control.shutdown())
+
+    @api_process
+    def reload(self, request):
+        """Reload host data."""
+        return asyncio.shield(self.sys_host.reload())
+
+    @api_process
+    async def services(self, request):
+        """Return list of available services."""
+        services = []
+        for unit in self.sys_host.services:
+            services.append({
+                ATTR_NAME: unit.name,
+                ATTR_DESCRIPTON: unit.description,
+                ATTR_STATE: unit.state,
+            })
+
         return {
-            ATTR_SERIAL: list(self.local_hw.serial_devices),
-            ATTR_INPUT: list(self.local_hw.input_devices),
-            ATTR_DISK: list(self.local_hw.disk_devices),
-            ATTR_GPIO: list(self.local_hw.gpio_devices),
-            ATTR_AUDIO: self.local_hw.audio_devices,
+            ATTR_SERVICES: services
         }
+
+    @api_process
+    def service_start(self, request):
+        """Start a service."""
+        unit = request.match_info.get(SERVICE)
+        return asyncio.shield(self.sys_host.services.start(unit))
+
+    @api_process
+    def service_stop(self, request):
+        """Stop a service."""
+        unit = request.match_info.get(SERVICE)
+        return asyncio.shield(self.sys_host.services.stop(unit))
+
+    @api_process
+    def service_reload(self, request):
+        """Reload a service."""
+        unit = request.match_info.get(SERVICE)
+        return asyncio.shield(self.sys_host.services.reload(unit))
+
+    @api_process
+    def service_restart(self, request):
+        """Restart a service."""
+        unit = request.match_info.get(SERVICE)
+        return asyncio.shield(self.sys_host.services.restart(unit))
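A hypothetical consumer of the new host service endpoints. The URL layout is only inferred from the SERVICE = 'service' match key, and the hostname and auth header are assumptions, so treat this purely as an illustration:

import asyncio
import aiohttp


async def restart_unit(token, unit):
    headers = {'X-HASSIO-KEY': token}  # assumed auth header
    # assumed route shape for the {service} match_info key
    url = f'http://hassio/host/services/{unit}/restart'
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.post(url) as resp:
            return (await resp.json())['result']

print(asyncio.run(restart_unit('my-hassio-token', 'systemd-timesyncd.service')))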
@@ -1,43 +0,0 @@
-"""Init file for HassIO network rest api."""
-import logging
-
-import voluptuous as vol
-
-from .util import api_process, api_process_hostcontrol, api_validate
-from ..const import ATTR_HOSTNAME
-
-_LOGGER = logging.getLogger(__name__)
-
-
-SCHEMA_OPTIONS = vol.Schema({
-    vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
-})
-
-
-class APINetwork(object):
-    """Handle rest api for network functions."""
-
-    def __init__(self, config, loop, host_control):
-        """Initialize network rest api part."""
-        self.config = config
-        self.loop = loop
-        self.host_control = host_control
-
-    @api_process
-    async def info(self, request):
-        """Show network settings."""
-        return {
-            ATTR_HOSTNAME: self.host_control.hostname,
-        }
-
-    @api_process_hostcontrol
-    async def options(self, request):
-        """Edit network settings."""
-        body = await api_validate(SCHEMA_OPTIONS, request)
-
-        # hostname
-        if ATTR_HOSTNAME in body:
-            if self.host_control.hostname != body[ATTR_HOSTNAME]:
-                await self.host_control.set_hostname(body[ATTR_HOSTNAME])
-
-        return True
hassio/api/panel/chunk.0ef4ef1053fe3d5107b5.js (new file, 1 line)
File diff suppressed because one or more lines are too long

hassio/api/panel/chunk.0ef4ef1053fe3d5107b5.js.gz (new binary file)
Binary file not shown.

hassio/api/panel/chunk.a8e86d80be46b3b6e16d.js (new file, 2 lines)
File diff suppressed because one or more lines are too long

hassio/api/panel/chunk.a8e86d80be46b3b6e16d.js.LICENSE (new file, 419 lines)
@@ -0,0 +1,419 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

[The same BSD-style Polymer license header is repeated for each bundled component throughout the file, with copyright years ranging from 2014 to 2016.]
hassio/api/panel/chunk.a8e86d80be46b3b6e16d.js.gz (new binary file)
Binary file not shown.

hassio/api/panel/chunk.c77b56beea1d4547ff5f.js (new file, 1 line)
File diff suppressed because one or more lines are too long

hassio/api/panel/chunk.c77b56beea1d4547ff5f.js.gz (new binary file)
Binary file not shown.

hassio/api/panel/chunk.c93f37c558ff32991708.js (new file, 1 line)
@@ -0,0 +1 @@
(window.webpackJsonp=window.webpackJsonp||[]).push([[5],{104:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(99),i=t.n(e),o=t(97),u=t.n(o),a=i.a,c=u.a}}]);

hassio/api/panel/chunk.c93f37c558ff32991708.js.gz (new binary file)
Binary file not shown.

hassio/api/panel/chunk.f3880aa331d3ef2ddf32.js (new file, 2 lines)
File diff suppressed because one or more lines are too long

hassio/api/panel/chunk.f3880aa331d3ef2ddf32.js.LICENSE (new file, 389 lines)
@@ -0,0 +1,389 @@
/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

[The same BSD-style Polymer license header, with copyright year 2017, repeats for each bundled component through the remainder of the file.]
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
BIN
hassio/api/panel/chunk.f3880aa331d3ef2ddf32.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/chunk.ff92199b0d422767d108.js
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/chunk.ff92199b0d422767d108.js.gz
Normal file
Binary file not shown.
1
hassio/api/panel/entrypoint.js
Normal file
@@ -0,0 +1 @@
!function(e){function n(n){for(var t,o,i=n[0],u=n[1],a=0,l=[];a<i.length;a++)o=i[a],r[o]&&l.push(r[o][0]),r[o]=0;for(t in u)Object.prototype.hasOwnProperty.call(u,t)&&(e[t]=u[t]);for(f&&f(n);l.length;)l.shift()()}var t={},r={6:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var i=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=i);var u,a=document.getElementsByTagName("head")[0],f=document.createElement("script");f.charset="utf-8",f.timeout=120,o.nc&&f.setAttribute("nonce",o.nc),f.src=function(e){return o.p+"chunk."+{0:"f3880aa331d3ef2ddf32",1:"a8e86d80be46b3b6e16d",2:"0ef4ef1053fe3d5107b5",3:"ff92199b0d422767d108",4:"c77b56beea1d4547ff5f",5:"c93f37c558ff32991708"}[e]+".js"}(e),u=function(n){f.onerror=f.onload=null,clearTimeout(l);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),i=n&&n.target&&n.target.src,u=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");u.type=o,u.request=i,t[1](u)}r[e]=void 0}};var l=setTimeout(function(){u({type:"timeout",target:f})},12e4);f.onerror=f.onload=u,a.appendChild(f)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],u=i.push.bind(i);i.push=n,i=i.slice();for(var a=0;a<i.length;a++)n(i[a]);var f=u;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(3)]).then(t.bind(null,1)),Promise.all([t.e(0),t.e(1),t.e(2)]).then(t.bind(null,2))})}]);
BIN
hassio/api/panel/entrypoint.js.gz
Normal file
Binary file not shown.
3
hassio/api/panel/hassio-app.html
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/hassio-app.html.gz
Normal file
Binary file not shown.
72
hassio/api/panel/hassio-main-es5.html
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/hassio-main-es5.html.gz
Normal file
Binary file not shown.
72
hassio/api/panel/hassio-main-latest.html
Normal file
File diff suppressed because one or more lines are too long
BIN
hassio/api/panel/hassio-main-latest.html.gz
Normal file
Binary file not shown.
38
hassio/api/panel/index.html
Normal file
@@ -0,0 +1,38 @@
<!doctype html>
<html>
<head>
  <meta charset="utf-8">
  <title>Hass.io</title>
  <meta name='viewport' content='width=device-width, user-scalable=no'>
  <style>
    body {
      height: 100vh;
      margin: 0;
      padding: 0;
    }
  </style>
  <script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
</head>
<body>
  <hassio-app></hassio-app>
  <script>
    function addScript(src) {
      var e = document.createElement('script');
      e.src = src;
      document.write(e.outerHTML);
    }
    var webComponentsSupported = (
      'customElements' in window &&
      'import' in document.createElement('link') &&
      'content' in document.createElement('template'));
    if (!webComponentsSupported) {
      addScript('/static/webcomponents-lite.js');
    }
  </script>
  <!--
    Disabled while we make Home Assistant able to serve the right files.
    <script src="./app.js"></script>
  -->
  <link rel='import' href='./hassio-app.html'>
</body>
</html>
BIN
hassio/api/panel/index.html.gz
Normal file
Binary file not shown.
251
hassio/api/proxy.py
Normal file
@@ -0,0 +1,251 @@
"""Utils for HomeAssistant Proxy."""
import asyncio
from contextlib import asynccontextmanager
import logging

import aiohttp
from aiohttp import web
from aiohttp.web_exceptions import (
    HTTPBadGateway, HTTPInternalServerError, HTTPUnauthorized)
from aiohttp.hdrs import CONTENT_TYPE
import async_timeout

from ..const import HEADER_HA_ACCESS
from ..coresys import CoreSysAttributes
from ..exceptions import HomeAssistantAuthError, HomeAssistantAPIError

_LOGGER = logging.getLogger(__name__)


class APIProxy(CoreSysAttributes):
    """API Proxy for Home-Assistant."""

    def _check_access(self, request):
        """Check the Hass.io token."""
        hassio_token = request.headers.get(HEADER_HA_ACCESS)
        addon = self.sys_addons.from_uuid(hassio_token)

        if not addon:
            _LOGGER.warning("Unknown HomeAssistant API access!")
        elif not addon.access_homeassistant_api:
            _LOGGER.warning("Not permitted API access: %s", addon.slug)
        else:
            _LOGGER.info("%s access from %s", request.path, addon.slug)
            return

        raise HTTPUnauthorized()

    @asynccontextmanager
    async def _api_client(self, request, path, timeout=300):
        """Return a client request with proxy origin for Home-Assistant."""
        try:
            # read data
            with async_timeout.timeout(30):
                data = await request.read()

            if data:
                content_type = request.content_type
            else:
                content_type = None

            async with self.sys_homeassistant.make_request(
                    request.method.lower(), f'api/{path}',
                    content_type=content_type,
                    data=data,
                    timeout=timeout,
            ) as resp:
                yield resp
                return

        except HomeAssistantAuthError:
            _LOGGER.error("Authentication error on API for request %s", path)
        except HomeAssistantAPIError:
            _LOGGER.error("Error on API for request %s", path)
        except aiohttp.ClientError as err:
            _LOGGER.error("Client error on API %s request %s", path, err)
        except asyncio.TimeoutError:
            _LOGGER.error("Client timeout error on API request %s", path)

        raise HTTPBadGateway()

    async def stream(self, request):
        """Proxy HomeAssistant EventStream Requests."""
        self._check_access(request)

        _LOGGER.info("Home-Assistant EventStream start")
        async with self._api_client(request, 'stream', timeout=None) as client:
            response = web.StreamResponse()
            response.content_type = request.headers.get(CONTENT_TYPE)
            try:
                await response.prepare(request)
                while True:
                    data = await client.content.read(10)
                    if not data:
                        break
                    await response.write(data)

            except aiohttp.ClientError:
                pass

            finally:
                client.close()
                _LOGGER.info("Home-Assistant EventStream close")

            return response

    async def api(self, request):
        """Proxy HomeAssistant API Requests."""
        self._check_access(request)

        # Normal request
        path = request.match_info.get('path', '')
        async with self._api_client(request, path) as client:
            data = await client.read()
            return web.Response(
                body=data,
                status=client.status,
                content_type=client.content_type
            )

    async def _websocket_client(self):
        """Initialize a websocket api connection."""
        url = f"{self.sys_homeassistant.api_url}/api/websocket"

        try:
            client = await self.sys_websession_ssl.ws_connect(
                url, heartbeat=60, verify_ssl=False)

            # handle authentication
            data = await client.receive_json()

            if data.get('type') == 'auth_ok':
                return client

            if data.get('type') != 'auth_required':
                # Invalid protocol
                _LOGGER.error(
                    'Got unexpected response from HA websocket: %s', data)
                raise HTTPBadGateway()

            if self.sys_homeassistant.refresh_token:
                await self.sys_homeassistant.ensure_access_token()
                await client.send_json({
                    'type': 'auth',
                    'access_token': self.sys_homeassistant.access_token,
                })
            else:
                await client.send_json({
                    'type': 'auth',
                    'api_password': self.sys_homeassistant.api_password,
                })

            data = await client.receive_json()

            if data.get('type') == 'auth_ok':
                return client

            # Renew the token if it is invalid
            if (data.get('type') == 'invalid_auth' and
                    self.sys_homeassistant.refresh_token):
                self.sys_homeassistant.access_token = None
                return await self._websocket_client()

            raise HomeAssistantAuthError()

        except (RuntimeError, ValueError) as err:
            _LOGGER.error("Client error on websocket API %s.", err)
        except HomeAssistantAuthError:
            _LOGGER.error("Failed authentication to HomeAssistant websocket")

        raise HTTPBadGateway()

    async def websocket(self, request):
        """Initialize a websocket api connection."""
        _LOGGER.info("Home-Assistant Websocket API request initialize")

        # init server
        server = web.WebSocketResponse(heartbeat=60)
        await server.prepare(request)

        # handle authentication
        try:
            await server.send_json({
                'type': 'auth_required',
                'ha_version': self.sys_homeassistant.version,
            })

            # Check API access
            response = await server.receive_json()
            hassio_token = (response.get('api_password') or
                            response.get('access_token'))
            addon = self.sys_addons.from_uuid(hassio_token)

            if not addon or not addon.access_homeassistant_api:
                _LOGGER.warning("Unauthorized websocket access!")
                await server.send_json({
                    'type': 'auth_invalid',
                    'message': 'Invalid access',
                })
                return server

            _LOGGER.info("Websocket access from %s", addon.slug)

            await server.send_json({
                'type': 'auth_ok',
                'ha_version': self.sys_homeassistant.version,
            })
        except (RuntimeError, ValueError) as err:
            _LOGGER.error("Can't initialize handshake: %s", err)
            raise HTTPInternalServerError() from None

        # init connection to hass
        client = await self._websocket_client()

        _LOGGER.info("Home-Assistant Websocket API request running")
        try:
            client_read = None
            server_read = None
            while not server.closed and not client.closed:
                if not client_read:
                    client_read = self.sys_create_task(
                        client.receive_str())
                if not server_read:
                    server_read = self.sys_create_task(
                        server.receive_str())

                # wait until data needs to be processed
                await asyncio.wait(
                    [client_read, server_read],
                    return_when=asyncio.FIRST_COMPLETED
                )

                # server
                if server_read.done() and not client.closed:
                    server_read.exception()
                    await client.send_str(server_read.result())
                    server_read = None

                # client
                if client_read.done() and not server.closed:
                    client_read.exception()
                    await server.send_str(client_read.result())
                    client_read = None

        except asyncio.CancelledError:
            pass

        except RuntimeError as err:
            _LOGGER.info("Home-Assistant Websocket API error: %s", err)

        finally:
            if client_read:
                client_read.cancel()
            if server_read:
                server_read.cancel()

            # close connections
            await client.close()
            await server.close()

        _LOGGER.info("Home-Assistant Websocket API connection is closed")
        return server
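Note: a minimal sketch of how an add-on process could exercise this proxy, assuming the Supervisor serves it under /homeassistant/api/ (per NO_SECURITY_CHECK in the security middleware below) and that HEADER_HA_ACCESS resolves to the X-HA-ACCESS header; the URL, header name, and token value here are illustrative, not taken from this code:

import asyncio

import aiohttp

SUPERVISOR_URL = 'http://hassio/homeassistant/api/states'  # assumed route
ADDON_TOKEN = 'example-addon-uuid'  # assumed: the add-on uuid that _check_access() looks up


async def fetch_states():
    """Call the proxied Home Assistant API with the add-on token header."""
    headers = {'X-HA-ACCESS': ADDON_TOKEN}  # assumed value of HEADER_HA_ACCESS
    async with aiohttp.ClientSession() as session:
        async with session.get(SUPERVISOR_URL, headers=headers) as resp:
            return await resp.json()


if __name__ == '__main__':
    print(asyncio.get_event_loop().run_until_complete(fetch_states()))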
@@ -1,102 +1,59 @@
"""Init file for HassIO security rest api."""
from datetime import datetime, timedelta
import io
"""Handle security part of this API."""
import logging
import hashlib
import os
import re

from aiohttp import web
import voluptuous as vol
import pyotp
import pyqrcode
from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPUnauthorized

from .util import api_process, api_validate, hash_password
from ..const import ATTR_INITIALIZE, ATTR_PASSWORD, ATTR_TOTP, ATTR_SESSION
from ..const import HEADER_TOKEN, REQUEST_FROM
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

SCHEMA_PASSWORD = vol.Schema({
    vol.Required(ATTR_PASSWORD): vol.Coerce(str),
})

SCHEMA_SESSION = SCHEMA_PASSWORD.extend({
    vol.Optional(ATTR_TOTP, default=None): vol.Coerce(str),
})
NO_SECURITY_CHECK = set((
    re.compile(r"^/homeassistant/api/.*$"),
    re.compile(r"^/homeassistant/websocket$"),
    re.compile(r"^/supervisor/ping$"),
))


class APISecurity(object):
    """Handle rest api for security functions."""
class SecurityMiddleware(CoreSysAttributes):
    """Security middleware functions."""

    def __init__(self, config, loop):
        """Initialize security rest api part."""
        self.config = config
        self.loop = loop
    def __init__(self, coresys):
        """Initialize security middleware."""
        self.coresys = coresys

    def _check_password(self, body):
        """Check if password is valid and security is initialize."""
        if not self.config.security_initialize:
            raise RuntimeError("First set a password")
    @middleware
    async def token_validation(self, request, handler):
        """Check security access of this layer."""
        hassio_token = request.headers.get(HEADER_TOKEN)

        password = hash_password(body[ATTR_PASSWORD])
        if password != self.config.security_password:
            raise RuntimeError("Wrong password")
        # Ignore security check
        for rule in NO_SECURITY_CHECK:
            if rule.match(request.path):
                _LOGGER.debug("Passthrough %s", request.path)
                return await handler(request)

    @api_process
    async def info(self, request):
        """Return host information."""
        return {
            ATTR_INITIALIZE: self.config.security_initialize,
            ATTR_TOTP: self.config.security_totp is not None,
        }
        # Home-Assistant
        if hassio_token == self.sys_homeassistant.uuid:
            _LOGGER.debug("%s access from Home-Assistant", request.path)
            request[REQUEST_FROM] = 'homeassistant'

    @api_process
    async def options(self, request):
        """Set options / password."""
        body = await api_validate(SCHEMA_PASSWORD, request)
        # Host
        if hassio_token == self.sys_machine_id:
            _LOGGER.debug("%s access from Host", request.path)
            request[REQUEST_FROM] = 'host'

        if self.config.security_initialize:
            raise RuntimeError("Password is already set!")
        # Add-on
        addon = self.sys_addons.from_uuid(hassio_token) \
            if hassio_token else None
        if addon:
            _LOGGER.info("%s access from %s", request.path, addon.slug)
            request[REQUEST_FROM] = addon.slug

        self.config.security_password = hash_password(body[ATTR_PASSWORD])
        self.config.security_initialize = True
        return True
        if request.get(REQUEST_FROM):
            return await handler(request)

    @api_process
    async def totp(self, request):
        """Set and initialze TOTP."""
        body = await api_validate(SCHEMA_PASSWORD, request)
        self._check_password(body)

        # generate TOTP
        totp_init_key = pyotp.random_base32()
        totp = pyotp.TOTP(totp_init_key)

        # init qrcode
        buff = io.BytesIO()

        qrcode = pyqrcode.create(totp.provisioning_uri("Hass.IO"))
        qrcode.svg(buff)

        # finish
        self.config.security_totp = totp_init_key
        return web.Response(body=buff.getvalue(), content_type='image/svg+xml')

    @api_process
    async def session(self, request):
        """Set and initialze session."""
        body = await api_validate(SCHEMA_SESSION, request)
        self._check_password(body)

        # check TOTP
        if self.config.security_totp:
            totp = pyotp.TOTP(self.config.security_totp)
            if body[ATTR_TOTP] != totp.now():
                raise RuntimeError("Invalid TOTP token!")

        # create session
        valid_until = datetime.now() + timedelta(days=1)
        session = hashlib.sha256(os.urandom(54)).hexdigest()

        # store session
        self.config.add_security_session(session, valid_until)
        return {ATTR_SESSION: session}
        _LOGGER.warning("Invalid token for access %s", request.path)
        raise HTTPUnauthorized()
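Note: token_validation is a standard aiohttp middleware, so attaching it follows the usual pattern; a self-contained toy version of that wiring (the header name and route are illustrative, not taken from this code):

from aiohttp import web
from aiohttp.web import middleware


@middleware
async def token_check(request, handler):
    # Mirrors the shape of SecurityMiddleware.token_validation above:
    # reject the request unless a token header is present.
    if request.headers.get('X-HASSIO-KEY') is None:  # assumed header name
        raise web.HTTPUnauthorized()
    return await handler(request)


async def info(request):
    return web.json_response({'result': 'ok'})


app = web.Application(middlewares=[token_check])
app.router.add_get('/info', info)

if __name__ == '__main__':
    web.run_app(app)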
55
hassio/api/services.py
Normal file
@@ -0,0 +1,55 @@
"""Init file for HassIO network rest api."""

from .utils import api_process, api_validate
from ..const import (
    ATTR_AVAILABLE, ATTR_PROVIDER, ATTR_SLUG, ATTR_SERVICES, REQUEST_FROM)
from ..coresys import CoreSysAttributes


class APIServices(CoreSysAttributes):
    """Handle rest api for services functions."""

    def _extract_service(self, request):
        """Return service, throw an exception if it doesn't exist."""
        service = self.sys_services.get(request.match_info.get('service'))
        if not service:
            raise RuntimeError("Service does not exist")

        return service

    @api_process
    async def list(self, request):
        """Show registered services."""
        services = []
        for service in self.sys_services.list_services:
            services.append({
                ATTR_SLUG: service.slug,
                ATTR_AVAILABLE: service.enabled,
                ATTR_PROVIDER: service.provider,
            })

        return {ATTR_SERVICES: services}

    @api_process
    async def set_service(self, request):
        """Write data into a service."""
        service = self._extract_service(request)
        body = await api_validate(service.schema, request)

        return service.set_service_data(request[REQUEST_FROM], body)

    @api_process
    async def get_service(self, request):
        """Read data from a service."""
        service = self._extract_service(request)

        return {
            ATTR_AVAILABLE: service.enabled,
            service.slug: service.get_service_data(),
        }

    @api_process
    async def del_service(self, request):
        """Delete data from a service."""
        service = self._extract_service(request)
        return service.del_service_data(request[REQUEST_FROM])
@@ -1,61 +1,71 @@
"""Init file for HassIO snapshot rest api."""
import asyncio
import logging
from pathlib import Path
from tempfile import TemporaryDirectory

from aiohttp import web
import voluptuous as vol

from .util import api_process, api_validate
from .utils import api_process, api_validate
from ..snapshots.validate import ALL_FOLDERS
from ..const import (
    ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
    ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
    ATTR_DEVICES, ATTR_SNAPSHOTS)
    ATTR_SNAPSHOTS, ATTR_PASSWORD, ATTR_PROTECTED, CONTENT_TYPE_TAR)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema({
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
    vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS):
        vol.All([vol.Coerce(str)], vol.Unique()),
    vol.Optional(ATTR_FOLDERS):
        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
})

SCHEMA_RESTORE_FULL = vol.Schema({
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})

SCHEMA_SNAPSHOT_FULL = vol.Schema({
    vol.Optional(ATTR_NAME): vol.Coerce(str),
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})

SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS):
        vol.All([vol.Coerce(str)], vol.Unique()),
    vol.Optional(ATTR_FOLDERS):
        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
})


class APISnapshots(object):
class APISnapshots(CoreSysAttributes):
    """Handle rest api for snapshot functions."""

    def __init__(self, config, loop, snapshots):
        """Initialize network rest api part."""
        self.config = config
        self.loop = loop
        self.snapshots = snapshots

    def _extract_snapshot(self, request):
        """Return addon and if not exists trow a exception."""
        snapshot = self.snapshots.get(request.match_info.get('snapshot'))
        """Return snapshot, throw an exception if it doesn't exist."""
        snapshot = self.sys_snapshots.get(request.match_info.get('snapshot'))
        if not snapshot:
            raise RuntimeError("Snapshot not exists")
            raise RuntimeError("Snapshot does not exist")
        return snapshot

    @api_process
    async def list(self, request):
        """Return snapshot list."""
        data_snapshots = []
        for snapshot in self.snapshots.list_snapshots:
        for snapshot in self.sys_snapshots.list_snapshots:
            data_snapshots.append({
                ATTR_SLUG: snapshot.slug,
                ATTR_NAME: snapshot.name,
                ATTR_DATE: snapshot.date,
                ATTR_TYPE: snapshot.sys_type,
                ATTR_PROTECTED: snapshot.protected,
            })

        return {
@@ -65,7 +75,7 @@ class APISnapshots(object):
    @api_process
    async def reload(self, request):
        """Reload snapshot list."""
        await asyncio.shield(self.snapshots.reload(), loop=self.loop)
        await asyncio.shield(self.sys_snapshots.reload())
        return True

    @api_process
@@ -79,6 +89,7 @@ class APISnapshots(object):
                ATTR_SLUG: addon_data[ATTR_SLUG],
                ATTR_NAME: addon_data[ATTR_NAME],
                ATTR_VERSION: addon_data[ATTR_VERSION],
                ATTR_SIZE: addon_data[ATTR_SIZE],
            })

        return {
@@ -87,10 +98,8 @@ class APISnapshots(object):
            ATTR_NAME: snapshot.name,
            ATTR_DATE: snapshot.date,
            ATTR_SIZE: snapshot.size,
            ATTR_HOMEASSISTANT: {
                ATTR_VERSION: snapshot.homeassistant_version,
                ATTR_DEVICES: snapshot.homeassistant_devices,
            },
            ATTR_PROTECTED: snapshot.protected,
            ATTR_HOMEASSISTANT: snapshot.homeassistant_version,
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: snapshot.repositories,
            ATTR_FOLDERS: snapshot.folders,
@@ -100,36 +109,78 @@ class APISnapshots(object):
    async def snapshot_full(self, request):
        """Full-Snapshot a snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
        return await asyncio.shield(
            self.snapshots.do_snapshot_full(**body), loop=self.loop)
        snapshot = await asyncio.shield(
            self.sys_snapshots.do_snapshot_full(**body))

        if snapshot:
            return {ATTR_SLUG: snapshot.slug}
        return False

    @api_process
    async def snapshot_partial(self, request):
        """Partial-Snapshot a snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        return await asyncio.shield(
            self.snapshots.do_snapshot_partial(**body), loop=self.loop)
        snapshot = await asyncio.shield(
            self.sys_snapshots.do_snapshot_partial(**body))

        if snapshot:
            return {ATTR_SLUG: snapshot.slug}
        return False

    @api_process
    def restore_full(self, request):
    async def restore_full(self, request):
        """Full-Restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        return asyncio.shield(
            self.snapshots.do_restore_full(snapshot), loop=self.loop)
        body = await api_validate(SCHEMA_RESTORE_FULL, request)

        return await asyncio.shield(
            self.sys_snapshots.do_restore_full(snapshot, **body))

    @api_process
    async def restore_partial(self, request):
        """Partial-Restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)

        return await asyncio.shield(
            self.snapshots.do_restore_partial(snapshot, **body),
            loop=self.loop
        )
            self.sys_snapshots.do_restore_partial(snapshot, **body))

    @api_process
    async def remove(self, request):
        """Remove a snapshot."""
        snapshot = self._extract_snapshot(request)
        return self.snapshots.remove(snapshot)
        return self.sys_snapshots.remove(snapshot)

    async def download(self, request):
        """Download a snapshot file."""
        snapshot = self._extract_snapshot(request)

        _LOGGER.info("Download snapshot %s", snapshot.slug)
        response = web.FileResponse(snapshot.tarfile)
        response.content_type = CONTENT_TYPE_TAR
        return response

    @api_process
    async def upload(self, request):
        """Upload a snapshot file."""
        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
            tar_file = Path(temp_dir, f"snapshot.tar")

            try:
                with tar_file.open('wb') as snapshot:
                    async for data in request.content.iter_any():
                        snapshot.write(data)

            except OSError as err:
                _LOGGER.error("Can't write new snapshot file: %s", err)
                return False

            except asyncio.CancelledError:
                return False

            snapshot = await asyncio.shield(
                self.sys_snapshots.import_snapshot(tar_file))

        if snapshot:
            return {ATTR_SLUG: snapshot.slug}
        return False
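Note: the move from plain [vol.Coerce(str)] lists to vol.All([...], vol.Unique()) means a snapshot or restore request that names the same add-on or folder twice now fails validation instead of being passed through; a standalone check of that voluptuous behavior:

import voluptuous as vol

schema = vol.Schema(vol.All([vol.Coerce(str)], vol.Unique()))

print(schema(['core_ssh', 'core_samba']))  # distinct slugs pass

try:
    schema(['core_ssh', 'core_ssh'])  # duplicate slug is rejected
except vol.Invalid as err:
    print('rejected:', err)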
@@ -4,21 +4,24 @@ import logging

import voluptuous as vol

from .util import api_process, api_process_raw, api_validate
from .utils import api_process, api_process_raw, api_validate
from ..const import (
    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_ARCH,
    HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
    ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
    ATTR_STATE, CONTENT_TYPE_BINARY)
from ..validate import validate_timezone
    ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
    ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
    ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
from ..coresys import CoreSysAttributes
from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES, CHANNELS

_LOGGER = logging.getLogger(__name__)

SCHEMA_OPTIONS = vol.Schema({
    # pylint: disable=no-value-for-parameter
    vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
    vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
    vol.Optional(ATTR_CHANNEL): CHANNELS,
    vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
    vol.Optional(ATTR_TIMEZONE): validate_timezone,
    vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
})

SCHEMA_VERSION = vol.Schema({
@@ -26,20 +29,9 @@ SCHEMA_VERSION = vol.Schema({
})


class APISupervisor(object):
class APISupervisor(CoreSysAttributes):
    """Handle rest api for supervisor functions."""

    def __init__(self, config, loop, supervisor, snapshots, addons,
                 host_control, updater):
        """Initialize supervisor rest api part."""
        self.config = config
        self.loop = loop
        self.supervisor = supervisor
        self.addons = addons
        self.snapshots = snapshots
        self.host_control = host_control
        self.updater = updater

    @api_process
    async def ping(self, request):
        """Return ok for signal that the api is ready."""
@@ -49,7 +41,7 @@ class APISupervisor(object):
    async def info(self, request):
        """Return host information."""
        list_addons = []
        for addon in self.addons.list_addons:
        for addon in self.sys_addons.list_addons:
            if addon.is_installed:
                list_addons.append({
                    ATTR_NAME: addon.name,
@@ -59,17 +51,19 @@ class APISupervisor(object):
                    ATTR_VERSION: addon.last_version,
                    ATTR_INSTALLED: addon.version_installed,
                    ATTR_REPOSITORY: addon.repository,
                    ATTR_ICON: addon.with_icon,
                    ATTR_LOGO: addon.with_logo,
                })

        return {
            ATTR_VERSION: HASSIO_VERSION,
            ATTR_LAST_VERSION: self.updater.version_hassio,
            ATTR_BETA_CHANNEL: self.updater.beta_channel,
            ATTR_ARCH: self.config.arch,
            ATTR_TIMEZONE: self.config.timezone,
            ATTR_LAST_VERSION: self.sys_updater.version_hassio,
            ATTR_CHANNEL: self.sys_updater.channel,
            ATTR_ARCH: self.sys_arch,
            ATTR_WAIT_BOOT: self.sys_config.wait_boot,
            ATTR_TIMEZONE: self.sys_config.timezone,
            ATTR_ADDONS: list_addons,
            ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
            ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
        }

    @api_process
@@ -77,41 +71,60 @@ class APISupervisor(object):
        """Set supervisor options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_BETA_CHANNEL in body:
            self.updater.beta_channel = body[ATTR_BETA_CHANNEL]
        if ATTR_CHANNEL in body:
            self.sys_updater.channel = body[ATTR_CHANNEL]

        if ATTR_TIMEZONE in body:
            self.config.timezone = body[ATTR_TIMEZONE]
            self.sys_config.timezone = body[ATTR_TIMEZONE]

        if ATTR_WAIT_BOOT in body:
            self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]

        if ATTR_ADDONS_REPOSITORIES in body:
            new = set(body[ATTR_ADDONS_REPOSITORIES])
            await asyncio.shield(self.addons.load_repositories(new))
            await asyncio.shield(self.sys_addons.load_repositories(new))

        self.sys_updater.save_data()
        self.sys_config.save_data()
        return True

    @api_process
    async def stats(self, request):
        """Return resource information."""
        stats = await self.sys_supervisor.stats()
        if not stats:
            raise RuntimeError("No stats available")

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request):
        """Update supervisor OS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.updater.version_hassio)
        version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)

        if version == self.supervisor.version:
        if version == self.sys_supervisor.version:
            raise RuntimeError("Version {} is already in use".format(version))

        return await asyncio.shield(
            self.supervisor.update(version), loop=self.loop)
            self.sys_supervisor.update(version))

    @api_process
    async def reload(self, request):
        """Reload addons, config ect."""
        """Reload addons, config etc."""
        tasks = [
            self.addons.reload(),
            self.snapshots.reload(),
            self.updater.fetch_data(),
            self.host_control.load()
            self.sys_updater.reload(),
        ]
        results, _ = await asyncio.shield(
            asyncio.wait(tasks, loop=self.loop), loop=self.loop)
            asyncio.wait(tasks))

        for result in results:
            if result.exception() is not None:
@@ -122,4 +135,4 @@ class APISupervisor(object):
    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return supervisor docker logs."""
        return self.supervisor.logs()
        return self.sys_supervisor.logs()
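Note: for reference, a request body accepted by the new SCHEMA_OPTIONS would look roughly like this, assuming the ATTR_* constants map to the JSON keys shown (the key names and allowed values are assumptions here, since CHANNELS, REPOSITORIES and WAIT_BOOT are defined elsewhere in hassio.validate):

options_body = {
    'channel': 'stable',          # assumed key for ATTR_CHANNEL; value checked by CHANNELS
    'timezone': 'Europe/Vienna',  # validated by validate_timezone
    'wait_boot': 5,               # assumed key for ATTR_WAIT_BOOT; range checked by WAIT_BOOT
    'addons_repositories': [      # assumed key for ATTR_ADDONS_REPOSITORIES
        'https://github.com/hassio-addons/repository',
    ],
}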
@@ -1,26 +1,27 @@
 """Init file for HassIO util for rest api."""
 import json
-import hashlib
 import logging

 from aiohttp import web
-from aiohttp.web_exceptions import HTTPServiceUnavailable
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

 from ..const import (
     JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
     CONTENT_TYPE_BINARY)
+from ..exceptions import HassioError

 _LOGGER = logging.getLogger(__name__)


 def json_loads(data):
     """Extract json from string with support for '' and None."""
     if not data:
         return {}
     try:
         return json.loads(data)
     except json.JSONDecodeError:
-        return {}
+        raise RuntimeError("Invalid json")


 def api_process(method):
@@ -31,41 +32,20 @@ def api_process(method):
             answer = await method(api, *args, **kwargs)
         except RuntimeError as err:
             return api_return_error(message=str(err))
+        except HassioError:
+            return api_return_error()

         if isinstance(answer, dict):
             return api_return_ok(data=answer)
+        if isinstance(answer, web.Response):
+            return answer
-        elif answer:
-            return api_return_ok()
         elif isinstance(answer, bool) and not answer:
             return api_return_error()
         return api_return_ok()

     return wrap_api


-def api_process_hostcontrol(method):
-    """Wrap HostControl calls to rest api."""
-    async def wrap_hostcontrol(api, *args, **kwargs):
-        """Return host information."""
-        if not api.host_control.active:
-            raise HTTPServiceUnavailable()
-
-        try:
-            answer = await method(api, *args, **kwargs)
-        except RuntimeError as err:
-            return api_return_error(message=str(err))
-
-        if isinstance(answer, dict):
-            return api_return_ok(data=answer)
-        elif answer is None:
-            return api_return_error("Function is not supported")
-        elif answer:
-            return api_return_ok()
-        return api_return_error()
-
-    return wrap_hostcontrol
-
-
 def api_process_raw(content):
     """Wrap content_type into function."""
     def wrap_method(method):
@@ -78,6 +58,9 @@ def api_process_raw(content):
             except RuntimeError as err:
                 msg_data = str(err).encode()
                 msg_type = CONTENT_TYPE_BINARY
+            except HassioError:
+                msg_data = b''
+                msg_type = CONTENT_TYPE_BINARY

             return web.Response(body=msg_data, content_type=msg_type)

@@ -86,7 +69,7 @@ def api_process_raw(content):


 def api_return_error(message=None):
-    """Return a API error message."""
+    """Return an API error message."""
     return web.json_response({
         JSON_RESULT: RESULT_ERROR,
         JSON_MESSAGE: message,
@@ -94,7 +77,7 @@ def api_return_error(message=None):


 def api_return_ok(data=None):
-    """Return a API ok answer."""
+    """Return an API ok answer."""
     return web.json_response({
         JSON_RESULT: RESULT_OK,
         JSON_DATA: data or {},
@@ -110,9 +93,3 @@ async def api_validate(schema, request):
         raise RuntimeError(humanize_error(data, ex)) from None

     return data
-
-
-def hash_password(password):
-    """Hash and salt our passwords."""
-    key = ")*()*SALT_HASSIO2123{}6554547485HSKA!!*JSLAfdasda$".format(password)
-    return hashlib.sha256(key.encode()).hexdigest()
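The point of `api_process` is that handlers never build response envelopes themselves: a returned dict becomes the `data` payload of an ok answer, a returned `web.Response` passes through untouched, and `False` or a raised error becomes an error envelope. A stripped-down sketch of the same mapping outside aiohttp, with JSON strings standing in for `web.json_response`:

import functools
import json


def api_process(method):
    """Minimal re-creation of the decorator's result mapping (sketch)."""
    @functools.wraps(method)
    async def wrap_api(*args, **kwargs):
        try:
            answer = await method(*args, **kwargs)
        except RuntimeError as err:
            return json.dumps({'result': 'error', 'message': str(err)})

        if isinstance(answer, dict):
            return json.dumps({'result': 'ok', 'data': answer})
        if isinstance(answer, bool) and not answer:
            return json.dumps({'result': 'error', 'message': None})
        return json.dumps({'result': 'ok', 'data': {}})

    return wrap_api


@api_process
async def handler():
    # A handler only returns its payload; the decorator adds the envelope.
    return {'version': '124'}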
@@ -7,15 +7,63 @@ from pathlib import Path

 from colorlog import ColoredFormatter

 from .core import HassIO
+from .addons import AddonManager
+from .api import RestAPI
 from .const import SOCKET_DOCKER
-from .config import CoreConfig
+from .coresys import CoreSys
+from .supervisor import Supervisor
+from .homeassistant import HomeAssistant
+from .snapshots import SnapshotManager
+from .tasks import Tasks
+from .updater import Updater
+from .services import ServiceManager
+from .services import Discovery
+from .host import HostManager
+from .dbus import DBusManager
+from .hassos import HassOS

 _LOGGER = logging.getLogger(__name__)

+ENV_SHARE = 'SUPERVISOR_SHARE'
+ENV_NAME = 'SUPERVISOR_NAME'
+ENV_REPO = 'HOMEASSISTANT_REPOSITORY'

-def initialize_system_data():
+MACHINE_ID = Path('/etc/machine-id')
+
+
+def initialize_coresys(loop):
+    """Initialize HassIO coresys/objects."""
+    coresys = CoreSys(loop)
+
+    # Initialize core objects
+    coresys.core = HassIO(coresys)
+    coresys.updater = Updater(coresys)
+    coresys.api = RestAPI(coresys)
+    coresys.supervisor = Supervisor(coresys)
+    coresys.homeassistant = HomeAssistant(coresys)
+    coresys.addons = AddonManager(coresys)
+    coresys.snapshots = SnapshotManager(coresys)
+    coresys.host = HostManager(coresys)
+    coresys.tasks = Tasks(coresys)
+    coresys.services = ServiceManager(coresys)
+    coresys.discovery = Discovery(coresys)
+    coresys.dbus = DBusManager(coresys)
+    coresys.hassos = HassOS(coresys)
+
+    # bootstrap config
+    initialize_system_data(coresys)
+
+    # Set Machine/Host ID
+    if MACHINE_ID.exists():
+        coresys.machine_id = MACHINE_ID.read_text().strip()
+
+    return coresys
+
+
+def initialize_system_data(coresys):
     """Setup default config and create folders."""
-    config = CoreConfig()
+    config = coresys.config

     # homeassistant config folder
     if not config.path_config.is_dir():
@@ -59,11 +107,17 @@ def initialize_system_data():
         _LOGGER.info("Create hassio share folder %s", config.path_share)
         config.path_share.mkdir()

+    # apparmor folder
+    if not config.path_apparmor.is_dir():
+        _LOGGER.info("Create hassio apparmor folder %s", config.path_apparmor)
+        config.path_apparmor.mkdir()
+
     return config


-def migrate_system_env(config):
+def migrate_system_env(coresys):
     """Cleanup some stuff after update."""
+    config = coresys.config

     # hass.io 0.37 -> 0.38
     old_build = Path(config.path_hassio, "addons/build")
@@ -102,8 +156,7 @@ def initialize_logging():
 def check_environment():
     """Check if all environment are exists."""
     # check environment variables
-    for key in ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME',
-                'HOMEASSISTANT_REPOSITORY'):
+    for key in (ENV_SHARE, ENV_NAME, ENV_REPO):
         try:
             os.environ[key]
         except KeyError:
@@ -117,7 +170,12 @@ def check_environment():

     # check socat exec
     if not shutil.which('socat'):
-        _LOGGER.fatal("Can0t find socat program!")
+        _LOGGER.fatal("Can't find socat program!")
         return False

+    # check socat exec
+    if not shutil.which('gdbus'):
+        _LOGGER.fatal("Can't find gdbus program!")
+        return False
+
     return True
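The startup contract here is simple: refuse to boot unless the container was launched with the expected environment and helper binaries. A standalone rendering of the same check, under the assumption that printing replaces the logger for brevity:

import os
import shutil

REQUIRED_ENV = ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME', 'HOMEASSISTANT_REPOSITORY')
REQUIRED_TOOLS = ('socat', 'gdbus')


def check_environment() -> bool:
    """Validate required env vars and helper programs before startup."""
    for key in REQUIRED_ENV:
        if key not in os.environ:
            print(f"Missing environment variable: {key}")
            return False

    for tool in REQUIRED_TOOLS:
        if shutil.which(tool) is None:
            print(f"Missing required program: {tool}")
            return False

    return True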
hassio/config.py (109 changes)
@@ -5,10 +5,10 @@ import os
 from pathlib import Path, PurePath

 from .const import (
-    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
-    ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
-    ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT)
-from .tools import JsonConfig, parse_datetime
+    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
+    ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
+from .utils.dt import parse_datetime
+from .utils.json import JsonConfig
 from .validate import SCHEMA_HASSIO_CONFIG

 _LOGGER = logging.getLogger(__name__)
@@ -25,6 +25,7 @@ ADDONS_DATA = PurePath("addons/data")
 BACKUP_DATA = PurePath("backup")
 SHARE_DATA = PurePath("share")
 TMP_DATA = PurePath("tmp")
+APPARMOR_DATA = PurePath("apparmor")

 DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()

@@ -35,7 +36,6 @@ class CoreConfig(JsonConfig):
     def __init__(self):
         """Initialize config object."""
         super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
-        self.arch = None

     @property
     def timezone(self):
@@ -46,7 +46,16 @@ class CoreConfig(JsonConfig):
     def timezone(self, value):
         """Set system timezone."""
         self._data[ATTR_TIMEZONE] = value
-        self.save()
+
+    @property
+    def wait_boot(self):
+        """Return wait time for auto boot stages."""
+        return self._data[ATTR_WAIT_BOOT]
+
+    @wait_boot.setter
+    def wait_boot(self, value):
+        """Set wait boot time."""
+        self._data[ATTR_WAIT_BOOT] = value

     @property
     def last_boot(self):
@@ -62,7 +71,6 @@ class CoreConfig(JsonConfig):
     def last_boot(self, value):
         """Set last boot datetime."""
         self._data[ATTR_LAST_BOOT] = value.isoformat()
-        self.save()

     @property
     def path_hassio(self):
@@ -129,6 +137,11 @@ class CoreConfig(JsonConfig):
         """Return hass.io temp folder."""
         return Path(HASSIO_DATA, TMP_DATA)

+    @property
+    def path_extern_tmp(self):
+        """Return hass.io temp folder for docker."""
+        return PurePath(self.path_extern_hassio, TMP_DATA)
+
     @property
     def path_backup(self):
         """Return root backup data folder."""
@@ -144,6 +157,11 @@ class CoreConfig(JsonConfig):
         """Return root share data folder."""
         return Path(HASSIO_DATA, SHARE_DATA)

+    @property
+    def path_apparmor(self):
+        """Return root apparmor profile folder."""
+        return Path(HASSIO_DATA, APPARMOR_DATA)
+
     @property
     def path_extern_share(self):
         """Return root share data folder extern for docker."""
@@ -160,7 +178,6 @@ class CoreConfig(JsonConfig):
             return

         self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
-        self.save()

     def drop_addon_repository(self, repo):
         """Remove a custom repository from list."""
@@ -168,79 +185,3 @@ class CoreConfig(JsonConfig):
             return

         self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
-        self.save()
-
-    @property
-    def security_initialize(self):
-        """Return is security was initialize."""
-        return self._data[ATTR_SECURITY]
-
-    @security_initialize.setter
-    def security_initialize(self, value):
-        """Set is security initialize."""
-        self._data[ATTR_SECURITY] = value
-        self.save()
-
-    @property
-    def security_totp(self):
-        """Return the TOTP key."""
-        return self._data.get(ATTR_TOTP)
-
-    @security_totp.setter
-    def security_totp(self, value):
-        """Set the TOTP key."""
-        self._data[ATTR_TOTP] = value
-        self.save()
-
-    @property
-    def security_password(self):
-        """Return the password key."""
-        return self._data.get(ATTR_PASSWORD)
-
-    @security_password.setter
-    def security_password(self, value):
-        """Set the password key."""
-        self._data[ATTR_PASSWORD] = value
-        self.save()
-
-    @property
-    def security_sessions(self):
-        """Return api sessions."""
-        return {
-            session: parse_datetime(until) for
-            session, until in self._data[ATTR_SESSIONS].items()
-        }
-
-    def add_security_session(self, session, valid):
-        """Set the a new session."""
-        self._data[ATTR_SESSIONS].update(
-            {session: valid.isoformat()}
-        )
-        self.save()
-
-    def drop_security_session(self, session):
-        """Delete the a session."""
-        self._data[ATTR_SESSIONS].pop(session, None)
-        self.save()
-
-    @property
-    def audio_output(self):
-        """Return ALSA audio output card,dev."""
-        return self._data.get(ATTR_AUDIO_OUTPUT)
-
-    @audio_output.setter
-    def audio_output(self, value):
-        """Set ALSA audio output card,dev."""
-        self._data[ATTR_AUDIO_OUTPUT] = value
-        self.save()
-
-    @property
-    def audio_input(self):
-        """Return ALSA audio input card,dev."""
-        return self._data.get(ATTR_AUDIO_INPUT)
-
-    @audio_input.setter
-    def audio_input(self, value):
-        """Set ALSA audio input card,dev."""
-        self._data[ATTR_AUDIO_INPUT] = value
-        self.save()
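A recurring theme in this file is the removal of `self.save()` from every setter: settings now accumulate in the in-memory `_data` dict and are flushed once by an explicit `save_data()` call at the API layer, avoiding one disk write per field. A minimal sketch of the pattern, with illustrative names rather than the real hassio classes:

class JsonConfigSketch:
    """Keep validated settings in a dict; persist only on explicit save."""

    def __init__(self):
        self._data = {'timezone': 'UTC', 'wait_boot': 5}

    @property
    def wait_boot(self):
        """Return wait time for auto boot stages."""
        return self._data['wait_boot']

    @wait_boot.setter
    def wait_boot(self, value):
        # No implicit save here; the caller invokes save_data() once
        # after applying a whole batch of options.
        self._data['wait_boot'] = value

    def save_data(self):
        """Write self._data out as JSON (omitted in this sketch)."""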
hassio/const.py (102 changes)
@@ -2,31 +2,27 @@
 from pathlib import Path
 from ipaddress import ip_network

-HASSIO_VERSION = '0.73'
+HASSIO_VERSION = '124'

-URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
-                      'hassio/{}/version.json')
-URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
+URL_HASSIO_VERSION = \
+    "https://s3.amazonaws.com/hassio-version/{channel}.json"
+URL_HASSIO_APPARMOR = \
+    "https://s3.amazonaws.com/hassio-version/apparmor.txt"

+URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
+URL_HASSOS_OTA = (
+    "https://github.com/home-assistant/hassos/releases/download/"
+    "{version}/hassos_{board}-{version}.raucb")

 HASSIO_DATA = Path("/data")

-RUN_UPDATE_INFO_TASKS = 28800
-RUN_UPDATE_SUPERVISOR_TASKS = 29100
-RUN_UPDATE_ADDONS_TASKS = 57600
-RUN_RELOAD_ADDONS_TASKS = 28800
-RUN_RELOAD_SNAPSHOTS_TASKS = 72000
-RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
-RUN_WATCHDOG_HOMEASSISTANT_API = 300
-RUN_CLEANUP_API_SESSIONS = 900
-
 FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
 FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
 FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
 FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
+FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")

 SOCKET_DOCKER = Path("/var/run/docker.sock")
-SOCKET_HC = Path("/var/run/hassio-hc.sock")

 DOCKER_NETWORK = 'hassio'
 DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
@@ -35,6 +31,7 @@ DOCKER_NETWORK_RANGE = ip_network('172.30.33.0/24')
 LABEL_VERSION = 'io.hass.version'
 LABEL_ARCH = 'io.hass.arch'
 LABEL_TYPE = 'io.hass.type'
+LABEL_MACHINE = 'io.hass.machine'

 META_ADDON = 'addon'
 META_SUPERVISOR = 'supervisor'
@@ -50,23 +47,39 @@ RESULT_OK = 'ok'
 CONTENT_TYPE_BINARY = 'application/octet-stream'
 CONTENT_TYPE_PNG = 'image/png'
 CONTENT_TYPE_JSON = 'application/json'
+CONTENT_TYPE_TEXT = 'text/plain'
+CONTENT_TYPE_TAR = 'application/tar'
 HEADER_HA_ACCESS = 'x-ha-access'
+HEADER_TOKEN = 'x-hassio-key'
+
+ENV_TOKEN = 'HASSIO_TOKEN'
+ENV_TIME = 'TZ'
+
+REQUEST_FROM = 'HASSIO_FROM'
+
+ATTR_MACHINE = 'machine'
+ATTR_WAIT_BOOT = 'wait_boot'
+ATTR_DEPLOYMENT = 'deployment'
 ATTR_WATCHDOG = 'watchdog'
 ATTR_CHANGELOG = 'changelog'
 ATTR_DATE = 'date'
 ATTR_ARCH = 'arch'
+ATTR_LONG_DESCRIPTION = 'long_description'
 ATTR_HOSTNAME = 'hostname'
 ATTR_TIMEZONE = 'timezone'
 ATTR_ARGS = 'args'
-ATTR_OS = 'os'
+ATTR_OPERATING_SYSTEM = 'operating_system'
+ATTR_CHASSIS = 'chassis'
 ATTR_TYPE = 'type'
 ATTR_SOURCE = 'source'
 ATTR_FEATURES = 'features'
 ATTR_ADDONS = 'addons'
 ATTR_VERSION = 'version'
+ATTR_VERSION_LATEST = 'version_latest'
+ATTR_AUTO_UART = 'auto_uart'
 ATTR_LAST_BOOT = 'last_boot'
 ATTR_LAST_VERSION = 'last_version'
-ATTR_BETA_CHANNEL = 'beta_channel'
+ATTR_CHANNEL = 'channel'
 ATTR_NAME = 'name'
 ATTR_SLUG = 'slug'
 ATTR_DESCRIPTON = 'description'
@@ -83,6 +96,7 @@ ATTR_DETACHED = 'detached'
 ATTR_STATE = 'state'
 ATTR_SCHEMA = 'schema'
 ATTR_IMAGE = 'image'
 ATTR_ICON = 'icon'
+ATTR_LOGO = 'logo'
 ATTR_STDIN = 'stdin'
 ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
@@ -100,6 +114,8 @@ ATTR_BUILD = 'build'
 ATTR_DEVICES = 'devices'
 ATTR_ENVIRONMENT = 'environment'
 ATTR_HOST_NETWORK = 'host_network'
+ATTR_HOST_IPC = 'host_ipc'
+ATTR_HOST_DBUS = 'host_dbus'
 ATTR_NETWORK = 'network'
 ATTR_TMPFS = 'tmpfs'
 ATTR_PRIVILEGED = 'privileged'
@@ -110,6 +126,7 @@ ATTR_HOMEASSISTANT = 'homeassistant'
 ATTR_HASSIO = 'hassio'
 ATTR_HASSIO_API = 'hassio_api'
+ATTR_HOMEASSISTANT_API = 'homeassistant_api'
 ATTR_UUID = 'uuid'
 ATTR_FOLDERS = 'folders'
 ATTR_SIZE = 'size'
 ATTR_TYPE = 'type'
@@ -127,8 +144,43 @@ ATTR_SECURITY = 'security'
 ATTR_BUILD_FROM = 'build_from'
 ATTR_SQUASH = 'squash'
 ATTR_GPIO = 'gpio'
-ATTR_LEGACY = 'ATTR_LEGACY'
+ATTR_LEGACY = 'legacy'
 ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'
+ATTR_CPU_PERCENT = 'cpu_percent'
+ATTR_NETWORK_RX = 'network_rx'
+ATTR_NETWORK_TX = 'network_tx'
+ATTR_MEMORY_LIMIT = 'memory_limit'
+ATTR_MEMORY_USAGE = 'memory_usage'
+ATTR_BLK_READ = 'blk_read'
+ATTR_BLK_WRITE = 'blk_write'
+ATTR_PROVIDER = 'provider'
+ATTR_AVAILABLE = 'available'
+ATTR_HOST = 'host'
+ATTR_USERNAME = 'username'
+ATTR_PROTOCOL = 'protocol'
+ATTR_DISCOVERY = 'discovery'
+ATTR_PLATFORM = 'platform'
+ATTR_COMPONENT = 'component'
+ATTR_CONFIG = 'config'
+ATTR_DISCOVERY_ID = 'discovery_id'
+ATTR_SERVICES = 'services'
+ATTR_DISCOVERY = 'discovery'
+ATTR_PROTECTED = 'protected'
+ATTR_CRYPTO = 'crypto'
+ATTR_BRANCH = 'branch'
+ATTR_KERNEL = 'kernel'
+ATTR_APPARMOR = 'apparmor'
+ATTR_DEVICETREE = 'devicetree'
+ATTR_CPE = 'cpe'
+ATTR_BOARD = 'board'
+ATTR_HASSOS = 'hassos'
+ATTR_HASSOS_CLI = 'hassos_cli'
+ATTR_VERSION_CLI = 'version_cli'
+ATTR_VERSION_CLI_LATEST = 'version_cli_latest'
+ATTR_REFRESH_TOKEN = 'refresh_token'
+ATTR_DOCKER_API = 'docker_api'
+
+SERVICE_MQTT = 'mqtt'

 STARTUP_INITIALIZE = 'initialize'
 STARTUP_SYSTEM = 'system'
@@ -154,6 +206,10 @@ ARCH_AARCH64 = 'aarch64'
 ARCH_AMD64 = 'amd64'
 ARCH_I386 = 'i386'

+CHANNEL_STABLE = 'stable'
+CHANNEL_BETA = 'beta'
+CHANNEL_DEV = 'dev'
+
 REPOSITORY_CORE = 'core'
 REPOSITORY_LOCAL = 'local'

@@ -164,3 +220,15 @@ FOLDER_SSL = 'ssl'

 SNAPSHOT_FULL = 'full'
 SNAPSHOT_PARTIAL = 'partial'
+
+CRYPTO_AES128 = 'aes128'
+
+SECURITY_PROFILE = 'profile'
+SECURITY_DEFAULT = 'default'
+SECURITY_DISABLE = 'disable'
+
+FEATURES_SHUTDOWN = 'shutdown'
+FEATURES_REBOOT = 'reboot'
+FEATURES_HASSOS = 'hassos'
+FEATURES_HOSTNAME = 'hostname'
+FEATURES_SERVICES = 'services'
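The old boolean `beta_channel` flag becomes a three-way channel, and the version feed URL is now templated on it. A tiny example of how the two constants compose, using only values from this file:

URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"

CHANNEL_STABLE = 'stable'
CHANNEL_BETA = 'beta'
CHANNEL_DEV = 'dev'

for channel in (CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV):
    print(URL_HASSIO_VERSION.format(channel=channel))
# -> https://s3.amazonaws.com/hassio-version/stable.json, .../beta.json, .../dev.json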
hassio/core.py (216 changes)
@@ -1,190 +1,140 @@
 """Main file for HassIO."""
+from contextlib import suppress
 import asyncio
 import logging

-import aiohttp
 import async_timeout

-from .addons import AddonManager
-from .api import RestAPI
-from .host_control import HostControl
+from .coresys import CoreSysAttributes
 from .const import (
-    RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
-    RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT_DOCKER,
-    RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
-    STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
-    RUN_UPDATE_ADDONS_TASKS)
-from .hardware import Hardware
-from .homeassistant import HomeAssistant
-from .scheduler import Scheduler
-from .dock import DockerAPI
-from .dock.supervisor import DockerSupervisor
-from .dns import DNSForward
-from .snapshots import SnapshotsManager
-from .updater import Updater
-from .tasks import (
-    hassio_update, homeassistant_watchdog_docker, api_sessions_cleanup,
-    addons_update)
-from .tools import fetch_timezone
+    STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE)
+from .exceptions import HassioError, HomeAssistantError

 _LOGGER = logging.getLogger(__name__)


-class HassIO(object):
+class HassIO(CoreSysAttributes):
     """Main object of hassio."""

-    def __init__(self, loop, config):
+    def __init__(self, coresys):
         """Initialize hassio object."""
-        self.exit_code = 0
-        self.loop = loop
-        self.config = config
-        self.websession = aiohttp.ClientSession(loop=loop)
-        self.updater = Updater(config, loop, self.websession)
-        self.scheduler = Scheduler(loop)
-        self.api = RestAPI(config, loop)
-        self.hardware = Hardware()
-        self.docker = DockerAPI()
-        self.dns = DNSForward()
-
-        # init basic docker container
-        self.supervisor = DockerSupervisor(
-            config, loop, self.docker, self.stop)
-
-        # init homeassistant
-        self.homeassistant = HomeAssistant(
-            config, loop, self.docker, self.updater)
-
-        # init HostControl
-        self.host_control = HostControl(loop)
-
-        # init addon system
-        self.addons = AddonManager(config, loop, self.docker)
-
-        # init snapshot system
-        self.snapshots = SnapshotsManager(
-            config, loop, self.scheduler, self.addons, self.homeassistant)
+        self.coresys = coresys

     async def setup(self):
         """Setup HassIO orchestration."""
-        # supervisor
-        if not await self.supervisor.attach():
-            _LOGGER.fatal("Can't setup supervisor docker container!")
-            await self.supervisor.cleanup()
+        # Load Supervisor
+        await self.sys_supervisor.load()

-        # set running arch
-        self.config.arch = self.supervisor.arch
+        # Load DBus
+        await self.sys_dbus.load()

-        # update timezone
-        if self.config.timezone == 'UTC':
-            self.config.timezone = await fetch_timezone(self.websession)
+        # Load Host
+        await self.sys_host.load()

-        # hostcontrol
-        await self.host_control.load()
+        # Load HassOS
+        await self.sys_hassos.load()

-        # schedule update info tasks
-        self.scheduler.register_task(
-            self.host_control.load, RUN_UPDATE_INFO_TASKS)
+        # Load Home Assistant
+        await self.sys_homeassistant.load()
+
+        # Load Add-ons
+        await self.sys_addons.load()

         # rest api views
-        self.api.register_host(self.host_control, self.hardware)
-        self.api.register_network(self.host_control)
-        self.api.register_supervisor(
-            self.supervisor, self.snapshots, self.addons, self.host_control,
-            self.updater)
-        self.api.register_homeassistant(self.homeassistant)
-        self.api.register_addons(self.addons)
-        self.api.register_security()
-        self.api.register_snapshots(self.snapshots)
-        self.api.register_panel()
+        await self.sys_api.load()

-        # schedule api session cleanup
-        self.scheduler.register_task(
-            api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
-            now=True)
+        # load last available data
+        await self.sys_updater.load()

-        # Load homeassistant
-        await self.homeassistant.prepare()
+        # load last available data
+        await self.sys_snapshots.load()

-        # Load addons
-        await self.addons.prepare()
-
-        # schedule addon update task
-        self.scheduler.register_task(
-            self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
-        self.scheduler.register_task(
-            addons_update(self.loop, self.addons), RUN_UPDATE_ADDONS_TASKS)
-
-        # schedule self update task
-        self.scheduler.register_task(
-            hassio_update(self.supervisor, self.updater),
-            RUN_UPDATE_SUPERVISOR_TASKS)
-
-        # schedule snapshot update tasks
-        self.scheduler.register_task(
-            self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
+        # load services
+        await self.sys_services.load()

         # start dns forwarding
-        self.loop.create_task(self.dns.start())
-
-        # start addon mark as initialize
-        await self.addons.auto_boot(STARTUP_INITIALIZE)
+        self.sys_create_task(self.sys_dns.start())

     async def start(self):
         """Start HassIO orchestration."""
-        # on release channel, try update itself
-        # on beta channel, only read new versions
-        await asyncio.wait(
-            [hassio_update(self.supervisor, self.updater)()],
-            loop=self.loop
-        )
+        # on dev mode, only read new versions
+        if not self.sys_dev and self.sys_supervisor.need_update:
+            if await self.sys_supervisor.update():
+                return
+        else:
+            _LOGGER.info("Ignore Hass.io auto updates on dev channel")

         # start api
-        await self.api.start()
-        _LOGGER.info("Start hassio api on %s", self.docker.network.supervisor)
+        await self.sys_api.start()
+
+        # start addon mark as initialize
+        await self.sys_addons.boot(STARTUP_INITIALIZE)

         try:
             # HomeAssistant is already running / supervisor have only reboot
-            if self.hardware.last_boot == self.config.last_boot:
-                _LOGGER.info("HassIO reboot detected")
+            if self.sys_hardware.last_boot == self.sys_config.last_boot:
+                _LOGGER.info("Hass.io reboot detected")
                 return

+            # reset register services / discovery
+            self.sys_services.reset()
+
             # start addon mark as system
-            await self.addons.auto_boot(STARTUP_SYSTEM)
+            await self.sys_addons.boot(STARTUP_SYSTEM)

             # start addon mark as services
-            await self.addons.auto_boot(STARTUP_SERVICES)
+            await self.sys_addons.boot(STARTUP_SERVICES)

             # run HomeAssistant
-            if self.homeassistant.boot:
-                await self.homeassistant.run()
+            if self.sys_homeassistant.boot:
+                with suppress(HomeAssistantError):
+                    await self.sys_homeassistant.start()

             # start addon mark as application
-            await self.addons.auto_boot(STARTUP_APPLICATION)
+            await self.sys_addons.boot(STARTUP_APPLICATION)

             # store new last boot
-            self.config.last_boot = self.hardware.last_boot
+            self.sys_config.last_boot = self.sys_hardware.last_boot
+            self.sys_config.save_data()

         finally:
-            # schedule homeassistant watchdog
-            self.scheduler.register_task(
-                homeassistant_watchdog_docker(self.loop, self.homeassistant),
-                RUN_WATCHDOG_HOMEASSISTANT_DOCKER)
-
-            # self.scheduler.register_task(
-            #     homeassistant_watchdog_api(self.loop, self.homeassistant),
-            #     RUN_WATCHDOG_HOMEASSISTANT_API)
+            # Add core tasks into scheduler
+            await self.sys_tasks.load()

             # If landingpage / run upgrade in background
-            if self.homeassistant.version == 'landingpage':
-                self.loop.create_task(self.homeassistant.install())
+            if self.sys_homeassistant.version == 'landingpage':
+                self.sys_create_task(self.sys_homeassistant.install())
+
+            _LOGGER.info("Hass.io is up and running")

     async def stop(self):
         """Stop a running orchestration."""
         # don't process scheduler anymore
-        self.scheduler.suspend = True
-
-        # process stop tasks
-        self.websession.close()
-        self.homeassistant.websession.close()
+        self.sys_scheduler.suspend = True

         # process async stop tasks
-        await asyncio.wait([self.api.stop(), self.dns.stop()], loop=self.loop)
+        try:
+            with async_timeout.timeout(10):
+                await asyncio.wait([
+                    self.sys_api.stop(),
+                    self.sys_dns.stop(),
+                    self.sys_websession.close(),
+                    self.sys_websession_ssl.close()
+                ])
+        except asyncio.TimeoutError:
+            _LOGGER.warning("Force Shutdown!")
+
+        _LOGGER.info("Hass.io is down")
+
+    async def shutdown(self):
+        """Shutdown all running containers in correct order."""
+        await self.sys_addons.shutdown(STARTUP_APPLICATION)
+
+        # Close Home Assistant
+        with suppress(HassioError):
+            await self.sys_homeassistant.stop()
+
+        await self.sys_addons.shutdown(STARTUP_SERVICES)
+        await self.sys_addons.shutdown(STARTUP_SYSTEM)
+        await self.sys_addons.shutdown(STARTUP_INITIALIZE)
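The boot and shutdown paths are mirror images: add-ons come up stage by stage (initialize, system, services, application) and go down in reverse, with Home Assistant itself stopped right after the application stage. A sketch of the ordering logic, using stage strings that mirror the STARTUP_* constants:

import asyncio

BOOT_ORDER = ('initialize', 'system', 'services', 'application')


async def boot_all(addons):
    """Boot add-ons stage by stage, letting each stage finish first."""
    for stage in BOOT_ORDER:
        await addons.boot(stage)


async def shutdown_all(addons):
    """Stop in reverse so applications go down before system add-ons."""
    for stage in reversed(BOOT_ORDER):
        await addons.shutdown(stage)

In the real `shutdown()` above, stopping Home Assistant is wedged between the application and services stages, since it depends on service add-ons such as MQTT.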
hassio/coresys.py (283 lines, new file)
@@ -0,0 +1,283 @@
"""Handle core shared data."""

import aiohttp

from .const import CHANNEL_DEV
from .config import CoreConfig
from .docker import DockerAPI
from .misc.dns import DNSForward
from .misc.hardware import Hardware
from .misc.scheduler import Scheduler


class CoreSys:
    """Class that handle all shared data."""

    def __init__(self, loop):
        """Initialize coresys."""
        # Static attributes
        self.exit_code = 0
        self.machine_id = None

        # External objects
        self._loop = loop
        self._websession = aiohttp.ClientSession(loop=loop)
        self._websession_ssl = aiohttp.ClientSession(
            connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)

        # Global objects
        self._config = CoreConfig()
        self._hardware = Hardware()
        self._docker = DockerAPI()
        self._scheduler = Scheduler(loop=loop)
        self._dns = DNSForward(loop=loop)

        # Internal objects pointers
        self._core = None
        self._homeassistant = None
        self._supervisor = None
        self._addons = None
        self._api = None
        self._updater = None
        self._snapshots = None
        self._tasks = None
        self._host = None
        self._dbus = None
        self._hassos = None
        self._services = None
        self._discovery = None

    @property
    def arch(self):
        """Return running arch of hass.io system."""
        if self._supervisor:
            return self._supervisor.arch
        return None

    @property
    def machine(self):
        """Return running machine type of hass.io system."""
        if self._homeassistant:
            return self._homeassistant.machine
        return None

    @property
    def dev(self):
        """Return True if we run dev modus."""
        return self._updater.channel == CHANNEL_DEV

    @property
    def loop(self):
        """Return loop object."""
        return self._loop

    @property
    def websession(self):
        """Return websession object."""
        return self._websession

    @property
    def websession_ssl(self):
        """Return websession object with disabled SSL."""
        return self._websession_ssl

    @property
    def config(self):
        """Return CoreConfig object."""
        return self._config

    @property
    def hardware(self):
        """Return Hardware object."""
        return self._hardware

    @property
    def docker(self):
        """Return DockerAPI object."""
        return self._docker

    @property
    def scheduler(self):
        """Return Scheduler object."""
        return self._scheduler

    @property
    def dns(self):
        """Return DNSForward object."""
        return self._dns

    @property
    def core(self):
        """Return HassIO object."""
        return self._core

    @core.setter
    def core(self, value):
        """Set a HassIO object."""
        if self._core:
            raise RuntimeError("HassIO already set!")
        self._core = value

    @property
    def homeassistant(self):
        """Return HomeAssistant object."""
        return self._homeassistant

    @homeassistant.setter
    def homeassistant(self, value):
        """Set a HomeAssistant object."""
        if self._homeassistant:
            raise RuntimeError("HomeAssistant already set!")
        self._homeassistant = value

    @property
    def supervisor(self):
        """Return Supervisor object."""
        return self._supervisor

    @supervisor.setter
    def supervisor(self, value):
        """Set a Supervisor object."""
        if self._supervisor:
            raise RuntimeError("Supervisor already set!")
        self._supervisor = value

    @property
    def api(self):
        """Return API object."""
        return self._api

    @api.setter
    def api(self, value):
        """Set an API object."""
        if self._api:
            raise RuntimeError("API already set!")
        self._api = value

    @property
    def updater(self):
        """Return Updater object."""
        return self._updater

    @updater.setter
    def updater(self, value):
        """Set a Updater object."""
        if self._updater:
            raise RuntimeError("Updater already set!")
        self._updater = value

    @property
    def addons(self):
        """Return AddonManager object."""
        return self._addons

    @addons.setter
    def addons(self, value):
        """Set a AddonManager object."""
        if self._addons:
            raise RuntimeError("AddonManager already set!")
        self._addons = value

    @property
    def snapshots(self):
        """Return SnapshotManager object."""
        return self._snapshots

    @snapshots.setter
    def snapshots(self, value):
        """Set a SnapshotManager object."""
        if self._snapshots:
            raise RuntimeError("SnapshotsManager already set!")
        self._snapshots = value

    @property
    def tasks(self):
        """Return Tasks object."""
        return self._tasks

    @tasks.setter
    def tasks(self, value):
        """Set a Tasks object."""
        if self._tasks:
            raise RuntimeError("Tasks already set!")
        self._tasks = value

    @property
    def services(self):
        """Return ServiceManager object."""
        return self._services

    @services.setter
    def services(self, value):
        """Set a ServiceManager object."""
        if self._services:
            raise RuntimeError("Services already set!")
        self._services = value

    @property
    def discovery(self):
        """Return ServiceManager object."""
        return self._discovery

    @discovery.setter
    def discovery(self, value):
        """Set a Discovery object."""
        if self._discovery:
            raise RuntimeError("Discovery already set!")
        self._discovery = value

    @property
    def dbus(self):
        """Return DBusManager object."""
        return self._dbus

    @dbus.setter
    def dbus(self, value):
        """Set a DBusManager object."""
        if self._dbus:
            raise RuntimeError("DBusManager already set!")
        self._dbus = value

    @property
    def host(self):
        """Return HostManager object."""
        return self._host

    @host.setter
    def host(self, value):
        """Set a HostManager object."""
        if self._host:
            raise RuntimeError("HostManager already set!")
        self._host = value

    @property
    def hassos(self):
        """Return HassOS object."""
        return self._hassos

    @hassos.setter
    def hassos(self, value):
        """Set a HassOS object."""
        if self._hassos:
            raise RuntimeError("HassOS already set!")
        self._hassos = value

    def run_in_executor(self, funct, *args):
        """Wrapper for executor pool."""
        return self._loop.run_in_executor(None, funct, *args)

    def create_task(self, coroutine):
        """Wrapper for async task."""
        return self._loop.create_task(coroutine)


class CoreSysAttributes:
    """Inheret basic CoreSysAttributes."""

    coresys = None

    def __getattr__(self, name):
        """Mapping to coresys."""
        if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
            return getattr(self.coresys, name[4:])
        raise AttributeError(f"Can't resolve {name} on {self}")
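The `CoreSysAttributes.__getattr__` hook is what makes the `self.sys_*` accesses seen throughout this diff work: any class that stores `self.coresys` can reach every shared object through a `sys_` prefix. A runnable, simplified demonstration:

class CoreSys:
    def __init__(self):
        self.config = {'timezone': 'UTC'}  # stand-in for CoreConfig


class CoreSysAttributes:
    coresys = None

    def __getattr__(self, name):
        # Only fires for attributes that normal lookup did not find.
        if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
            return getattr(self.coresys, name[4:])
        raise AttributeError(f"Can't resolve {name} on {self}")


class Updater(CoreSysAttributes):
    def __init__(self, coresys):
        self.coresys = coresys


updater = Updater(CoreSys())
print(updater.sys_config)  # {'timezone': 'UTC'}, resolved via __getattr__

Because `__getattr__` is only consulted after normal attribute lookup fails, real instance attributes always shadow the `sys_` mapping, and the write-once setters on CoreSys guarantee the shared objects cannot be silently swapped after bootstrap.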
hassio/dbus/__init__.py (39 lines, new file)
@@ -0,0 +1,39 @@
"""DBus interface objects."""

from .systemd import Systemd
from .hostname import Hostname
from .rauc import Rauc
from ..coresys import CoreSysAttributes


class DBusManager(CoreSysAttributes):
    """DBus Interface handler."""

    def __init__(self, coresys):
        """Initialize DBus Interface."""
        self.coresys = coresys

        self._systemd = Systemd()
        self._hostname = Hostname()
        self._rauc = Rauc()

    @property
    def systemd(self):
        """Return Systemd Interface."""
        return self._systemd

    @property
    def hostname(self):
        """Return hostname Interface."""
        return self._hostname

    @property
    def rauc(self):
        """Return rauc Interface."""
        return self._rauc

    async def load(self):
        """Connect interfaces to dbus."""
        await self.systemd.connect()
        await self.hostname.connect()
        await self.rauc.connect()
hassio/dbus/hostname.py (39 lines, new file)
@@ -0,0 +1,39 @@
"""DBus interface for hostname."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'org.freedesktop.hostname1'
DBUS_OBJECT = '/org/freedesktop/hostname1'


class Hostname(DBusInterface):
    """Handle DBus interface for hostname/system."""

    async def connect(self):
        """Connect do bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to hostname")

    @dbus_connected
    def set_static_hostname(self, hostname):
        """Change local hostname.

        Return a coroutine.
        """
        return self.dbus.SetStaticHostname(hostname, False)

    @dbus_connected
    def get_properties(self):
        """Return local host informations.

        Return a coroutine.
        """
        return self.dbus.get_properties(DBUS_NAME)
hassio/dbus/interface.py (18 lines, new file)
@@ -0,0 +1,18 @@
"""Interface class for dbus wrappers."""


class DBusInterface:
    """Handle DBus interface for hostname/system."""

    def __init__(self):
        """Initialize systemd."""
        self.dbus = None

    @property
    def is_connected(self):
        """Return True, if they is connected to dbus."""
        return self.dbus is not None

    async def connect(self):
        """Connect do bus."""
        raise NotImplementedError()
hassio/dbus/rauc.py (55 lines, new file)
@@ -0,0 +1,55 @@
"""DBus interface for rauc."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'de.pengutronix.rauc'
DBUS_OBJECT = '/'


class Rauc(DBusInterface):
    """Handle DBus interface for rauc."""

    async def connect(self):
        """Connect do bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to rauc")

    @dbus_connected
    def install(self, raucb_file):
        """Install rauc bundle file.

        Return a coroutine.
        """
        return self.dbus.Installer.Install(raucb_file)

    @dbus_connected
    def get_slot_status(self):
        """Get slot status.

        Return a coroutine.
        """
        return self.dbus.Installer.GetSlotStatus()

    @dbus_connected
    def get_properties(self):
        """Return rauc informations.

        Return a coroutine.
        """
        return self.dbus.get_properties(f"{DBUS_NAME}.Installer")

    @dbus_connected
    def signal_completed(self):
        """Return a signal wrapper for completed signal.

        Return a coroutine.
        """
        return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
hassio/dbus/systemd.py (79 lines, new file)
@@ -0,0 +1,79 @@
"""Interface to Systemd over dbus."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'org.freedesktop.systemd1'
DBUS_OBJECT = '/org/freedesktop/systemd1'


class Systemd(DBusInterface):
    """Systemd function handler."""

    async def connect(self):
        """Connect do bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to systemd")

    @dbus_connected
    def reboot(self):
        """Reboot host computer.

        Return a coroutine.
        """
        return self.dbus.Manager.Reboot()

    @dbus_connected
    def power_off(self):
        """Power off host computer.

        Return a coroutine.
        """
        return self.dbus.Manager.PowerOff()

    @dbus_connected
    def start_unit(self, unit, mode):
        """Start a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.StartUnit(unit, mode)

    @dbus_connected
    def stop_unit(self, unit, mode):
        """Stop a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.StopUnit(unit, mode)

    @dbus_connected
    def reload_unit(self, unit, mode):
        """Reload a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.ReloadOrRestartUnit(unit, mode)

    @dbus_connected
    def restart_unit(self, unit, mode):
        """Restart a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.RestartUnit(unit, mode)

    @dbus_connected
    def list_units(self):
        """Return a list of available systemd services.

        Return a coroutine.
        """
        return self.dbus.Manager.ListUnits()
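As the docstrings note, these wrappers are synchronous methods that hand back an un-awaited coroutine, so the caller decides when to await the DBus round trip. A hypothetical call site, assuming a `systemd` instance whose `connect()` has already run:

async def restart_unit_example(systemd):
    """Restart a unit; 'replace' is the usual systemd job mode."""
    # restart_unit() itself is synchronous and returns a coroutine,
    # so the await happens here at the call site.
    await systemd.restart_unit('NetworkManager.service', 'replace')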
hassio/dbus/utils.py (14 lines, new file)
@@ -0,0 +1,14 @@
"""Utils for dbus."""

from ..exceptions import DBusNotConnectedError


def dbus_connected(method):
    """Wrapper for check if dbus is connected."""
    def wrap_dbus(api, *args, **kwargs):
        """Check if dbus is connected before call a method."""
        if api.dbus is None:
            raise DBusNotConnectedError()
        return method(api, *args, **kwargs)

    return wrap_dbus
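The guard turns "called before connect()" into a typed exception instead of an obscure AttributeError on `None`. A self-contained demonstration of the same behaviour:

class DBusNotConnectedError(RuntimeError):
    """Raised when a DBus call happens before connect()."""


def dbus_connected(method):
    def wrap_dbus(api, *args, **kwargs):
        if api.dbus is None:
            raise DBusNotConnectedError()
        return method(api, *args, **kwargs)
    return wrap_dbus


class Hostname:
    def __init__(self):
        self.dbus = None  # would be set by connect()

    @dbus_connected
    def get_properties(self):
        return self.dbus.get_properties('org.freedesktop.hostname1')


try:
    Hostname().get_properties()
except DBusNotConnectedError:
    print("refused: not connected yet")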
@@ -1,77 +0,0 @@
"""Init file for HassIO docker object."""
import logging
import os

import docker

from .interface import DockerInterface
from .util import docker_process

_LOGGER = logging.getLogger(__name__)


class DockerSupervisor(DockerInterface):
    """Docker hassio wrapper for HomeAssistant."""

    def __init__(self, config, loop, api, stop_callback, image=None):
        """Initialize docker base wrapper."""
        super().__init__(config, loop, api, image=image)
        self.stop_callback = stop_callback

    @property
    def name(self):
        """Return name of docker container."""
        return os.environ['SUPERVISOR_NAME']

    def _attach(self):
        """Attach to running docker container.

        Need run inside executor.
        """
        try:
            container = self.docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        self.process_metadata(container.attrs)
        _LOGGER.info("Attach to supervisor %s with version %s",
                     self.image, self.version)

        # if already attach
        if container in self.docker.network.containers:
            return True

        # attach to network
        return self.docker.network.attach_container(
            container, alias=['hassio'], ipv4=self.docker.network.supervisor)

    @docker_process
    async def update(self, tag):
        """Update a supervisor docker image."""
        _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)

        if await self.loop.run_in_executor(None, self._install, tag):
            self.loop.call_later(1, self.loop.stop)
            return True

        return False

    async def run(self):
        """Run docker image."""
        raise RuntimeError("Not support on supervisor docker container!")

    async def install(self, tag):
        """Pull docker image."""
        raise RuntimeError("Not support on supervisor docker container!")

    async def stop(self):
        """Stop/remove docker container."""
        raise RuntimeError("Not support on supervisor docker container!")

    async def remove(self):
        """Remove docker image."""
        raise RuntimeError("Not support on supervisor docker container!")

    async def restart(self):
        """Restart docker container."""
        raise RuntimeError("Not support on supervisor docker container!")
@@ -1,20 +0,0 @@
"""HassIO docker utilitys."""
import logging

_LOGGER = logging.getLogger(__name__)


# pylint: disable=protected-access
def docker_process(method):
    """Wrap function with only run once."""
    async def wrap_api(api, *args, **kwargs):
        """Return api wrapper."""
        if api._lock.locked():
            _LOGGER.error(
                "Can't excute %s while a task is in progress", method.__name__)
            return False

        async with api._lock:
            return await method(api, *args, **kwargs)

    return wrap_api
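The removed `docker_process` helper lives on as `process_lock` in `hassio/utils`, imported by the add-on wrapper later in this diff. The idea stays the same: one asyncio mutex per object, with a second long-running Docker action rejected rather than queued. A runnable sketch of the pattern, with hypothetical class names:

import asyncio


def process_lock(method):
    """Reject concurrent calls instead of queueing them (sketch)."""
    async def wrap_api(api, *args, **kwargs):
        if api.lock.locked():
            print(f"Can't execute {method.__name__}: task in progress")
            return False
        async with api.lock:
            return await method(api, *args, **kwargs)
    return wrap_api


class FakeDocker:
    def __init__(self):
        self.lock = asyncio.Lock()

    @process_lock
    async def update(self):
        await asyncio.sleep(1)  # stand-in for a slow docker pull
        return True


async def main():
    api = FakeDocker()
    # The second concurrent call is rejected while the first holds the lock.
    print(await asyncio.gather(api.update(), api.update()))  # [True, False]

asyncio.run(main())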
@@ -2,6 +2,7 @@
 from contextlib import suppress
 import logging

+import attr
 import docker

 from .network import DockerNetwork
@@ -9,8 +10,11 @@ from ..const import SOCKET_DOCKER

 _LOGGER = logging.getLogger(__name__)

+# pylint: disable=invalid-name
+CommandReturn = attr.make_class('CommandReturn', ['exit_code', 'output'])

-class DockerAPI(object):
+
+class DockerAPI:
     """Docker hassio wrapper.

     This class is not AsyncIO safe!
@@ -19,7 +23,8 @@ class DockerAPI(object):
     def __init__(self):
         """Initialize docker base wrapper."""
         self.docker = docker.DockerClient(
-            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
+            base_url="unix:/{}".format(str(SOCKET_DOCKER)),
+            version='auto', timeout=900)
         self.network = DockerNetwork(self.docker)

     @property
@@ -47,8 +52,10 @@ class DockerAPI(object):
         hostname = kwargs.get('hostname')

         # setup network
+        kwargs['dns_search'] = ["."]
         if network_mode:
             kwargs['dns'] = [str(self.network.supervisor)]
+            kwargs['dns_opt'] = ["ndots:0"]
         else:
             kwargs['network'] = None

@@ -56,7 +63,7 @@ class DockerAPI(object):
         try:
             container = self.docker.containers.create(image, **kwargs)
         except docker.errors.DockerException as err:
-            _LOGGER.error("Can't create container from %s -> %s", name, err)
+            _LOGGER.error("Can't create container from %s: %s", name, err)
             return False

         # attach network
@@ -71,7 +78,7 @@ class DockerAPI(object):
         try:
             container.start()
         except docker.errors.DockerException as err:
-            _LOGGER.error("Can't start %s -> %s", name, err)
+            _LOGGER.error("Can't start %s: %s", name, err)
             return False

         return True
@@ -94,15 +101,15 @@ class DockerAPI(object):
             )

             # wait until command is done
-            exit_code = container.wait()
+            result = container.wait()
             output = container.logs(stdout=stdout, stderr=stderr)

         except docker.errors.DockerException as err:
-            _LOGGER.error("Can't execute command -> %s", err)
-            return (None, b"")
+            _LOGGER.error("Can't execute command: %s", err)
+            return CommandReturn(None, b"")

         # cleanup container
         with suppress(docker.errors.DockerException):
            container.remove(force=True)

-        return (exit_code, output)
+        return CommandReturn(result.get('StatusCode'), output)
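Two details worth noting here: docker-py 3.x changed `container.wait()` to return a dict, hence the new `result.get('StatusCode')`, and the bare result tuple is replaced by an attrs-generated record so call sites read by field name. The record is a one-liner to reproduce:

import attr

# The same declaration as in the diff: a tiny record type with named fields.
CommandReturn = attr.make_class('CommandReturn', ['exit_code', 'output'])

ret = CommandReturn(0, b"hello\n")
print(ret.exit_code)  # 0, instead of ret[0] with the old tuple
print(ret.output)     # b'hello\n'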
@@ -1,15 +1,17 @@
 """Init file for HassIO addon docker object."""
 import logging
-import os
+from pathlib import Path

 import docker
 import requests

 from .interface import DockerInterface
-from .util import docker_process
 from ..addons.build import AddonBuild
 from ..const import (
-    MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
+    MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE, ENV_TOKEN,
+    ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
+from ..utils import process_lock

 _LOGGER = logging.getLogger(__name__)

@@ -19,32 +21,52 @@ AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
 class DockerAddon(DockerInterface):
     """Docker hassio wrapper for HomeAssistant."""

-    def __init__(self, config, loop, api, addon):
+    def __init__(self, coresys, slug):
         """Initialize docker homeassistant wrapper."""
-        super().__init__(
-            config, loop, api, image=addon.image, timeout=addon.timeout)
-        self.addon = addon
+        super().__init__(coresys)
+        self._id = slug

-    def process_metadata(self, metadata, force=False):
-        """Use addon data instead meta data with legacy."""
+    @property
+    def addon(self):
+        """Return addon of docker image."""
+        return self.sys_addons.get(self._id)
+
+    @property
+    def image(self):
+        """Return name of docker image."""
+        return self.addon.image
+
+    @property
+    def timeout(self):
+        """Return timeout for docker actions."""
+        return self.addon.timeout
+
+    @property
+    def version(self):
+        """Return version of docker image."""
         if not self.addon.legacy:
-            return super().process_metadata(metadata, force=force)
+            return super().version
+        return self.addon.version_installed

-        # set meta data
-        if not self.version or force:
-            if force:  # called on install/update/build
-                self.version = self.addon.last_version
-            else:
-                self.version = self.addon.version_installed
-
-        if not self.arch:
-            self.arch = self.config.arch
+    @property
+    def arch(self):
+        """Return arch of docker image."""
+        if not self.addon.legacy:
+            return super().arch
+        return self.sys_arch

     @property
     def name(self):
         """Return name of docker container."""
         return "addon_{}".format(self.addon.slug)

+    @property
+    def ipc(self):
+        """Return the IPC namespace."""
+        if self.addon.host_ipc:
+            return 'host'
+        return None
+
     @property
     def hostname(self):
         """Return slug/id of addon."""
@@ -54,6 +76,8 @@ class DockerAddon(DockerInterface):
     def environment(self):
         """Return environment for docker add-on."""
         addon_env = self.addon.environment or {}
+
+        # Need audio settings
         if self.addon.with_audio:
             addon_env.update({
                 'ALSA_OUTPUT': self.addon.audio_output,
@@ -62,7 +86,8 @@ class DockerAddon(DockerInterface):

         return {
             **addon_env,
-            'TZ': self.config.timezone,
+            ENV_TIME: self.sys_config.timezone,
+            ENV_TOKEN: self.addon.uuid,
         }

     @property
@@ -70,14 +95,17 @@ class DockerAddon(DockerInterface):
         """Return needed devices."""
         devices = self.addon.devices or []

-        # use audio devices
+        # Use audio devices
         if self.addon.with_audio and AUDIO_DEVICE not in devices:
             devices.append(AUDIO_DEVICE)

+        # Auto mapping UART devices
+        if self.addon.auto_uart:
+            for device in self.sys_hardware.serial_devices:
+                devices.append(f"{device}:{device}:rwm")
+
         # Return None if no devices is present
-        if devices:
-            return devices
-        return None
+        return devices or None

     @property
     def ports(self):
@@ -91,20 +119,38 @@ class DockerAddon(DockerInterface):
             if host_port
         }

+    @property
+    def security_opt(self):
+        """Controlling security opt."""
+        security = []
+
+        # AppArmor
+        apparmor = self.sys_host.apparmor.available
+        if not apparmor or self.addon.apparmor == SECURITY_DISABLE:
+            security.append("apparmor:unconfined")
+        elif self.addon.apparmor == SECURITY_PROFILE:
+            security.append(f"apparmor={self.addon.slug}")
+
+        # Disable Seccomp / We don't support it official and it
+        # make troubles on some kind of host systems.
+        security.append("seccomp=unconfined")
+
+        return security
+
     @property
     def tmpfs(self):
         """Return tmpfs for docker add-on."""
         options = self.addon.tmpfs
         if options:
-            return {"/tmpfs": "{}".format(options)}
+            return {"/tmpfs": f"{options}"}
         return None

     @property
     def network_mapping(self):
         """Return hosts mapping."""
         return {
-            'homeassistant': self.docker.network.gateway,
-            'hassio': self.docker.network.supervisor,
+            'homeassistant': self.sys_docker.network.gateway,
+            'hassio': self.sys_docker.network.supervisor,
         }

     @property
@@ -119,7 +165,7 @@ class DockerAddon(DockerInterface):
         """Generate volumes for mappings."""
         volumes = {
             str(self.addon.path_extern_data): {
-                'bind': '/data', 'mode': 'rw'
+                'bind': "/data", 'mode': 'rw'
             }}

         addon_mapping = self.addon.map_volumes
@@ -127,45 +173,77 @@ class DockerAddon(DockerInterface):
         # setup config mappings
         if MAP_CONFIG in addon_mapping:
             volumes.update({
-                str(self.config.path_extern_config): {
-                    'bind': '/config', 'mode': addon_mapping[MAP_CONFIG]
+                str(self.sys_config.path_extern_config): {
+                    'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
                 }})

         if MAP_SSL in addon_mapping:
             volumes.update({
-                str(self.config.path_extern_ssl): {
-                    'bind': '/ssl', 'mode': addon_mapping[MAP_SSL]
+                str(self.sys_config.path_extern_ssl): {
+                    'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
                 }})

         if MAP_ADDONS in addon_mapping:
             volumes.update({
-                str(self.config.path_extern_addons_local): {
-                    'bind': '/addons', 'mode': addon_mapping[MAP_ADDONS]
+                str(self.sys_config.path_extern_addons_local): {
+                    'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
                 }})

         if MAP_BACKUP in addon_mapping:
             volumes.update({
-                str(self.config.path_extern_backup): {
-                    'bind': '/backup', 'mode': addon_mapping[MAP_BACKUP]
+                str(self.sys_config.path_extern_backup): {
+                    'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
                 }})

         if MAP_SHARE in addon_mapping:
             volumes.update({
-                str(self.config.path_extern_share): {
-                    'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
+                str(self.sys_config.path_extern_share): {
+                    'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
                 }})

-        # init other hardware mappings
+        # Init other hardware mappings

+        # GPIO support
         if self.addon.with_gpio:
+            for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
+                if not Path(gpio_path).exists():
+                    continue
                 volumes.update({
-                    '/sys/class/gpio': {
-                        'bind': '/sys/class/gpio', 'mode': "rw"
-                    },
-                    '/sys/devices/platform/soc': {
-                        'bind': '/sys/devices/platform/soc', 'mode': "rw"
+                    gpio_path: {
+                        'bind': gpio_path, 'mode': 'rw'
                     },
                 })

+        # DeviceTree support
+        if self.addon.with_devicetree:
+            volumes.update({
+                "/sys/firmware/devicetree/base": {
+                    'bind': "/device-tree", 'mode': 'ro'
+                },
+            })
+
+        # Docker API support
+        if self.addon.with_docker_api:
+            volumes.update({
+                "/var/run/docker.sock": {
+                    'bind': "/var/run/docker.sock", 'mode': 'ro'
+                },
+            })
+
+        # Host dbus system
+        if self.addon.host_dbus:
+            volumes.update({
+                "/var/run/dbus": {
+                    'bind': "/var/run/dbus", 'mode': 'rw'
+                }})
+
+        # ALSA configuration
+        if self.addon.with_audio:
+            volumes.update({
+                str(self.addon.path_extern_asound): {
+                    'bind': "/etc/asound.conf", 'mode': 'ro'
+                }})
+
         return volumes

     def _run(self):
@@ -179,21 +257,20 @@ class DockerAddon(DockerInterface):
         # cleanup
         self._stop()

         # write config
         if not self.addon.write_options():
             return False

-        ret = self.docker.run(
+        ret = self.sys_docker.run(
             self.image,
             name=self.name,
             hostname=self.hostname,
             detach=True,
-            init=True,
+            ipc_mode=self.ipc,
             stdin_open=self.addon.with_stdin,
             network_mode=self.network_mode,
             ports=self.ports,
             extra_hosts=self.network_mapping,
             devices=self.devices,
             cap_add=self.addon.privileged,
+            security_opt=self.security_opt,
             environment=self.environment,
             volumes=self.volumes,
             tmpfs=self.tmpfs
@@ -220,26 +297,30 @@ class DockerAddon(DockerInterface):

         Need run inside executor.
         """
-        build_env = AddonBuild(self.config, self.addon)
+        build_env = AddonBuild(self.coresys, self._id)

         _LOGGER.info("Start build %s:%s", self.image, tag)
         try:
-            image = self.docker.images.build(**build_env.get_docker_args(tag))
+            image, log = self.sys_docker.images.build(
+                **build_env.get_docker_args(tag))
+
+            _LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
             image.tag(self.image, tag='latest')
-            self.process_metadata(image.attrs, force=True)
+
+            # Update meta data
+            self._meta = image.attrs

         except (docker.errors.DockerException) as err:
-            _LOGGER.error("Can't build %s:%s -> %s", self.image, tag, err)
+            _LOGGER.error("Can't build %s:%s: %s", self.image, tag, err)
             return False

         _LOGGER.info("Build %s:%s done", self.image, tag)
         return True

-    @docker_process
+    @process_lock
     def export_image(self, path):
         """Export current images into a tar file."""
-        return self.loop.run_in_executor(None, self._export_image, path)
+        return self.sys_run_in_executor(self._export_image, path)

     def _export_image(self, tar_file):
         """Export current images into a tar file.
@@ -247,26 +328,27 @@ class DockerAddon(DockerInterface):

         Need run inside executor.
         """
         try:
-            image = self.docker.api.get_image(self.image)
+            image = self.sys_docker.api.get_image(self.image)
         except docker.errors.DockerException as err:
             _LOGGER.error("Can't fetch image %s -> %s", self.image, err)
             return False

         try:
             with tar_file.open("wb") as write_tar:
                 for chunk in image.stream():
|
||||
write_tar.write(chunk)
|
||||
except (OSError, requests.exceptions.ReadTimeout) as err:
|
||||
_LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
|
||||
_LOGGER.error("Can't fetch image %s: %s", self.image, err)
|
||||
return False
|
||||
|
||||
_LOGGER.info("Export image %s to %s", self.image, tar_file)
|
||||
try:
|
||||
with tar_file.open("wb") as write_tar:
|
||||
for chunk in image:
|
||||
write_tar.write(chunk)
|
||||
except (OSError, requests.exceptions.ReadTimeout) as err:
|
||||
_LOGGER.error("Can't write tar file %s: %s", tar_file, err)
|
||||
return False
|
||||
|
||||
_LOGGER.info("Export image %s done", self.image)
|
||||
return True
|
||||
|
||||
@docker_process
|
||||
@process_lock
|
||||
def import_image(self, path, tag):
|
||||
"""Import a tar file as image."""
|
||||
return self.loop.run_in_executor(None, self._import_image, path, tag)
|
||||
return self.sys_run_in_executor(self._import_image, path, tag)
|
||||
|
||||
def _import_image(self, tar_file, tag):
|
||||
"""Import a tar file as image.
|
||||
@@ -275,32 +357,23 @@ class DockerAddon(DockerInterface):
|
||||
"""
|
||||
try:
|
||||
with tar_file.open("rb") as read_tar:
|
||||
self.docker.api.load_image(read_tar)
|
||||
self.sys_docker.api.load_image(read_tar, quiet=True)
|
||||
|
||||
image = self.docker.images.get(self.image)
|
||||
image = self.sys_docker.images.get(self.image)
|
||||
image.tag(self.image, tag=tag)
|
||||
except (docker.errors.DockerException, OSError) as err:
|
||||
_LOGGER.error("Can't import image %s -> %s", self.image, err)
|
||||
_LOGGER.error("Can't import image %s: %s", self.image, err)
|
||||
return False
|
||||
|
||||
_LOGGER.info("Import image %s and tag %s", tar_file, tag)
|
||||
self.process_metadata(image.attrs, force=True)
|
||||
self._meta = image.attrs
|
||||
self._cleanup()
|
||||
return True
|
||||
|
||||
def _restart(self):
|
||||
"""Restart docker container.
|
||||
|
||||
Addons prepare some thing on start and that is normaly not repeatable.
|
||||
Need run inside executor.
|
||||
"""
|
||||
self._stop()
|
||||
return self._run()
|
||||
|
||||
@docker_process
|
||||
@process_lock
|
||||
def write_stdin(self, data):
|
||||
"""Write to add-on stdin."""
|
||||
return self.loop.run_in_executor(None, self._write_stdin, data)
|
||||
return self.sys_run_in_executor(self._write_stdin, data)
|
||||
|
||||
def _write_stdin(self, data):
|
||||
"""Write to add-on stdin.
|
||||
@@ -312,10 +385,10 @@ class DockerAddon(DockerInterface):
|
||||
|
||||
try:
|
||||
# load needed docker objects
|
||||
container = self.docker.containers.get(self.name)
|
||||
container = self.sys_docker.containers.get(self.name)
|
||||
socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.error("Can't attach to %s stdin -> %s", self.name, err)
|
||||
_LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
|
||||
return False
|
||||
|
||||
try:
|
||||
@@ -324,7 +397,7 @@ class DockerAddon(DockerInterface):
|
||||
os.write(socket.fileno(), data)
|
||||
socket.close()
|
||||
except OSError as err:
|
||||
_LOGGER.error("Can't write to %s stdin -> %s", self.name, err)
|
||||
_LOGGER.error("Can't write to %s stdin: %s", self.name, err)
|
||||
return False
|
||||
|
||||
return True
|
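A note on the volumes hunks above: the nested {host_path: {'bind': container_path, 'mode': mode}} dicts are exactly the schema docker-py's containers.run() consumes. A minimal sketch under assumed names (the image name and host paths here are invented for illustration, not taken from this changeset):

    # Sketch: how a DockerAddon-style volumes dict is consumed by docker-py.
    import docker

    client = docker.from_env()

    volumes = {
        "/mnt/data/addons/example": {"bind": "/data", "mode": "rw"},
        "/var/run/dbus": {"bind": "/var/run/dbus", "mode": "rw"},
    }

    container = client.containers.run(
        "homeassistant/amd64-addon-example",  # hypothetical image name
        detach=True,
        init=True,
        volumes=volumes,  # same {host: {'bind': ..., 'mode': ...}} schema
        security_opt=["seccomp=unconfined"],
    )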
hassio/docker/hassos_cli.py (new file, 37 lines)
@@ -0,0 +1,37 @@
"""HassOS Cli docker object."""
import logging

import docker

from .interface import DockerInterface
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


class DockerHassOSCli(DockerInterface, CoreSysAttributes):
    """Docker hassio wrapper for HassOS Cli."""

    @property
    def image(self):
        """Return name of HassOS cli image."""
        return f"homeassistant/{self.sys_arch}-hassio-cli"

    def _stop(self):
        """Don't need stop."""
        return True

    def _attach(self):
        """Attach to running docker container.
        Need run inside executor.
        """
        try:
            image = self.sys_docker.images.get(self.image)

        except docker.errors.DockerException:
            _LOGGER.warning("Can't find a HassOS cli %s", self.image)

        else:
            self._meta = image.attrs
            _LOGGER.info("Found HassOS cli %s with version %s",
                         self.image, self.version)
hassio/docker/homeassistant.py
@@ -4,6 +4,7 @@ import logging
 import docker

 from .interface import DockerInterface
+from ..const import ENV_TOKEN, ENV_TIME, LABEL_MACHINE

 _LOGGER = logging.getLogger(__name__)

@@ -13,10 +14,17 @@ HASS_DOCKER_NAME = 'homeassistant'
 class DockerHomeAssistant(DockerInterface):
     """Docker hassio wrapper for HomeAssistant."""

-    def __init__(self, config, loop, api, data):
-        """Initialize docker homeassistant wrapper."""
-        super().__init__(config, loop, api, image=data.image)
-        self.data = data
+    @property
+    def machine(self):
+        """Return machine of Home-Assistant docker image."""
+        if self._meta and LABEL_MACHINE in self._meta['Config']['Labels']:
+            return self._meta['Config']['Labels'][LABEL_MACHINE]
+        return None

+    @property
+    def image(self):
+        """Return name of docker image."""
+        return self.sys_homeassistant.image

     @property
     def name(self):
@@ -26,14 +34,10 @@ class DockerHomeAssistant(DockerInterface):
     @property
     def devices(self):
         """Create list of special devices to map into docker."""
-        if not self.data.devices:
-            return
-
         devices = []
-        for device in self.data.devices:
-            devices.append("/dev/{0}:/dev/{0}:rwm".format(device))
-
-        return devices
+        for device in self.sys_hardware.serial_devices:
+            devices.append(f"{device}:{device}:rwm")
+        return devices or None

     def _run(self):
         """Run docker image.
@@ -41,29 +45,31 @@ class DockerHomeAssistant(DockerInterface):
         Need run inside executor.
         """
         if self._is_running():
-            return
+            return False

         # cleanup
         self._stop()

-        ret = self.docker.run(
+        ret = self.sys_docker.run(
             self.image,
             name=self.name,
             hostname=self.name,
             detach=True,
             privileged=True,
             init=True,
             devices=self.devices,
             network_mode='host',
             environment={
-                'HASSIO': self.docker.network.supervisor,
-                'TZ': self.config.timezone,
+                'HASSIO': self.sys_docker.network.supervisor,
+                ENV_TIME: self.sys_config.timezone,
+                ENV_TOKEN: self.sys_homeassistant.uuid,
             },
             volumes={
-                str(self.config.path_extern_config):
+                str(self.sys_config.path_extern_config):
                     {'bind': '/config', 'mode': 'rw'},
-                str(self.config.path_extern_ssl):
+                str(self.sys_config.path_extern_ssl):
                     {'bind': '/ssl', 'mode': 'ro'},
-                str(self.config.path_extern_share):
+                str(self.sys_config.path_extern_share):
                     {'bind': '/share', 'mode': 'rw'},
             }
         )
@@ -79,26 +85,29 @@ class DockerHomeAssistant(DockerInterface):

         Need run inside executor.
         """
-        return self.docker.run_command(
+        return self.sys_docker.run_command(
             self.image,
             command,
             privileged=True,
             init=True,
             devices=self.devices,
             detach=True,
             stdout=True,
             stderr=True,
             environment={
-                'TZ': self.config.timezone,
+                ENV_TIME: self.sys_config.timezone,
             },
             volumes={
-                str(self.config.path_extern_config):
-                    {'bind': '/config', 'mode': 'ro'},
-                str(self.config.path_extern_ssl):
+                str(self.sys_config.path_extern_config):
+                    {'bind': '/config', 'mode': 'rw'},
+                str(self.sys_config.path_extern_ssl):
                     {'bind': '/ssl', 'mode': 'ro'},
             }
         )

     def is_initialize(self):
         """Return True if docker container exists."""
-        return self.loop.run_in_executor(None, self._is_initialize)
+        return self.sys_run_in_executor(self._is_initialize)

     def _is_initialize(self):
         """Return True if docker container exists.
@@ -106,7 +115,7 @@ class DockerHomeAssistant(DockerInterface):
         Need run inside executor.
         """
         try:
-            self.docker.containers.get(self.name)
+            self.sys_docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False
hassio/docker/interface.py
@@ -5,59 +5,63 @@ import logging

 import docker

-from .util import docker_process
+from .stats import DockerStats
 from ..const import LABEL_VERSION, LABEL_ARCH
+from ..coresys import CoreSysAttributes
+from ..utils import process_lock

 _LOGGER = logging.getLogger(__name__)


-class DockerInterface(object):
+class DockerInterface(CoreSysAttributes):
     """Docker hassio interface."""

-    def __init__(self, config, loop, api, image=None, timeout=30):
+    def __init__(self, coresys):
         """Initialize docker base wrapper."""
-        self.config = config
-        self.loop = loop
-        self.docker = api
+        self.coresys = coresys
+        self._meta = None
+        self.lock = asyncio.Lock(loop=coresys.loop)

-        self.image = image
-        self.timeout = timeout
-        self.version = None
-        self.arch = None
-        self._lock = asyncio.Lock(loop=loop)
+    @property
+    def timeout(self):
+        """Return timeout for docker actions."""
+        return 30

     @property
     def name(self):
         """Return name of docker container."""
         return None

+    @property
+    def image(self):
+        """Return name of docker image."""
+        if not self._meta:
+            return None
+        return self._meta['Config']['Image']

+    @property
+    def version(self):
+        """Return version of docker image."""
+        if self._meta and LABEL_VERSION in self._meta['Config']['Labels']:
+            return self._meta['Config']['Labels'][LABEL_VERSION]
+        return None

+    @property
+    def arch(self):
+        """Return arch of docker image."""
+        if self._meta and LABEL_ARCH in self._meta['Config']['Labels']:
+            return self._meta['Config']['Labels'][LABEL_ARCH]
+        return None

     @property
     def in_progress(self):
         """Return True if a task is in progress."""
-        return self._lock.locked()
+        return self.lock.locked()

-    def process_metadata(self, metadata, force=False):
-        """Read metadata and set it to object."""
-        # read image
-        if not self.image:
-            self.image = metadata['Config']['Image']
-
-        # read version
-        need_version = force or not self.version
-        if need_version and LABEL_VERSION in metadata['Config']['Labels']:
-            self.version = metadata['Config']['Labels'][LABEL_VERSION]
-        elif need_version:
-            _LOGGER.warning("Can't read version from %s", self.name)
-
-        # read arch
-        need_arch = force or not self.arch
-        if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
-            self.arch = metadata['Config']['Labels'][LABEL_ARCH]
-
-    @docker_process
+    @process_lock
     def install(self, tag):
         """Pull docker image."""
-        return self.loop.run_in_executor(None, self._install, tag)
+        return self.sys_run_in_executor(self._install, tag)

     def _install(self, tag):
         """Pull docker image.
@@ -66,10 +70,10 @@ class DockerInterface(object):
         """
         try:
             _LOGGER.info("Pull image %s tag %s.", self.image, tag)
-            image = self.docker.images.pull("{}:{}".format(self.image, tag))
+            image = self.sys_docker.images.pull(f"{self.image}:{tag}")

             image.tag(self.image, tag='latest')
-            self.process_metadata(image.attrs, force=True)
+            self._meta = image.attrs
         except docker.errors.APIError as err:
             _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
             return False
@@ -79,7 +83,7 @@ class DockerInterface(object):

     def exists(self):
         """Return True if docker image exists in local repo."""
-        return self.loop.run_in_executor(None, self._exists)
+        return self.sys_run_in_executor(self._exists)

     def _exists(self):
         """Return True if docker image exists in local repo.
@@ -87,8 +91,9 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            self.docker.images.get(self.image)
-        except docker.errors.DockerException:
+            image = self.sys_docker.images.get(self.image)
+            assert f"{self.image}:{self.version}" in image.tags
+        except (docker.errors.DockerException, AssertionError):
             return False

         return True
@@ -98,7 +103,7 @@ class DockerInterface(object):

         Return a Future.
         """
-        return self.loop.run_in_executor(None, self._is_running)
+        return self.sys_run_in_executor(self._is_running)

     def _is_running(self):
         """Return True if docker is Running.
@@ -106,8 +111,8 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            container = self.docker.containers.get(self.name)
-            image = self.docker.images.get(self.image)
+            container = self.sys_docker.containers.get(self.name)
+            image = self.sys_docker.images.get(self.image)
         except docker.errors.DockerException:
             return False

@@ -115,16 +120,16 @@ class DockerInterface(object):
         if container.status != 'running':
             return False

-        # we run on a old image, stop and start it
+        # we run on an old image, stop and start it
         if container.image.id != image.id:
             return False

         return True

-    @docker_process
+    @process_lock
     def attach(self):
         """Attach to running docker container."""
-        return self.loop.run_in_executor(None, self._attach)
+        return self.sys_run_in_executor(self._attach)

     def _attach(self):
         """Attach to running docker container.
@@ -133,22 +138,21 @@ class DockerInterface(object):
         """
         try:
             if self.image:
-                obj_data = self.docker.images.get(self.image).attrs
+                self._meta = self.sys_docker.images.get(self.image).attrs
             else:
-                obj_data = self.docker.containers.get(self.name).attrs
+                self._meta = self.sys_docker.containers.get(self.name).attrs
         except docker.errors.DockerException:
             return False

-        self.process_metadata(obj_data)
         _LOGGER.info(
             "Attach to image %s with version %s", self.image, self.version)

         return True

-    @docker_process
+    @process_lock
     def run(self):
         """Run docker image."""
-        return self.loop.run_in_executor(None, self._run)
+        return self.sys_run_in_executor(self._run)

     def _run(self):
         """Run docker image.
@@ -157,10 +161,10 @@ class DockerInterface(object):
         """
         raise NotImplementedError()

-    @docker_process
+    @process_lock
     def stop(self):
         """Stop/remove docker container."""
-        return self.loop.run_in_executor(None, self._stop)
+        return self.sys_run_in_executor(self._stop)

     def _stop(self):
         """Stop/remove and remove docker container.
@@ -168,7 +172,7 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            container = self.docker.containers.get(self.name)
+            container = self.sys_docker.containers.get(self.name)
         except docker.errors.DockerException:
             return False

@@ -183,10 +187,10 @@ class DockerInterface(object):

         return True

-    @docker_process
+    @process_lock
     def remove(self):
         """Remove docker images."""
-        return self.loop.run_in_executor(None, self._remove)
+        return self.sys_run_in_executor(self._remove)

     def _remove(self):
         """Remove docker images.
@@ -201,27 +205,24 @@ class DockerInterface(object):

         try:
             with suppress(docker.errors.ImageNotFound):
-                self.docker.images.remove(
-                    image="{}:latest".format(self.image), force=True)
+                self.sys_docker.images.remove(
+                    image=f"{self.image}:latest", force=True)

             with suppress(docker.errors.ImageNotFound):
-                self.docker.images.remove(
-                    image="{}:{}".format(self.image, self.version), force=True)
+                self.sys_docker.images.remove(
+                    image=f"{self.image}:{self.version}", force=True)

         except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't remove image %s -> %s", self.image, err)
+            _LOGGER.warning("Can't remove image %s: %s", self.image, err)
             return False

         # clean metadata
-        self.version = None
-        self.arch = None
-
+        self._meta = None
         return True

-    @docker_process
+    @process_lock
     def update(self, tag):
         """Update a docker image."""
-        return self.loop.run_in_executor(None, self._update, tag)
+        return self.sys_run_in_executor(self._update, tag)

     def _update(self, tag):
         """Update a docker image.
@@ -246,7 +247,7 @@ class DockerInterface(object):

         Return a Future.
         """
-        return self.loop.run_in_executor(None, self._logs)
+        return self.sys_run_in_executor(self._logs)

     def _logs(self):
         """Return docker logs of container.
@@ -254,44 +255,19 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            container = self.docker.containers.get(self.name)
+            container = self.sys_docker.containers.get(self.name)
         except docker.errors.DockerException:
             return b""

         try:
             return container.logs(tail=100, stdout=True, stderr=True)
         except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't grab logs from %s -> %s", self.image, err)
+            _LOGGER.warning("Can't grab logs from %s: %s", self.image, err)

-    @docker_process
-    def restart(self):
-        """Restart docker container."""
-        return self.loop.run_in_executor(None, self._restart)
-
-    def _restart(self):
-        """Restart docker container.
-
-        Need run inside executor.
-        """
-        try:
-            container = self.docker.containers.get(self.name)
-        except docker.errors.DockerException:
-            return False
-
-        _LOGGER.info("Restart %s", self.image)
-
-        try:
-            container.restart(timeout=self.timeout)
-        except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't restart %s -> %s", self.image, err)
-            return False
-
-        return True
-
-    @docker_process
+    @process_lock
     def cleanup(self):
         """Check if old version exists and cleanup."""
-        return self.loop.run_in_executor(None, self._cleanup)
+        return self.sys_run_in_executor(self._cleanup)

     def _cleanup(self):
         """Check if old version exists and cleanup.
@@ -299,25 +275,25 @@ class DockerInterface(object):
         Need run inside executor.
         """
         try:
-            latest = self.docker.images.get(self.image)
+            latest = self.sys_docker.images.get(self.image)
         except docker.errors.DockerException:
             _LOGGER.warning("Can't find %s for cleanup", self.image)
             return False

-        for image in self.docker.images.list(name=self.image):
+        for image in self.sys_docker.images.list(name=self.image):
             if latest.id == image.id:
                 continue

             with suppress(docker.errors.DockerException):
                 _LOGGER.info("Cleanup docker images: %s", image.tags)
-                self.docker.images.remove(image.id, force=True)
+                self.sys_docker.images.remove(image.id, force=True)

         return True

-    @docker_process
+    @process_lock
     def execute_command(self, command):
         """Create a temporary container and run command."""
-        return self.loop.run_in_executor(None, self._execute_command, command)
+        return self.sys_run_in_executor(self._execute_command, command)

     def _execute_command(self, command):
         """Create a temporary container and run command.
@@ -325,3 +301,24 @@ class DockerInterface(object):
         Need run inside executor.
         """
         raise NotImplementedError()

+    def stats(self):
+        """Read and return stats from container."""
+        return self.sys_run_in_executor(self._stats)
+
+    def _stats(self):
+        """Create a temporary container and run command.
+
+        Need run inside executor.
+        """
+        try:
+            container = self.sys_docker.containers.get(self.name)
+        except docker.errors.DockerException:
+            return None
+
+        try:
+            stats = container.stats(stream=False)
+            return DockerStats(stats)
+        except docker.errors.DockerException as err:
+            _LOGGER.error("Can't read stats from %s: %s", self.name, err)
+            return None
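The @docker_process → @process_lock swap above moves locking into hassio.utils. The decorator body is not part of this changeset; the following is only a plausible sketch of the pattern it serves (serializing coroutine methods on the instance's self.lock, which in_progress then checks with lock.locked()), not the project's actual implementation:

    # Hypothetical reconstruction of a process_lock-style decorator.
    from functools import wraps


    def process_lock(method):
        """Wrap a coroutine method so calls serialize on self.lock."""
        @wraps(method)
        async def wrap_api(api, *args, **kwargs):
            # api.lock is assumed to be an asyncio.Lock on the instance
            async with api.lock:
                return await method(api, *args, **kwargs)
        return wrap_api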
hassio/docker/network.py
@@ -8,8 +8,11 @@ from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE
 _LOGGER = logging.getLogger(__name__)


-class DockerNetwork(object):
-    """Internal HassIO Network."""
+class DockerNetwork:
+    """Internal HassIO Network.
+
+    This class is not AsyncIO safe!
+    """

     def __init__(self, dock):
         """Initialize internal hassio network."""
@@ -52,7 +55,8 @@ class DockerNetwork(object):
         ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

         return self.docker.networks.create(
-            DOCKER_NETWORK, driver='bridge', ipam=ipam_config, options={
+            DOCKER_NETWORK, driver='bridge', ipam=ipam_config,
+            enable_ipv6=False, options={
                 "com.docker.network.bridge.name": DOCKER_NETWORK,
             })

@@ -66,7 +70,7 @@ class DockerNetwork(object):
         try:
             self.network.connect(container, aliases=alias, ipv4_address=ipv4)
         except docker.errors.APIError as err:
-            _LOGGER.error("Can't link container to hassio-net -> %s", err)
+            _LOGGER.error("Can't link container to hassio-net: %s", err)
             return False

         self.network.reload()
@@ -86,4 +90,4 @@ class DockerNetwork(object):

         except docker.errors.APIError as err:
             _LOGGER.warning(
-                "Can't disconnect container from default -> %s", err)
+                "Can't disconnect container from default: %s", err)
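For reference, the enable_ipv6=False change above lands in a docker-py networks.create() call. A self-contained sketch of the same call shape, with invented subnet values (the real DOCKER_NETWORK_* constants live elsewhere in hassio/const.py):

    # Sketch: bridge network with a fixed IPAM pool, as DockerNetwork does.
    import docker

    client = docker.from_env()

    ipam_pool = docker.types.IPAMPool(
        subnet="172.30.32.0/23",    # hypothetical DOCKER_NETWORK_MASK
        gateway="172.30.32.1",
        iprange="172.30.32.0/24",   # hypothetical DOCKER_NETWORK_RANGE
    )
    ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

    network = client.networks.create(
        "hassio", driver="bridge", ipam=ipam_config, enable_ipv6=False,
        options={"com.docker.network.bridge.name": "hassio"},
    )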
hassio/docker/stats.py (new file, 90 lines)
@@ -0,0 +1,90 @@
"""Calc & represent docker stats data."""
from contextlib import suppress


class DockerStats:
    """Hold stats data from container inside."""

    def __init__(self, stats):
        """Initialize docker stats."""
        self._cpu = 0.0
        self._network_rx = 0
        self._network_tx = 0
        self._blk_read = 0
        self._blk_write = 0

        try:
            self._memory_usage = stats['memory_stats']['usage']
            self._memory_limit = stats['memory_stats']['limit']
        except KeyError:
            self._memory_usage = 0
            self._memory_limit = 0

        with suppress(KeyError):
            self._calc_cpu_percent(stats)

        with suppress(KeyError):
            self._calc_network(stats['networks'])

        with suppress(KeyError):
            self._calc_block_io(stats['blkio_stats'])

    def _calc_cpu_percent(self, stats):
        """Calculate CPU percent."""
        cpu_delta = stats['cpu_stats']['cpu_usage']['total_usage'] - \
            stats['precpu_stats']['cpu_usage']['total_usage']
        system_delta = stats['cpu_stats']['system_cpu_usage'] - \
            stats['precpu_stats']['system_cpu_usage']

        if system_delta > 0.0 and cpu_delta > 0.0:
            self._cpu = (cpu_delta / system_delta) * \
                len(stats['cpu_stats']['cpu_usage']['percpu_usage']) * 100.0

    def _calc_network(self, networks):
        """Calculate Network IO stats."""
        for _, stats in networks.items():
            self._network_rx += stats['rx_bytes']
            self._network_tx += stats['tx_bytes']

    def _calc_block_io(self, blkio):
        """Calculate block IO stats."""
        for stats in blkio['io_service_bytes_recursive']:
            if stats['op'] == 'Read':
                self._blk_read += stats['value']
            elif stats['op'] == 'Write':
                self._blk_write += stats['value']

    @property
    def cpu_percent(self):
        """Return CPU percent."""
        return self._cpu

    @property
    def memory_usage(self):
        """Return memory usage."""
        return self._memory_usage

    @property
    def memory_limit(self):
        """Return memory limit."""
        return self._memory_limit

    @property
    def network_rx(self):
        """Return network rx stats."""
        return self._network_rx

    @property
    def network_tx(self):
        """Return network tx stats."""
        return self._network_tx

    @property
    def blk_read(self):
        """Return block IO read stats."""
        return self._blk_read

    @property
    def blk_write(self):
        """Return block IO write stats."""
        return self._blk_write
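A worked example of the CPU formula in _calc_cpu_percent, using a hand-made stats payload (all values invented): cpu_delta = 200000, system_delta = 400000, four CPUs, so (200000 / 400000) * 4 * 100.0 = 200.0, i.e. two cores fully busy.

    # Assumes the DockerStats class defined above is in scope.
    sample = {
        'memory_stats': {'usage': 50 * 1024**2, 'limit': 512 * 1024**2},
        'cpu_stats': {
            'cpu_usage': {'total_usage': 400000, 'percpu_usage': [0, 0, 0, 0]},
            'system_cpu_usage': 10400000,
        },
        'precpu_stats': {
            'cpu_usage': {'total_usage': 200000},
            'system_cpu_usage': 10000000,
        },
    }

    stats = DockerStats(sample)
    assert stats.cpu_percent == 200.0       # two of four cores fully busy
    assert stats.memory_limit == 512 * 1024**2
    # missing 'networks'/'blkio_stats' keys are tolerated via suppress(KeyError)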
hassio/docker/supervisor.py (new file, 42 lines)
@@ -0,0 +1,42 @@
"""Init file for HassIO docker object."""
import logging
import os

import docker

from .interface import DockerInterface
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


class DockerSupervisor(DockerInterface, CoreSysAttributes):
    """Docker hassio wrapper for Supervisor."""

    @property
    def name(self):
        """Return name of docker container."""
        return os.environ['SUPERVISOR_NAME']

    def _attach(self):
        """Attach to running docker container.

        Need run inside executor.
        """
        try:
            container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        self._meta = container.attrs
        _LOGGER.info("Attach to supervisor %s with version %s",
                     self.image, self.version)

        # if already attached
        if container in self.sys_docker.network.containers:
            return True

        # attach to network
        return self.sys_docker.network.attach_container(
            container, alias=['hassio'],
            ipv4=self.sys_docker.network.supervisor)
hassio/exceptions.py (new file, 116 lines)
@@ -0,0 +1,116 @@
"""Core Exceptions."""


class HassioError(Exception):
    """Root exception."""
    pass


class HassioNotSupportedError(HassioError):
    """Function is not supported."""
    pass


# HomeAssistant

class HomeAssistantError(HassioError):
    """Home Assistant exception."""
    pass


class HomeAssistantUpdateError(HomeAssistantError):
    """Error on update of a Home Assistant."""
    pass


class HomeAssistantAPIError(HomeAssistantError):
    """Home Assistant API exception."""
    pass


class HomeAssistantAuthError(HomeAssistantAPIError):
    """Home Assistant Auth API exception."""
    pass


# HassOS

class HassOSError(HassioError):
    """HassOS exception."""
    pass


class HassOSUpdateError(HassOSError):
    """Error on update of a HassOS."""
    pass


class HassOSNotSupportedError(HassioNotSupportedError):
    """Function not supported by HassOS."""
    pass


# Updater

class HassioUpdaterError(HassioError):
    """Error on Updater."""
    pass


# Host

class HostError(HassioError):
    """Internal Host error."""
    pass


class HostNotSupportedError(HassioNotSupportedError):
    """Host function is not supported."""
    pass


class HostServiceError(HostError):
    """Host service functions fail."""
    pass


class HostAppArmorError(HostError):
    """Host apparmor functions fail."""


# utils/gdbus

class DBusError(HassioError):
    """DBus generic error."""
    pass


class DBusNotConnectedError(HostNotSupportedError):
    """DBus is not connected when calling a method."""


class DBusFatalError(DBusError):
    """DBus call going wrong."""
    pass


class DBusParseError(DBusError):
    """DBus parse error."""
    pass


# util/apparmor

class AppArmorError(HostAppArmorError):
    """General AppArmor error."""
    pass


class AppArmorFileError(AppArmorError):
    """AppArmor profile file error."""
    pass


class AppArmorInvalidError(AppArmorError):
    """AppArmor profile validate error."""
    pass
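Because everything above derives from HassioError, callers can catch as narrowly or as broadly as they need: specific classes first, the root class as a backstop. An illustrative (hypothetical) caller:

    # Sketch only; safe_os_update and its logging are not part of this changeset.
    import logging

    _LOGGER = logging.getLogger(__name__)


    async def safe_os_update(hassos):
        """Narrow except clauses first, root class last."""
        try:
            await hassos.update()
        except HassOSUpdateError:
            _LOGGER.warning("OS update failed, will retry later")
        except HassioNotSupportedError:
            _LOGGER.info("Host has no HassOS, skipping")
        except HassioError:
            _LOGGER.exception("Unexpected supervisor error")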
hassio/hassos.py (new file, 186 lines)
@@ -0,0 +1,186 @@
"""HassOS support on supervisor."""
import asyncio
import logging
from pathlib import Path

import aiohttp
from cpe import CPE

from .coresys import CoreSysAttributes
from .const import URL_HASSOS_OTA
from .docker.hassos_cli import DockerHassOSCli
from .exceptions import HassOSNotSupportedError, HassOSUpdateError, DBusError

_LOGGER = logging.getLogger(__name__)


class HassOS(CoreSysAttributes):
    """HassOS interface inside HassIO."""

    def __init__(self, coresys):
        """Initialize HassOS handler."""
        self.coresys = coresys
        self.instance = DockerHassOSCli(coresys)
        self._available = False
        self._version = None
        self._board = None

    @property
    def available(self):
        """Return True, if HassOS on host."""
        return self._available

    @property
    def version(self):
        """Return version of HassOS."""
        return self._version

    @property
    def version_cli(self):
        """Return version of HassOS cli."""
        return self.instance.version

    @property
    def version_latest(self):
        """Return latest available version of HassOS."""
        return self.sys_updater.version_hassos

    @property
    def version_cli_latest(self):
        """Return latest available version of HassOS cli."""
        return self.sys_updater.version_hassos_cli

    @property
    def need_update(self):
        """Return true if a HassOS update is available."""
        return self.version != self.version_latest

    @property
    def need_cli_update(self):
        """Return true if a HassOS cli update is available."""
        return self.version_cli != self.version_cli_latest

    @property
    def board(self):
        """Return board name."""
        return self._board

    def _check_host(self):
        """Check if HassOS is available."""
        if not self.available:
            _LOGGER.error("No HassOS available")
            raise HassOSNotSupportedError()

    async def _download_raucb(self, version):
        """Download rauc bundle (OTA) from github."""
        url = URL_HASSOS_OTA.format(version=version, board=self.board)
        raucb = Path(self.sys_config.path_tmp, f"hassos-{version}.raucb")

        try:
            _LOGGER.info("Fetch OTA update from %s", url)
            async with self.sys_websession.get(url) as request:
                if request.status != 200:
                    raise HassOSUpdateError()

                # Download RAUCB file
                with raucb.open('wb') as ota_file:
                    while True:
                        chunk = await request.content.read(1048576)
                        if not chunk:
                            break
                        ota_file.write(chunk)

            _LOGGER.info("OTA update is downloaded on %s", raucb)
            return raucb

        except (aiohttp.ClientError, asyncio.TimeoutError) as err:
            _LOGGER.warning("Can't fetch versions from %s: %s", url, err)

        except OSError as err:
            _LOGGER.error("Can't write ota file: %s", err)

        raise HassOSUpdateError()

    async def load(self):
        """Load HassOS data."""
        try:
            # Check needed host functions
            assert self.sys_dbus.rauc.is_connected
            assert self.sys_dbus.systemd.is_connected
            assert self.sys_dbus.hostname.is_connected

            assert self.sys_host.info.cpe is not None
            cpe = CPE(self.sys_host.info.cpe)
            assert cpe.get_product()[0] == 'hassos'
        except (AssertionError, NotImplementedError):
            _LOGGER.debug("Found no HassOS")
            return

        # Store meta data
        self._available = True
        self._version = cpe.get_version()[0]
        self._board = cpe.get_target_hardware()[0]

        _LOGGER.info("Detect HassOS %s on host system", self.version)
        await self.instance.attach()

    def config_sync(self):
        """Trigger a host config reload from usb.

        Return a coroutine.
        """
        self._check_host()

        _LOGGER.info("Sync config from USB on HassOS.")
        return self.sys_host.services.restart('hassos-config.service')

    async def update(self, version=None):
        """Update HassOS system."""
        version = version or self.version_latest

        # Check installed version
        self._check_host()
        if version == self.version:
            _LOGGER.warning("Version %s is already installed", version)
            raise HassOSUpdateError()

        # Fetch files from internet
        int_ota = await self._download_raucb(version)
        ext_ota = Path(self.sys_config.path_extern_tmp, int_ota.name)

        try:
            await self.sys_dbus.rauc.install(ext_ota)
            completed = await self.sys_dbus.rauc.signal_completed()

        except DBusError:
            _LOGGER.error("Rauc communication error")
            raise HassOSUpdateError() from None

        finally:
            int_ota.unlink()

        # Update success
        if 0 in completed:
            _LOGGER.info("Install HassOS %s success", version)
            self.sys_create_task(self.sys_host.control.reboot())
            return

        # Update fails
        rauc_status = await self.sys_dbus.get_properties()
        _LOGGER.error(
            "HassOS update fails with: %s", rauc_status.get('LastError'))
        raise HassOSUpdateError()

    async def update_cli(self, version=None):
        """Update local HassOS cli."""
        version = version or self.version_cli_latest

        if version == self.version_cli:
            _LOGGER.warning("Version %s is already installed for CLI", version)
            raise HassOSUpdateError()

        if await self.instance.update(version):
            return

        _LOGGER.error("HassOS CLI update fails.")
        raise HassOSUpdateError()
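HassOS.load() above leans on the cpe library to parse the host's CPE identifier. A standalone sketch of those calls, with an invented CPE 2.3 string (the exact string a HassOS host reports may differ; the get_* accessors return lists, hence the [0] indexing):

    # Sketch only; the CPE string below is a made-up example.
    from cpe import CPE

    uri = "cpe:2.3:o:home_assistant:hassos:1.9:*:*:*:*:*:rpi3:*"
    cpe = CPE(uri)

    assert cpe.get_product()[0] == "hassos"
    version = cpe.get_version()[0]          # -> "1.9"
    board = cpe.get_target_hardware()[0]    # -> "rpi3"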
@@ -1,50 +1,73 @@
|
||||
"""HomeAssistant control object."""
|
||||
import asyncio
|
||||
from contextlib import asynccontextmanager, suppress
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import time
|
||||
|
||||
import aiohttp
|
||||
from aiohttp.hdrs import CONTENT_TYPE
|
||||
import async_timeout
|
||||
from aiohttp import hdrs
|
||||
import attr
|
||||
|
||||
from .const import (
|
||||
FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
|
||||
ATTR_VERSION, ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
|
||||
HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
|
||||
from .dock.homeassistant import DockerHomeAssistant
|
||||
from .tools import JsonConfig, convert_to_ascii
|
||||
FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_UUID,
|
||||
ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
|
||||
ATTR_WAIT_BOOT, ATTR_REFRESH_TOKEN,
|
||||
HEADER_HA_ACCESS)
|
||||
from .coresys import CoreSysAttributes
|
||||
from .docker.homeassistant import DockerHomeAssistant
|
||||
from .exceptions import (
|
||||
HomeAssistantUpdateError, HomeAssistantError, HomeAssistantAPIError,
|
||||
HomeAssistantAuthError)
|
||||
from .utils import convert_to_ascii, process_lock
|
||||
from .utils.json import JsonConfig
|
||||
from .validate import SCHEMA_HASS_CONFIG
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
ConfigResult = attr.make_class('ConfigResult', ['valid', 'log'], frozen=True)
|
||||
|
||||
class HomeAssistant(JsonConfig):
|
||||
|
||||
class HomeAssistant(JsonConfig, CoreSysAttributes):
|
||||
"""Hass core object for handle it."""
|
||||
|
||||
def __init__(self, config, loop, docker, updater):
|
||||
def __init__(self, coresys):
|
||||
"""Initialize hass object."""
|
||||
super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.updater = updater
|
||||
self.docker = DockerHomeAssistant(config, loop, docker, self)
|
||||
self.api_ip = docker.network.gateway
|
||||
self.websession = aiohttp.ClientSession(
|
||||
connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)
|
||||
self.coresys = coresys
|
||||
self.instance = DockerHomeAssistant(coresys)
|
||||
self.lock = asyncio.Lock(loop=coresys.loop)
|
||||
self._error_state = False
|
||||
# We don't persist access tokens. Instead we fetch new ones when needed
|
||||
self.access_token = None
|
||||
|
||||
async def prepare(self):
|
||||
async def load(self):
|
||||
"""Prepare HomeAssistant object."""
|
||||
if not await self.docker.exists():
|
||||
if await self.instance.attach():
|
||||
return
|
||||
|
||||
_LOGGER.info("No HomeAssistant docker %s found.", self.image)
|
||||
if self.is_custom_image:
|
||||
await self.install()
|
||||
else:
|
||||
await self.install_landingpage()
|
||||
else:
|
||||
await self.docker.attach()
|
||||
|
||||
@property
|
||||
def machine(self):
|
||||
"""Return System Machines."""
|
||||
return self.instance.machine
|
||||
|
||||
@property
|
||||
def error_state(self):
|
||||
"""Return True if system is in error."""
|
||||
return self._error_state
|
||||
|
||||
@property
|
||||
def api_ip(self):
|
||||
"""Return IP of HomeAssistant instance."""
|
||||
return self.sys_docker.network.gateway
|
||||
|
||||
@property
|
||||
def api_port(self):
|
||||
@@ -55,7 +78,6 @@ class HomeAssistant(JsonConfig):
|
||||
def api_port(self, value):
|
||||
"""Set network port for home-assistant instance."""
|
||||
self._data[ATTR_PORT] = value
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def api_password(self):
|
||||
@@ -66,7 +88,6 @@ class HomeAssistant(JsonConfig):
|
||||
def api_password(self, value):
|
||||
"""Set password for home-assistant instance."""
|
||||
self._data[ATTR_PASSWORD] = value
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def api_ssl(self):
|
||||
@@ -77,7 +98,6 @@ class HomeAssistant(JsonConfig):
|
||||
def api_ssl(self, value):
|
||||
"""Set SSL for home-assistant instance."""
|
||||
self._data[ATTR_SSL] = value
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def api_url(self):
|
||||
@@ -95,42 +115,57 @@ class HomeAssistant(JsonConfig):
|
||||
def watchdog(self, value):
|
||||
"""Return True if the watchdog should protect Home-Assistant."""
|
||||
self._data[ATTR_WATCHDOG] = value
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def wait_boot(self):
|
||||
"""Return time to wait for Home-Assistant startup."""
|
||||
return self._data[ATTR_WAIT_BOOT]
|
||||
|
||||
@wait_boot.setter
|
||||
def wait_boot(self, value):
|
||||
"""Set time to wait for Home-Assistant startup."""
|
||||
self._data[ATTR_WAIT_BOOT] = value
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
"""Return version of running homeassistant."""
|
||||
return self.docker.version
|
||||
return self.instance.version
|
||||
|
||||
@property
|
||||
def last_version(self):
|
||||
"""Return last available version of homeassistant."""
|
||||
if self.is_custom_image:
|
||||
return self._data.get(ATTR_LAST_VERSION)
|
||||
return self.updater.version_homeassistant
|
||||
return self.sys_updater.version_homeassistant
|
||||
|
||||
@last_version.setter
|
||||
def last_version(self, value):
|
||||
"""Set last available version of homeassistant."""
|
||||
if value:
|
||||
self._data[ATTR_LAST_VERSION] = value
|
||||
else:
|
||||
self._data.pop(ATTR_LAST_VERSION, None)
|
||||
|
||||
@property
|
||||
def image(self):
|
||||
"""Return image name of hass containter."""
|
||||
if ATTR_IMAGE in self._data:
|
||||
if self._data.get(ATTR_IMAGE):
|
||||
return self._data[ATTR_IMAGE]
|
||||
return os.environ['HOMEASSISTANT_REPOSITORY']
|
||||
|
||||
@image.setter
|
||||
def image(self, value):
|
||||
"""Set image name of hass containter."""
|
||||
if value:
|
||||
self._data[ATTR_IMAGE] = value
|
||||
else:
|
||||
self._data.pop(ATTR_IMAGE, None)
|
||||
|
||||
@property
|
||||
def is_custom_image(self):
|
||||
"""Return True if a custom image is used."""
|
||||
return ATTR_IMAGE in self._data
|
||||
|
||||
@property
|
||||
def devices(self):
|
||||
"""Return extend device mapping."""
|
||||
return self._data[ATTR_DEVICES]
|
||||
|
||||
@devices.setter
|
||||
def devices(self, value):
|
||||
"""Set extend device mapping."""
|
||||
self._data[ATTR_DEVICES] = value
|
||||
self.save()
|
||||
return all(attr in self._data for attr in
|
||||
(ATTR_IMAGE, ATTR_LAST_VERSION))
|
||||
|
||||
@property
|
||||
def boot(self):
|
||||
@@ -141,150 +176,297 @@ class HomeAssistant(JsonConfig):
|
||||
def boot(self, value):
|
||||
"""Set home-assistant boot options."""
|
||||
self._data[ATTR_BOOT] = value
|
||||
self.save()
|
||||
|
||||
def set_custom(self, image, version):
|
||||
"""Set a custom image for homeassistant."""
|
||||
# reset
|
||||
if image is None and version is None:
|
||||
self._data.pop(ATTR_IMAGE, None)
|
||||
self._data.pop(ATTR_VERSION, None)
|
||||
@property
|
||||
def uuid(self):
|
||||
"""Return a UUID of this HomeAssistant."""
|
||||
return self._data[ATTR_UUID]
|
||||
|
||||
self.docker.image = self.image
|
||||
else:
|
||||
if image:
|
||||
self._data[ATTR_IMAGE] = image
|
||||
self.docker.image = image
|
||||
if version:
|
||||
self._data[ATTR_VERSION] = version
|
||||
self.save()
|
||||
@property
|
||||
def refresh_token(self):
|
||||
"""Return the refresh token to authenticate with HomeAssistant."""
|
||||
return self._data.get(ATTR_REFRESH_TOKEN)
|
||||
|
||||
@refresh_token.setter
|
||||
def refresh_token(self, value):
|
||||
"""Set Home Assistant refresh_token."""
|
||||
self._data[ATTR_REFRESH_TOKEN] = value
|
||||
|
||||
@process_lock
|
||||
async def install_landingpage(self):
|
||||
"""Install a landingpage."""
|
||||
_LOGGER.info("Setup HomeAssistant landingpage")
|
||||
while True:
|
||||
if await self.docker.install('landingpage'):
|
||||
if await self.instance.install('landingpage'):
|
||||
break
|
||||
_LOGGER.warning("Fails install landingpage, retry after 60sec")
|
||||
await asyncio.sleep(60, loop=self.loop)
|
||||
await asyncio.sleep(60)
|
||||
|
||||
# run landingpage after installation
|
||||
await self.docker.run()
|
||||
# Run landingpage after installation
|
||||
_LOGGER.info("Start landingpage")
|
||||
try:
|
||||
await self._start()
|
||||
except HomeAssistantError:
|
||||
_LOGGER.warning("Can't start landingpage")
|
||||
|
||||
@process_lock
|
||||
async def install(self):
|
||||
"""Install a landingpage."""
|
||||
_LOGGER.info("Setup HomeAssistant")
|
||||
while True:
|
||||
# read homeassistant tag and install it
|
||||
if not self.last_version:
|
||||
await self.updater.fetch_data()
|
||||
await self.sys_updater.reload()
|
||||
|
||||
tag = self.last_version
|
||||
if tag and await self.docker.install(tag):
|
||||
if tag and await self.instance.install(tag):
|
||||
break
|
||||
_LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
|
||||
await asyncio.sleep(60, loop=self.loop)
|
||||
await asyncio.sleep(60)
|
||||
|
||||
# finishing
|
||||
_LOGGER.info("HomeAssistant docker now installed")
|
||||
if self.boot:
|
||||
await self.docker.run()
|
||||
await self.docker.cleanup()
|
||||
try:
|
||||
if not self.boot:
|
||||
return
|
||||
_LOGGER.info("Start HomeAssistant")
|
||||
await self._start()
|
||||
except HomeAssistantError:
|
||||
_LOGGER.error("Can't start HomeAssistant!")
|
||||
finally:
|
||||
await self.instance.cleanup()
|
||||
|
||||
@process_lock
|
||||
async def update(self, version=None):
|
||||
"""Update HomeAssistant version."""
|
||||
version = version or self.last_version
|
||||
running = await self.docker.is_running()
|
||||
rollback = self.version if not self.error_state else None
|
||||
running = await self.instance.is_running()
|
||||
exists = await self.instance.exists()
|
||||
|
||||
if version == self.docker.version:
|
||||
if exists and version == self.instance.version:
|
||||
_LOGGER.warning("Version %s is already installed", version)
|
||||
return False
|
||||
return HomeAssistantUpdateError()
|
||||
|
||||
# process a update
|
||||
async def _update(to_version):
|
||||
"""Run Home Assistant update."""
|
||||
try:
|
||||
return await self.docker.update(version)
|
||||
_LOGGER.info("Update HomeAssistant to version %s", to_version)
|
||||
if not await self.instance.update(to_version):
|
||||
raise HomeAssistantUpdateError()
|
||||
finally:
|
||||
if running:
|
||||
await self.docker.run()
|
||||
await self._start()
|
||||
_LOGGER.info("Successfull run HomeAssistant %s", to_version)
|
||||
|
||||
def run(self):
|
||||
# Update Home Assistant
|
||||
with suppress(HomeAssistantError):
|
||||
await _update(version)
|
||||
return
|
||||
|
||||
# Update going wrong, revert it
|
||||
if self.error_state and rollback:
|
||||
_LOGGER.fatal("HomeAssistant update fails -> rollback!")
|
||||
await _update(rollback)
|
||||
else:
|
||||
raise HomeAssistantUpdateError()
|
||||
|
||||
async def _start(self):
|
||||
"""Start HomeAssistant docker & wait."""
|
||||
if not await self.instance.run():
|
||||
raise HomeAssistantError()
|
||||
await self._block_till_run()
|
||||
|
||||
@process_lock
|
||||
def start(self):
|
||||
"""Run HomeAssistant docker.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.run()
|
||||
return self._start()
|
||||
|
||||
@process_lock
|
||||
def stop(self):
|
||||
"""Stop HomeAssistant docker.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.stop()
|
||||
return self.instance.stop()
|
||||
|
||||
def restart(self):
|
||||
"""Restart HomeAssistant docker.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.restart()
|
||||
@process_lock
|
||||
async def restart(self):
|
||||
"""Restart HomeAssistant docker."""
|
||||
await self.instance.stop()
|
||||
await self._start()
|
||||
|
||||
def logs(self):
|
||||
"""Get HomeAssistant docker logs.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.logs()
|
||||
return self.instance.logs()
|
||||
|
||||
def stats(self):
|
||||
"""Return stats of HomeAssistant.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.stats()
|
||||
|
||||
def is_running(self):
|
||||
"""Return True if docker container is running.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.is_running()
|
||||
return self.instance.is_running()
|
||||
|
||||
def is_initialize(self):
|
||||
"""Return True if a docker container is exists.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.is_initialize()
|
||||
return self.instance.is_initialize()
|
||||
|
||||
@property
|
||||
def in_progress(self):
|
||||
"""Return True if a task is in progress."""
|
||||
return self.docker.in_progress
|
||||
return self.instance.in_progress or self.lock.locked()
|
||||
|
||||
async def check_config(self):
|
||||
"""Run homeassistant config check."""
|
||||
exit_code, log = await self.docker.execute_command(
|
||||
result = await self.instance.execute_command(
|
||||
"python3 -m homeassistant -c /config --script check_config"
|
||||
)
|
||||
|
||||
# if not valid
|
||||
if exit_code is None:
|
||||
return (False, "")
|
||||
if result.exit_code is None:
|
||||
raise HomeAssistantError()
|
||||
|
||||
# parse output
|
||||
log = convert_to_ascii(log)
|
||||
if exit_code != 0 or RE_YAML_ERROR.search(log):
|
||||
return (False, log)
|
||||
return (True, log)
|
||||
log = convert_to_ascii(result.output)
|
||||
if result.exit_code != 0 or RE_YAML_ERROR.search(log):
|
||||
return ConfigResult(False, log)
|
||||
return ConfigResult(True, log)
|
||||
|
||||
async def check_api_state(self):
|
||||
"""Check if Home-Assistant up and running."""
|
||||
url = "{}/api/".format(self.api_url)
|
||||
header = {CONTENT_TYPE: CONTENT_TYPE_JSON}
|
||||
async def ensure_access_token(self):
|
||||
"""Ensures there is an access token."""
|
||||
if self.access_token is not None:
|
||||
return
|
||||
|
||||
with suppress(asyncio.TimeoutError, aiohttp.ClientError):
|
||||
async with self.sys_websession_ssl.post(
|
||||
f"{self.api_url}/auth/token",
|
||||
timeout=30,
|
||||
data={
|
||||
"grant_type": "refresh_token",
|
||||
"refresh_token": self.refresh_token
|
||||
}
|
||||
) as resp:
|
||||
if resp.status == 200:
|
||||
_LOGGER.info("Updated HomeAssistant API token")
|
||||
tokens = await resp.json()
|
||||
self.access_token = tokens['access_token']
|
||||
return
|
||||
|
||||
_LOGGER.error("Can't update HomeAssistant access token!")
|
||||
raise HomeAssistantAuthError()
|
||||
|
||||
@asynccontextmanager
|
||||
async def make_request(self, method, path, json=None, content_type=None,
|
||||
data=None, timeout=30):
|
||||
"""Async context manager to make a request with right auth."""
|
||||
url = f"{self.api_url}/{path}"
|
||||
headers = {}
|
||||
|
||||
# Passthrough content type
|
||||
if content_type is not None:
|
||||
headers[hdrs.CONTENT_TYPE] = content_type
|
||||
|
||||
# Set old API Password
|
||||
if self.api_password:
|
||||
header.update({HEADER_HA_ACCESS: self.api_password})
|
||||
headers[HEADER_HA_ACCESS] = self.api_password
|
||||
|
||||
for _ in (1, 2):
|
||||
# Prepare Access token
|
||||
if self.refresh_token:
|
||||
await self.ensure_access_token()
|
||||
headers[hdrs.AUTHORIZATION] = f'Bearer {self.access_token}'
|
||||
|
||||
try:
|
||||
async with async_timeout.timeout(30, loop=self.loop):
|
||||
async with self.websession.get(url, headers=header) as request:
|
||||
status = request.status
|
||||
async with getattr(self.sys_websession_ssl, method)(
|
||||
url, data=data, timeout=timeout, json=json,
|
||||
headers=headers
|
||||
) as resp:
|
||||
# Access token expired
|
||||
if resp.status == 401 and self.refresh_token:
|
||||
self.access_token = None
|
||||
continue
|
||||
yield resp
|
||||
return
|
||||
except (asyncio.TimeoutError, aiohttp.ClientError) as err:
|
||||
_LOGGER.error("Error on call %s: %s", url, err)
|
||||
break
|
||||
|
||||
except (asyncio.TimeoutError, aiohttp.ClientError):
|
||||
raise HomeAssistantAPIError()
|
||||
|
||||
async def check_api_state(self):
|
||||
"""Return True if Home-Assistant up and running."""
|
||||
with suppress(HomeAssistantAPIError):
|
||||
async with self.make_request('get', 'api/') as resp:
|
||||
if resp.status in (200, 201):
|
||||
return True
|
||||
err = resp.status
|
||||
|
||||
_LOGGER.warning("Home-Assistant API config missmatch: %d", err)
|
||||
return False
|
||||
|
||||
if status not in (200, 201):
|
||||
_LOGGER.warning("Home-Assistant API config missmatch")
|
||||
async def send_event(self, event_type, event_data=None):
|
||||
"""Send event to Home-Assistant."""
|
||||
with suppress(HomeAssistantAPIError):
|
||||
async with self.make_request(
|
||||
'get', f'api/events/{event_type}'
|
||||
) as resp:
|
||||
if resp.status in (200, 201):
|
||||
return
|
||||
err = resp.status
|
||||
|
||||
_LOGGER.warning("HomeAssistant event %s fails: %s", event_type, err)
|
||||
return HomeAssistantError()
|
||||
|
||||

    async def _block_till_run(self):
        """Block until Home-Assistant is booting up or startup timeout."""
        start_time = time.monotonic()

        def check_port():
            """Check if the port is mapped."""
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                result = sock.connect_ex((str(self.api_ip), self.api_port))
                sock.close()

                # Check if the port is available
                if result == 0:
                    return True
            except OSError:
                pass
            return False

        while time.monotonic() - start_time < self.wait_boot:
            # Check if the API responds
            if await self.sys_run_in_executor(check_port):
                _LOGGER.info("Detect a running HomeAssistant instance")
                self._error_state = False
                return

            # Wait, and don't hammer the system
            await asyncio.sleep(10)

            # Check if the container is still running
            if not await self.instance.is_running():
                _LOGGER.error("Home Assistant has crashed!")
                break

        _LOGGER.warning("No longer waiting for HomeAssistant startup!")
        self._error_state = True
        raise HomeAssistantError()

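The check_port helper above leans on connect_ex, which returns an errno instead of raising on failure. A minimal standalone sketch of the same probe (host and port are example values, not taken from the diff):

import socket

def check_port(host, port):
    """Return True if a TCP connect to (host, port) succeeds."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(2)  # don't hang on filtered ports
    try:
        # connect_ex returns 0 on success instead of raising an exception
        return sock.connect_ex((host, port)) == 0
    except OSError:
        return False
    finally:
        sock.close()

print(check_port("127.0.0.1", 8123))  # True once something listens there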
93 hassio/host/__init__.py Normal file
@@ -0,0 +1,93 @@
"""Host function like audio/dbus/systemd."""
from contextlib import suppress
import logging

from .alsa import AlsaAudio
from .apparmor import AppArmorControl
from .control import SystemControl
from .info import InfoCenter
from .services import ServiceManager
from ..const import (
    FEATURES_REBOOT, FEATURES_SHUTDOWN, FEATURES_HOSTNAME, FEATURES_SERVICES,
    FEATURES_HASSOS)
from ..coresys import CoreSysAttributes
from ..exceptions import HassioError

_LOGGER = logging.getLogger(__name__)


class HostManager(CoreSysAttributes):
    """Manage supported functions from the host."""

    def __init__(self, coresys):
        """Initialize host manager."""
        self.coresys = coresys
        self._alsa = AlsaAudio(coresys)
        self._apparmor = AppArmorControl(coresys)
        self._control = SystemControl(coresys)
        self._info = InfoCenter(coresys)
        self._services = ServiceManager(coresys)

    @property
    def alsa(self):
        """Return host ALSA handler."""
        return self._alsa

    @property
    def apparmor(self):
        """Return host AppArmor handler."""
        return self._apparmor

    @property
    def control(self):
        """Return host control handler."""
        return self._control

    @property
    def info(self):
        """Return host info handler."""
        return self._info

    @property
    def services(self):
        """Return host services handler."""
        return self._services

    @property
    def supported_features(self):
        """Return a list of supported host features."""
        features = []

        if self.sys_dbus.systemd.is_connected:
            features.extend([
                FEATURES_REBOOT,
                FEATURES_SHUTDOWN,
                FEATURES_SERVICES,
            ])

        if self.sys_dbus.hostname.is_connected:
            features.append(FEATURES_HOSTNAME)

        if self.sys_hassos.available:
            features.append(FEATURES_HASSOS)

        return features

    async def reload(self):
        """Reload host functions."""
        if self.sys_dbus.hostname.is_connected:
            await self.info.update()

        if self.sys_dbus.systemd.is_connected:
            await self.services.update()

    async def load(self):
        """Load host information."""
        with suppress(HassioError):
            await self.reload()

        # Load profile data
        try:
            await self.apparmor.load()
        except HassioError as err:
            _LOGGER.warning("Loading host AppArmor on startup failed: %s", err)
137 hassio/host/alsa.py Normal file
@@ -0,0 +1,137 @@
"""Host Audio-support."""
import logging
import json
from pathlib import Path
from string import Template

import attr

from ..const import ATTR_INPUT, ATTR_OUTPUT, ATTR_DEVICES, ATTR_NAME
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

# pylint: disable=invalid-name
DefaultConfig = attr.make_class('DefaultConfig', ['input', 'output'])


class AlsaAudio(CoreSysAttributes):
    """Handle audio ALSA host data."""

    def __init__(self, coresys):
        """Initialize ALSA audio system."""
        self.coresys = coresys
        self._data = {
            ATTR_INPUT: {},
            ATTR_OUTPUT: {},
        }
        self._cache = 0
        self._default = None

    @property
    def input_devices(self):
        """Return list of ALSA input devices."""
        self._update_device()
        return self._data[ATTR_INPUT]

    @property
    def output_devices(self):
        """Return list of ALSA output devices."""
        self._update_device()
        return self._data[ATTR_OUTPUT]

    def _update_device(self):
        """Update internal device DB."""
        current_id = hash(frozenset(self.sys_hardware.audio_devices))

        # Need rebuild?
        if current_id == self._cache:
            return

        # Clean old stuff
        self._data[ATTR_INPUT].clear()
        self._data[ATTR_OUTPUT].clear()

        # Init database
        _LOGGER.info("Update ALSA device list")
        database = self._audio_database()

        # Process devices
        for dev_id, dev_data in self.sys_hardware.audio_devices.items():
            for chan_id, chan_type in dev_data[ATTR_DEVICES].items():
                alsa_id = f"{dev_id},{chan_id}"
                dev_name = dev_data[ATTR_NAME]

                # Lookup type
                if chan_type.endswith('playback'):
                    key = ATTR_OUTPUT
                elif chan_type.endswith('capture'):
                    key = ATTR_INPUT
                else:
                    _LOGGER.warning("Unknown channel type: %s", chan_type)
                    continue

                # Use name from DB or a generic name
                self._data[key][alsa_id] = database.get(
                    self.sys_machine, {}).get(
                        dev_name, {}).get(alsa_id, f"{dev_name}: {chan_id}")

        self._cache = current_id

    @staticmethod
    def _audio_database():
        """Read local JSON audio data into a dict."""
        json_file = Path(__file__).parent.joinpath("data/audiodb.json")

        try:
            # pylint: disable=no-member
            with json_file.open('r') as database:
                return json.loads(database.read())
        except (ValueError, OSError) as err:
            _LOGGER.warning("Can't read audio DB: %s", err)

        return {}

    @property
    def default(self):
        """Generate ALSA default setting."""
        # Init defaults
        if self._default is None:
            database = self._audio_database()
            alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT)
            alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT)

            self._default = DefaultConfig(alsa_input, alsa_output)

        # Search existing/new output
        if self._default.output is None and self.output_devices:
            self._default.output = next(iter(self.output_devices))
            _LOGGER.info("Detect output device %s", self._default.output)

        # Search existing/new input
        if self._default.input is None and self.input_devices:
            self._default.input = next(iter(self.input_devices))
            _LOGGER.info("Detect input device %s", self._default.input)

        return self._default

    def asound(self, alsa_input=None, alsa_output=None):
        """Generate asound config data."""
        alsa_input = alsa_input or self.default.input
        alsa_output = alsa_output or self.default.output

        # Read template
        asound_file = Path(__file__).parent.joinpath("data/asound.tmpl")
        try:
            # pylint: disable=no-member
            with asound_file.open('r') as asound:
                asound_data = asound.read()
        except OSError as err:
            _LOGGER.error("Can't read asound.tmpl: %s", err)
            return ""

        # Process template
        asound_template = Template(asound_data)
        return asound_template.safe_substitute(
            input=alsa_input, output=alsa_output
        )
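
Two small pieces of the file above, shown standalone and outside the diff: attr.make_class builds a tiny mutable record type, and _update_device fingerprints the device set with hash(frozenset(...)) so the DB only rebuilds when the set actually changes. The device names below are example values; attrs must be installed.

import attr

# Same pattern as DefaultConfig above: a tiny mutable record class.
DefaultConfig = attr.make_class('DefaultConfig', ['input', 'output'])
cfg = DefaultConfig(None, '0,0')
cfg.input = '0,1'          # plain attribute assignment works

def make_cache_key(devices):
    """Order-insensitive fingerprint of the current device set."""
    return hash(frozenset(devices))

cache = 0
devices = {'0': 'bcm2835 ALSA', '1': 'USB Audio'}

key = make_cache_key(devices)
if key != cache:
    print("device set changed, rebuilding DB")
    cache = key

# A second call with the same devices is a cheap no-op:
assert make_cache_key(devices) == cache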
121 hassio/host/apparmor.py Normal file
@@ -0,0 +1,121 @@
"""AppArmor control for host."""
import logging
import shutil
from pathlib import Path

from ..coresys import CoreSysAttributes
from ..exceptions import DBusError, HostAppArmorError
from ..utils.apparmor import validate_profile

_LOGGER = logging.getLogger(__name__)

SYSTEMD_SERVICES = {'hassos-apparmor.service', 'hassio-apparmor.service'}


class AppArmorControl(CoreSysAttributes):
    """Handle host AppArmor controls."""

    def __init__(self, coresys):
        """Initialize host AppArmor handling."""
        self.coresys = coresys
        self._profiles = set()
        self._service = None

    @property
    def available(self):
        """Return True if AppArmor is available on the host."""
        return self._service is not None

    def exists(self, profile):
        """Return True if a profile exists."""
        return profile in self._profiles

    async def _reload_service(self):
        """Reload internal service."""
        try:
            await self.sys_host.services.reload(self._service)
        except DBusError as err:
            _LOGGER.error("Can't reload %s: %s", self._service, err)

    def _get_profile(self, profile_name):
        """Get a profile from the AppArmor store."""
        if profile_name not in self._profiles:
            _LOGGER.error("Can't find profile %s", profile_name)
            raise HostAppArmorError()
        return Path(self.sys_config.path_apparmor, profile_name)

    async def load(self):
        """Load available profiles."""
        for content in self.sys_config.path_apparmor.iterdir():
            if not content.is_file():
                continue
            self._profiles.add(content.name)

        # Is connected with systemd?
        _LOGGER.info("Load AppArmor profiles: %s", self._profiles)
        for service in SYSTEMD_SERVICES:
            if not self.sys_host.services.exists(service):
                continue
            self._service = service

        # Load profiles
        if self.available:
            await self._reload_service()
        else:
            _LOGGER.info("AppArmor is not enabled on host")

    async def load_profile(self, profile_name, profile_file):
        """Load or update a profile in AppArmor."""
        if not validate_profile(profile_name, profile_file):
            _LOGGER.error("Profile is not valid with name %s", profile_name)
            raise HostAppArmorError()

        # Copy to AppArmor folder
        dest_profile = Path(self.sys_config.path_apparmor, profile_name)
        try:
            shutil.copy(profile_file, dest_profile)
        except OSError as err:
            _LOGGER.error("Can't copy %s: %s", profile_file, err)
            raise HostAppArmorError() from None

        # Load profiles
        _LOGGER.info("Add or update AppArmor profile: %s", profile_name)
        self._profiles.add(profile_name)
        if self.available:
            await self._reload_service()

    async def remove_profile(self, profile_name):
        """Remove an AppArmor profile."""
        profile_file = self._get_profile(profile_name)

        # Only remove the file
        if not self.available:
            try:
                profile_file.unlink()
            except OSError as err:
                _LOGGER.error("Can't remove profile: %s", err)
                raise HostAppArmorError()
            return

        # Mark as removed and let the host process pick it up
        remove_path = Path(
            self.sys_config.path_apparmor, 'remove', profile_name)
        try:
            profile_file.rename(remove_path)
        except OSError as err:
            _LOGGER.error("Can't mark profile for removal: %s", err)
            raise HostAppArmorError()

        _LOGGER.info("Remove AppArmor profile: %s", profile_name)
        self._profiles.remove(profile_name)
        await self._reload_service()

    def backup_profile(self, profile_name, backup_file):
        """Back up a profile into a new file."""
        profile_file = self._get_profile(profile_name)

        try:
            shutil.copy(profile_file, backup_file)
        except OSError as err:
            _LOGGER.error("Can't backup profile %s: %s", profile_name, err)
            raise HostAppArmorError()
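
The remove flow above marks a profile by moving it into a remove/ subfolder that the host service processes on reload. A sandboxed sketch of that pathlib move, not part of the diff (the profile name and content are invented):

from pathlib import Path
import tempfile

# Mimic the mark-for-removal move in remove_profile(), inside a temp dir.
root = Path(tempfile.mkdtemp())
(root / 'remove').mkdir()

profile = root / 'my-addon-profile'
profile.write_text("profile my-addon-profile {}")

# Same operation as profile_file.rename(remove_path) above:
profile.rename(root / 'remove' / profile.name)

assert not profile.exists()
assert (root / 'remove' / 'my-addon-profile').exists()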
56 hassio/host/control.py Normal file
@@ -0,0 +1,56 @@
"""Power control for host."""
import logging

from ..coresys import CoreSysAttributes
from ..exceptions import HostNotSupportedError

_LOGGER = logging.getLogger(__name__)

MANAGER = 'manager'
HOSTNAME = 'hostname'


class SystemControl(CoreSysAttributes):
    """Handle host power controls."""

    def __init__(self, coresys):
        """Initialize host power handling."""
        self.coresys = coresys

    def _check_dbus(self, flag):
        """Check if dbus is connected, or raise an error."""
        if flag == MANAGER and self.sys_dbus.systemd.is_connected:
            return
        if flag == HOSTNAME and self.sys_dbus.hostname.is_connected:
            return

        _LOGGER.error("No %s dbus connection available", flag)
        raise HostNotSupportedError()

    async def reboot(self):
        """Reboot host system."""
        self._check_dbus(MANAGER)

        _LOGGER.info("Initialize host reboot over systemd")
        try:
            await self.sys_core.shutdown()
        finally:
            await self.sys_dbus.systemd.reboot()

    async def shutdown(self):
        """Shutdown host system."""
        self._check_dbus(MANAGER)

        _LOGGER.info("Initialize host power off over systemd")
        try:
            await self.sys_core.shutdown()
        finally:
            await self.sys_dbus.systemd.power_off()

    async def set_hostname(self, hostname):
        """Set a new local hostname."""
        self._check_dbus(HOSTNAME)

        _LOGGER.info("Set hostname %s", hostname)
        await self.sys_dbus.hostname.set_static_hostname(hostname)
        await self.sys_host.info.update()
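
Note the try/finally shape in reboot() and shutdown(): the host action still runs even when the graceful core shutdown fails. A standalone sketch of that guarantee, with invented stand-ins for sys_core.shutdown() and systemd.reboot():

import asyncio

async def graceful_shutdown():
    raise RuntimeError("add-on refused to stop")  # simulate a failure

async def host_reboot():
    print("host reboot requested")

async def reboot():
    # Same try/finally shape as SystemControl.reboot() above.
    try:
        await graceful_shutdown()
    finally:
        await host_reboot()

try:
    asyncio.run(reboot())
except RuntimeError as err:
    print("graceful shutdown failed, reboot was still requested:", err)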
17 hassio/host/data/asound.tmpl Normal file
@@ -0,0 +1,17 @@
pcm.!default {
    type asym
    capture.pcm "mic"
    playback.pcm "speaker"
}
pcm.mic {
    type plug
    slave {
        pcm "hw:$input"
    }
}
pcm.speaker {
    type plug
    slave {
        pcm "hw:$output"
    }
}
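
How the template above gets filled: AlsaAudio.asound() runs it through string.Template.safe_substitute. A standalone sketch with a trimmed copy of the template inlined (the device ID is an example value):

from string import Template

ASOUND_TMPL = 'pcm.mic {\n    slave {\n        pcm "hw:$input"\n    }\n}\n'

# safe_substitute leaves unknown placeholders intact instead of raising,
# so a half-configured system still yields usable output.
print(Template(ASOUND_TMPL).safe_substitute(input="0,0"))
# -> pcm.mic { ... pcm "hw:0,0" ... }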
18 hassio/host/data/audiodb.json Normal file
@@ -0,0 +1,18 @@
{
    "raspberrypi3": {
        "bcm2835 - bcm2835 ALSA": {
            "0,0": "Raspberry Jack",
            "0,1": "Raspberry HDMI"
        },
        "output": "0,0",
        "input": null
    },
    "raspberrypi2": {
        "output": "0,0",
        "input": null
    },
    "raspberrypi": {
        "output": "0,0",
        "input": null
    }
}
58 hassio/host/info.py Normal file
@@ -0,0 +1,58 @@
"""Info control for host."""
import logging

from ..coresys import CoreSysAttributes
from ..exceptions import HassioError, HostNotSupportedError

_LOGGER = logging.getLogger(__name__)


class InfoCenter(CoreSysAttributes):
    """Handle local system information controls."""

    def __init__(self, coresys):
        """Initialize system center handling."""
        self.coresys = coresys
        self._data = {}

    @property
    def hostname(self):
        """Return local hostname."""
        return self._data.get('StaticHostname') or None

    @property
    def chassis(self):
        """Return local chassis type."""
        return self._data.get('Chassis') or None

    @property
    def deployment(self):
        """Return local deployment type."""
        return self._data.get('Deployment') or None

    @property
    def kernel(self):
        """Return local kernel version."""
        return self._data.get('KernelRelease') or None

    @property
    def operating_system(self):
        """Return local operating system."""
        return self._data.get('OperatingSystemPrettyName') or None

    @property
    def cpe(self):
        """Return local CPE."""
        return self._data.get('OperatingSystemCPEName') or None

    async def update(self):
        """Update properties over dbus."""
        if not self.sys_dbus.hostname.is_connected:
            _LOGGER.error("No hostname dbus connection available")
            raise HostNotSupportedError()

        _LOGGER.info("Update local host information")
        try:
            self._data = await self.sys_dbus.hostname.get_properties()
        except HassioError:
            _LOGGER.warning("Can't update host system information!")
99 hassio/host/services.py Normal file
@@ -0,0 +1,99 @@
"""Service control for host."""
import logging

import attr

from ..coresys import CoreSysAttributes
from ..exceptions import HassioError, HostNotSupportedError, HostServiceError

_LOGGER = logging.getLogger(__name__)

MOD_REPLACE = 'replace'


class ServiceManager(CoreSysAttributes):
    """Handle local service information controls."""

    def __init__(self, coresys):
        """Initialize service manager."""
        self.coresys = coresys
        self._services = set()

    def __iter__(self):
        """Iterate through services."""
        return iter(self._services)

    def _check_dbus(self, unit=None):
        """Check available dbus connection."""
        if not self.sys_dbus.systemd.is_connected:
            _LOGGER.error("No systemd dbus connection available")
            raise HostNotSupportedError()

        if unit and not self.exists(unit):
            _LOGGER.error("Unit '%s' not found", unit)
            raise HostServiceError()

    def start(self, unit):
        """Start a service on the host."""
        self._check_dbus(unit)

        _LOGGER.info("Start local service %s", unit)
        return self.sys_dbus.systemd.start_unit(unit, MOD_REPLACE)

    def stop(self, unit):
        """Stop a service on the host."""
        self._check_dbus(unit)

        _LOGGER.info("Stop local service %s", unit)
        return self.sys_dbus.systemd.stop_unit(unit, MOD_REPLACE)

    def reload(self, unit):
        """Reload a service on the host."""
        self._check_dbus(unit)

        _LOGGER.info("Reload local service %s", unit)
        return self.sys_dbus.systemd.reload_unit(unit, MOD_REPLACE)

    def restart(self, unit):
        """Restart a service on the host."""
        self._check_dbus(unit)

        _LOGGER.info("Restart local service %s", unit)
        return self.sys_dbus.systemd.restart_unit(unit, MOD_REPLACE)

    def exists(self, unit):
        """Check if a unit exists and return True."""
        for service in self._services:
            if unit == service.name:
                return True
        return False

    async def update(self):
        """Update properties over dbus."""
        self._check_dbus()

        _LOGGER.info("Update service information")
        self._services.clear()
        try:
            systemd_units = await self.sys_dbus.systemd.list_units()
            for service_data in systemd_units[0]:
                if not service_data[0].endswith(".service") or \
                        service_data[2] != 'loaded':
                    continue
                self._services.add(ServiceInfo.read_from(service_data))
        except (HassioError, IndexError):
            _LOGGER.warning("Can't update host service information!")


@attr.s(frozen=True)
class ServiceInfo:
    """Represent a single service."""

    name = attr.ib(type=str)
    description = attr.ib(type=str)
    state = attr.ib(type=str)

    @staticmethod
    def read_from(unit):
        """Parse data from dbus into this object."""
        return ServiceInfo(unit[0], unit[1], unit[3])
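
Why ServiceInfo is declared frozen: ServiceManager stores the parsed units in a set, and only immutable attrs classes are hashable. A standalone sketch with an invented systemd row (the real ListUnits() rows carry more fields):

import attr

@attr.s(frozen=True)
class ServiceInfo:
    name = attr.ib(type=str)
    description = attr.ib(type=str)
    state = attr.ib(type=str)

# Shape of one systemd ListUnits() row (values are illustrative):
# (name, description, load_state, active_state, ...)
unit = ('hassio-apparmor.service', 'Hass.io AppArmor', 'loaded', 'active')

info = ServiceInfo(unit[0], unit[1], unit[3])
print(info)
assert info in {info}   # hashable because frozen=True, so it fits in a set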
@@ -1,124 +0,0 @@
"""Host control for HassIO."""
import asyncio
import json
import logging

import async_timeout

from .const import (
    SOCKET_HC, ATTR_LAST_VERSION, ATTR_VERSION, ATTR_TYPE, ATTR_FEATURES,
    ATTR_HOSTNAME, ATTR_OS)

_LOGGER = logging.getLogger(__name__)

TIMEOUT = 15
UNKNOWN = 'unknown'

FEATURES_SHUTDOWN = 'shutdown'
FEATURES_REBOOT = 'reboot'
FEATURES_UPDATE = 'update'
FEATURES_HOSTNAME = 'hostname'
FEATURES_NETWORK_INFO = 'network_info'
FEATURES_NETWORK_CONTROL = 'network_control'


class HostControl(object):
    """Client for host control."""

    def __init__(self, loop):
        """Initialize HostControl socket client."""
        self.loop = loop
        self.active = False
        self.version = UNKNOWN
        self.last_version = UNKNOWN
        self.type = UNKNOWN
        self.features = []
        self.hostname = UNKNOWN
        self.os_info = UNKNOWN

        if SOCKET_HC.is_socket():
            self.active = True

    async def _send_command(self, command):
        """Send command to host.

        Is a coroutine.
        """
        if not self.active:
            return

        reader, writer = await asyncio.open_unix_connection(
            str(SOCKET_HC), loop=self.loop)

        try:
            # send
            _LOGGER.info("Send '%s' to HostControl.", command)

            with async_timeout.timeout(TIMEOUT, loop=self.loop):
                writer.write("{}\n".format(command).encode())
                data = await reader.readline()

            response = data.decode().rstrip()
            _LOGGER.info("Receive from HostControl: %s.", response)

            if response == "OK":
                return True
            elif response == "ERROR":
                return False
            elif response == "WRONG":
                return None
            else:
                try:
                    return json.loads(response)
                except json.JSONDecodeError:
                    _LOGGER.warning("Json parse error from HostControl '%s'.",
                                    response)

        except asyncio.TimeoutError:
            _LOGGER.error("Timeout from HostControl!")

        finally:
            writer.close()

    async def load(self):
        """Load Info from host.

        Return a coroutine.
        """
        info = await self._send_command("info")
        if not info:
            return

        self.version = info.get(ATTR_VERSION, UNKNOWN)
        self.last_version = info.get(ATTR_LAST_VERSION, UNKNOWN)
        self.type = info.get(ATTR_TYPE, UNKNOWN)
        self.features = info.get(ATTR_FEATURES, [])
        self.hostname = info.get(ATTR_HOSTNAME, UNKNOWN)
        self.os_info = info.get(ATTR_OS, UNKNOWN)

    def reboot(self):
        """Reboot the host system.

        Return a coroutine.
        """
        return self._send_command("reboot")

    def shutdown(self):
        """Shutdown the host system.

        Return a coroutine.
        """
        return self._send_command("shutdown")

    def update(self, version=None):
        """Update the host system.

        Return a coroutine.
        """
        if version:
            return self._send_command("update {}".format(version))
        return self._send_command("update")

    def set_hostname(self, hostname):
        """Update hostname on host."""
        return self._send_command("hostname {}".format(hostname))
1 hassio/misc/__init__.py Normal file
@@ -0,0 +1 @@
"""Special object and tools for Hass.io."""
@@ -3,16 +3,19 @@ import asyncio
 import logging
 import shlex
 
+import async_timeout
+
 _LOGGER = logging.getLogger(__name__)
 
 COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
 
 
-class DNSForward(object):
+class DNSForward:
     """Manage DNS forwarding to internal DNS."""
 
-    def __init__(self):
+    def __init__(self, loop):
         """Initialize DNS forwarding."""
+        self.loop = loop
         self.proc = None
 
     async def start(self):
@@ -23,9 +26,10 @@ class DNSForward(object):
                 stdin=asyncio.subprocess.DEVNULL,
                 stdout=asyncio.subprocess.DEVNULL,
                 stderr=asyncio.subprocess.DEVNULL,
+                loop=self.loop
             )
         except OSError as err:
-            _LOGGER.error("Can't start DNS forwarding -> %s", err)
+            _LOGGER.error("Can't start DNS forwarding: %s", err)
         else:
             _LOGGER.info("Start DNS port forwarding for host add-ons")
 
@@ -36,5 +40,10 @@ class DNSForward(object):
             return
 
         self.proc.kill()
+        try:
+            with async_timeout.timeout(5):
+                await self.proc.wait()
+        except asyncio.TimeoutError:
+            _LOGGER.warning("Stop waiting for DNS shutdown")
 
         _LOGGER.info("Stop DNS forwarding")
@@ -6,7 +6,7 @@ import re
 
 import pyudev
 
-from .const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES
+from ..const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -20,10 +20,11 @@ PROC_STAT = Path("/proc/stat")
 RE_BOOT_TIME = re.compile(r"btime (\d+)")
 
 GPIO_DEVICES = Path("/sys/class/gpio")
+RE_TTY = re.compile(r"tty[A-Z]+")
 
 
-class Hardware(object):
-    """Represent a interface to procfs, sysfs and udev."""
+class Hardware:
+    """Represent an interface to procfs, sysfs and udev."""
 
     def __init__(self):
         """Init hardware object."""
@@ -34,7 +35,7 @@ class Hardware(object):
         """Return all serial and connected devices."""
         dev_list = set()
         for device in self.context.list_devices(subsystem='tty'):
-            if 'ID_VENDOR' in device:
+            if 'ID_VENDOR' in device or RE_TTY.search(device.device_node):
                 dev_list.add(device.device_node)
 
         return dev_list
@@ -62,14 +63,18 @@ class Hardware(object):
     @property
     def audio_devices(self):
         """Return all available audio interfaces."""
+        if not ASOUND_CARDS.exists():
+            _LOGGER.info("No audio devices found")
+            return {}
+
         try:
             with ASOUND_CARDS.open('r') as cards_file:
                 cards = cards_file.read()
             with ASOUND_DEVICES.open('r') as devices_file:
                 devices = devices_file.read()
         except OSError as err:
-            _LOGGER.error("Can't read asound data -> %s", err)
-            return
+            _LOGGER.error("Can't read asound data: %s", err)
+            return {}
 
         audio_list = {}
 
@@ -108,13 +113,13 @@ class Hardware(object):
             with PROC_STAT.open("r") as stat_file:
                 stats = stat_file.read()
         except OSError as err:
-            _LOGGER.error("Can't read stat data -> %s", err)
-            return
+            _LOGGER.error("Can't read stat data: %s", err)
+            return None
 
         # parse stat file
         found = RE_BOOT_TIME.search(stats)
         if not found:
            _LOGGER.error("Can't find last boot time!")
-            return
+            return None
 
         return datetime.utcfromtimestamp(int(found.group(1)))
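
The boot-time parsing in the hunk above is a plain regex over /proc/stat. A standalone sketch with an inlined example fragment of that file (the numbers are illustrative):

import re
from datetime import datetime

RE_BOOT_TIME = re.compile(r"btime (\d+)")

# Example /proc/stat fragment; the real file has many more lines.
stats = "cpu  2255 34 2290 22625563\nbtime 1524862426\nprocesses 9893\n"

found = RE_BOOT_TIME.search(stats)
print(datetime.utcfromtimestamp(int(found.group(1))))  # boot time in UTC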
75 hassio/misc/scheduler.py Normal file
@@ -0,0 +1,75 @@
"""Schedule for HassIO."""
import logging
from datetime import date, datetime, time, timedelta

_LOGGER = logging.getLogger(__name__)

INTERVAL = 'interval'
REPEAT = 'repeat'
CALL = 'callback'
TASK = 'task'


class Scheduler:
    """Schedule task inside HassIO."""

    def __init__(self, loop):
        """Initialize task schedule."""
        self.loop = loop
        self._data = {}
        self.suspend = False

    def register_task(self, coro_callback, interval, repeat=True):
        """Schedule a coroutine.

        The coroutine needs to be a callback without arguments.
        """
        task_id = hash(coro_callback)

        # generate data
        opts = {
            CALL: coro_callback,
            INTERVAL: interval,
            REPEAT: repeat,
        }

        # schedule task
        self._data[task_id] = opts
        self._schedule_task(interval, task_id)

        return task_id

    def _run_task(self, task_id):
        """Run a scheduled task."""
        data = self._data[task_id]

        if not self.suspend:
            self.loop.create_task(data[CALL]())

        if data[REPEAT]:
            self._schedule_task(data[INTERVAL], task_id)
        else:
            self._data.pop(task_id)

    def _schedule_task(self, interval, task_id):
        """Schedule a task on the loop."""
        if isinstance(interval, (int, float)):
            job = self.loop.call_later(interval, self._run_task, task_id)
        elif isinstance(interval, time):
            today = datetime.combine(date.today(), interval)
            tomorrow = datetime.combine(
                date.today() + timedelta(days=1), interval)

            # check if we run it today or the next day
            if today > datetime.today():
                calc = today
            else:
                calc = tomorrow

            # call_at() expects the loop's monotonic clock, so convert
            # the wall-clock target into a relative delay instead
            delay = (calc - datetime.today()).total_seconds()
            job = self.loop.call_later(delay, self._run_task, task_id)
        else:
            _LOGGER.fatal("Unknown interval %s (type: %s) for scheduler %s",
                          interval, type(interval), task_id)
            return

        # Store job
        self._data[task_id][TASK] = job
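
The re-arming trick in _run_task above, shown standalone and outside the diff: a call_later handle re-schedules itself after each run. All names below are invented for the sketch.

import asyncio

async def tick():
    print("tick")

def schedule(loop, interval, repeat=True):
    def runner():
        loop.create_task(tick())
        if repeat:
            loop.call_later(interval, runner)   # re-arm, like _schedule_task
    loop.call_later(interval, runner)

async def main():
    schedule(asyncio.get_running_loop(), 0.5)
    await asyncio.sleep(2)   # let it fire a few times

asyncio.run(main())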
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1,56 +0,0 @@
"""Schedule for HassIO."""
import logging

_LOGGER = logging.getLogger(__name__)

SEC = 'seconds'
REPEAT = 'repeat'
CALL = 'callback'
TASK = 'task'


class Scheduler(object):
    """Schedule task inside HassIO."""

    def __init__(self, loop):
        """Initialize task schedule."""
        self.loop = loop
        self._data = {}
        self.suspend = False

    def register_task(self, coro_callback, seconds, repeat=True,
                      now=False):
        """Schedule a coroutine.

        The coroutine needs to be a callback without arguments.
        """
        idx = hash(coro_callback)

        # generate data
        opts = {
            CALL: coro_callback,
            SEC: seconds,
            REPEAT: repeat,
        }
        self._data[idx] = opts

        # schedule task
        if now:
            self._run_task(idx)
        else:
            task = self.loop.call_later(seconds, self._run_task, idx)
            self._data[idx][TASK] = task

        return idx

    def _run_task(self, idx):
        """Run a scheduled task."""
        data = self._data.pop(idx)

        if not self.suspend:
            self.loop.create_task(data[CALL]())

        if data[REPEAT]:
            task = self.loop.call_later(data[SEC], self._run_task, idx)
            data[TASK] = task
            self._data[idx] = data
44 hassio/services/__init__.py Normal file
@@ -0,0 +1,44 @@
"""Handle internal services discovery."""

from .discovery import Discovery  # noqa
from .mqtt import MQTTService
from .data import ServicesData
from ..const import SERVICE_MQTT
from ..coresys import CoreSysAttributes


AVAILABLE_SERVICES = {
    SERVICE_MQTT: MQTTService
}


class ServiceManager(CoreSysAttributes):
    """Handle internal services discovery."""

    def __init__(self, coresys):
        """Initialize services handler."""
        self.coresys = coresys
        self.data = ServicesData()
        self.services_obj = {}

    @property
    def list_services(self):
        """Return a list of services."""
        return list(self.services_obj.values())

    def get(self, slug):
        """Return service object from slug."""
        return self.services_obj.get(slug)

    async def load(self):
        """Load available services."""
        for slug, service in AVAILABLE_SERVICES.items():
            self.services_obj[slug] = service(self.coresys)

        # Read existing discovery messages
        self.sys_discovery.load()

    def reset(self):
        """Reset available data."""
        self.data.reset_data()
        self.sys_discovery.load()
23 hassio/services/data.py Normal file
@@ -0,0 +1,23 @@
"""Handle service data for persistent supervisor reboot."""

from .validate import SCHEMA_SERVICES_FILE
from ..const import FILE_HASSIO_SERVICES, ATTR_DISCOVERY, SERVICE_MQTT
from ..utils.json import JsonConfig


class ServicesData(JsonConfig):
    """Class to handle services data."""

    def __init__(self):
        """Initialize services data."""
        super().__init__(FILE_HASSIO_SERVICES, SCHEMA_SERVICES_FILE)

    @property
    def discovery(self):
        """Return discovery data for Home-Assistant."""
        return self._data[ATTR_DISCOVERY]

    @property
    def mqtt(self):
        """Return settings for the MQTT service."""
        return self._data[SERVICE_MQTT]
107 hassio/services/discovery.py Normal file
@@ -0,0 +1,107 @@
"""Handle discovery messages for Home-Assistant."""
import logging
from uuid import uuid4

from ..const import ATTR_UUID
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

EVENT_DISCOVERY_ADD = 'hassio_discovery_add'
EVENT_DISCOVERY_DEL = 'hassio_discovery_del'


class Discovery(CoreSysAttributes):
    """Home-Assistant discovery handler."""

    def __init__(self, coresys):
        """Initialize discovery handler."""
        self.coresys = coresys
        self.message_obj = {}

    def load(self):
        """Load existing discovery messages into storage."""
        messages = {}
        for message in self._data:
            discovery = Message(**message)
            messages[discovery.uuid] = discovery

        self.message_obj = messages

    def save(self):
        """Write discovery messages into the data file."""
        messages = []
        for message in self.message_obj.values():
            messages.append(message.raw())

        self._data.clear()
        self._data.extend(messages)
        self.sys_services.data.save_data()

    def get(self, uuid):
        """Return discovery message."""
        return self.message_obj.get(uuid)

    @property
    def _data(self):
        """Return discovery data."""
        return self.sys_services.data.discovery

    @property
    def list_messages(self):
        """Return list of available discovery messages."""
        return self.message_obj.values()

    def send(self, provider, component, platform=None, config=None):
        """Send a discovery message to Home-Assistant."""
        message = Message(provider, component, platform, config)

        # Already exists?
        for exists_message in self.message_obj.values():
            if exists_message == message:
                _LOGGER.warning("Found duplicate discovery message from %s",
                                provider)
                return exists_message

        _LOGGER.info("Send discovery to Home-Assistant %s/%s from %s",
                     component, platform, provider)
        self.message_obj[message.uuid] = message
        self.save()

        # send event to Home-Assistant
        self.sys_create_task(self.sys_homeassistant.send_event(
            EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid}))

        return message

    def remove(self, message):
        """Remove a discovery message from Home-Assistant."""
        self.message_obj.pop(message.uuid, None)
        self.save()

        # send event to Home-Assistant
        self.sys_create_task(self.sys_homeassistant.send_event(
            EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid}))


class Message:
    """Represent a single discovery message."""

    def __init__(self, provider, component, platform, config, uuid=None):
        """Initialize discovery message."""
        self.provider = provider
        self.component = component
        self.platform = platform
        self.config = config
        self.uuid = uuid or uuid4().hex

    def raw(self):
        """Return raw discovery message."""
        return self.__dict__

    def __eq__(self, other):
        """Compare with another message."""
        for attribute in ('provider', 'component', 'platform', 'config'):
            if getattr(self, attribute) != getattr(other, attribute):
                return False
        return True
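
The duplicate check in send() above has to iterate the dict values, not the dict itself, because message_obj maps uuid strings to Message objects. A standalone sketch with a trimmed copy of the class (provider and component values are invented):

from uuid import uuid4

class Message:
    """Trimmed copy of the Message class above, for illustration."""
    def __init__(self, provider, component, platform=None, config=None):
        self.provider = provider
        self.component = component
        self.platform = platform
        self.config = config
        self.uuid = uuid4().hex

    def __eq__(self, other):
        return all(getattr(self, a) == getattr(other, a)
                   for a in ('provider', 'component', 'platform', 'config'))

store = {}
msg = Message('mosquitto', 'mqtt')
store[msg.uuid] = msg

# Iterating the dict itself would compare uuid strings against Messages;
# iterating the values finds the duplicate.
dup = Message('mosquitto', 'mqtt')
assert any(existing == dup for existing in store.values())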
54 hassio/services/interface.py Normal file
@@ -0,0 +1,54 @@
"""Interface for single service."""

from ..coresys import CoreSysAttributes


class ServiceInterface(CoreSysAttributes):
    """Interface class for service integration."""

    def __init__(self, coresys):
        """Initialize service interface."""
        self.coresys = coresys

    @property
    def slug(self):
        """Return slug of this service."""
        return None

    @property
    def _data(self):
        """Return data of this service."""
        return None

    @property
    def schema(self):
        """Return data schema of this service."""
        return None

    @property
    def provider(self):
        """Return name of service provider."""
        return None

    @property
    def enabled(self):
        """Return True if the service is in use."""
        return bool(self._data)

    def save(self):
        """Save changes."""
        self.sys_services.data.save_data()

    def get_service_data(self):
        """Return the requested service data."""
        if self.enabled:
            return self._data
        return None

    def set_service_data(self, provider, data):
        """Write the data into the service object."""
        raise NotImplementedError()

    def del_service_data(self, provider):
        """Remove the data from the service object."""
        raise NotImplementedError()
89 hassio/services/mqtt.py Normal file
@@ -0,0 +1,89 @@
"""Provide MQTT Service."""
import logging

from .interface import ServiceInterface
from .validate import SCHEMA_SERVICE_MQTT
from ..const import (
    ATTR_PROVIDER, SERVICE_MQTT, ATTR_HOST, ATTR_PORT, ATTR_USERNAME,
    ATTR_PASSWORD, ATTR_PROTOCOL, ATTR_DISCOVERY_ID)

_LOGGER = logging.getLogger(__name__)


class MQTTService(ServiceInterface):
    """Provide MQTT services."""

    @property
    def slug(self):
        """Return slug of this service."""
        return SERVICE_MQTT

    @property
    def _data(self):
        """Return data of this service."""
        return self.sys_services.data.mqtt

    @property
    def schema(self):
        """Return data schema of this service."""
        return SCHEMA_SERVICE_MQTT

    @property
    def provider(self):
        """Return name of service provider."""
        return self._data.get(ATTR_PROVIDER)

    @property
    def hass_config(self):
        """Return Home-Assistant MQTT config."""
        if not self.enabled:
            return None

        hass_config = {
            'host': self._data[ATTR_HOST],
            'port': self._data[ATTR_PORT],
            'protocol': self._data[ATTR_PROTOCOL]
        }
        if ATTR_USERNAME in self._data:
            hass_config['user'] = self._data[ATTR_USERNAME]
        if ATTR_PASSWORD in self._data:
            hass_config['password'] = self._data[ATTR_PASSWORD]

        return hass_config

    def set_service_data(self, provider, data):
        """Write the data into the service object."""
        if self.enabled:
            _LOGGER.error("There is already an MQTT service in use from %s",
                          self.provider)
            return False

        self._data.update(data)
        self._data[ATTR_PROVIDER] = provider

        if provider == 'homeassistant':
            _LOGGER.info("Use MQTT settings from Home-Assistant")
            self.save()
            return True

        # discover MQTT to Home-Assistant
        message = self.sys_discovery.send(
            provider, SERVICE_MQTT, None, self.hass_config)

        self._data[ATTR_DISCOVERY_ID] = message.uuid
        self.save()
        return True

    def del_service_data(self, provider):
        """Remove the data from the service object."""
        if not self.enabled:
            _LOGGER.warning("Can't remove a service that does not exist")
            return False

        discovery_id = self._data.get(ATTR_DISCOVERY_ID)
        if discovery_id:
            self.sys_discovery.remove(
                self.sys_discovery.get(discovery_id))

        self._data.clear()
        self.save()
        return True
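
The original hass_config used "hass_config['user']: value", which is an annotation statement, not an assignment; the replacement above uses "=". A tiny standalone demonstration of the pitfall:

hass_config = {}
hass_config['user']: 'name'   # annotation statement: does NOT assign!
assert 'user' not in hass_config

hass_config['user'] = 'name'  # assignment, as fixed above
assert hass_config['user'] == 'name'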
44 hassio/services/validate.py Normal file
@@ -0,0 +1,44 @@
"""Validate services schema."""

import voluptuous as vol

from ..const import (
    SERVICE_MQTT, ATTR_HOST, ATTR_PORT, ATTR_PASSWORD, ATTR_USERNAME, ATTR_SSL,
    ATTR_PROVIDER, ATTR_PROTOCOL, ATTR_DISCOVERY, ATTR_COMPONENT, ATTR_UUID,
    ATTR_PLATFORM, ATTR_CONFIG, ATTR_DISCOVERY_ID)
from ..validate import NETWORK_PORT


SCHEMA_DISCOVERY = vol.Schema([
    vol.Schema({
        vol.Required(ATTR_UUID): vol.Match(r"^[0-9a-f]{32}$"),
        vol.Required(ATTR_PROVIDER): vol.Coerce(str),
        vol.Required(ATTR_COMPONENT): vol.Coerce(str),
        vol.Required(ATTR_PLATFORM): vol.Any(None, vol.Coerce(str)),
        vol.Required(ATTR_CONFIG): vol.Any(None, dict),
    }, extra=vol.REMOVE_EXTRA)
])


# pylint: disable=no-value-for-parameter
SCHEMA_SERVICE_MQTT = vol.Schema({
    vol.Required(ATTR_HOST): vol.Coerce(str),
    vol.Required(ATTR_PORT): NETWORK_PORT,
    vol.Optional(ATTR_USERNAME): vol.Coerce(str),
    vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
    vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
    vol.Optional(ATTR_PROTOCOL, default='3.1.1'):
        vol.All(vol.Coerce(str), vol.In(['3.1', '3.1.1'])),
})


SCHEMA_CONFIG_MQTT = SCHEMA_SERVICE_MQTT.extend({
    vol.Required(ATTR_PROVIDER): vol.Coerce(str),
    vol.Optional(ATTR_DISCOVERY_ID): vol.Match(r"^[0-9a-f]{32}$"),
})


SCHEMA_SERVICES_FILE = vol.Schema({
    vol.Optional(SERVICE_MQTT, default=dict): vol.Any({}, SCHEMA_CONFIG_MQTT),
    vol.Optional(ATTR_DISCOVERY, default=list): vol.Any([], SCHEMA_DISCOVERY),
}, extra=vol.REMOVE_EXTRA)
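
What validation looks like in practice: a standalone re-creation of SCHEMA_SERVICE_MQTT with literal keys, since the ATTR_* constants live in hassio.const and NETWORK_PORT in hassio.validate (its int/range form below is an assumption). Requires voluptuous.

import voluptuous as vol

schema = vol.Schema({
    vol.Required('host'): vol.Coerce(str),
    vol.Required('port'): vol.All(vol.Coerce(int), vol.Range(min=1, max=65535)),
    vol.Optional('ssl', default=False): vol.Boolean(),
    vol.Optional('protocol', default='3.1.1'):
        vol.All(vol.Coerce(str), vol.In(['3.1', '3.1.1'])),
})

print(schema({'host': 'core-mosquitto', 'port': '1883'}))
# port is coerced to int; ssl and protocol get their defaults

try:
    schema({'host': 'core-mosquitto'})
except vol.Invalid as err:
    print("rejected:", err)  # missing required key 'port'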
Some files were not shown because too many files have changed in this diff.