Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-12 10:39:21 +00:00)
Compare commits
851 Commits
13  .dockerignore  Normal file
@@ -0,0 +1,13 @@
+# General files
+.git
+.github
+
+# Test related files
+.tox
+
+# Temporary files
+**/__pycache__
+
+# virtualenv
+venv/
+ENV/
29  .github/ISSUE_TEMPLATE.md  vendored  Normal file
@@ -0,0 +1,29 @@
+<!-- READ THIS FIRST:
+- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
+- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
+- Do not report issues for components here, please refer to https://github.com/home-assistant/home-assistant/issues
+- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
+- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
+- If you have a problem with an add-on, open an issue in its repository.
+-->
+
+**Home Assistant release with the issue:**
+
+<!--
+- Frontend -> Developer tools -> Info
+- Or use this command: hass --version
+-->
+
+**Operating environment (HassOS/Generic):**
+
+<!--
+Please provide details about your environment.
+-->
+
+**Supervisor logs:**
+
+<!--
+- Frontend -> Hass.io -> System
+- Or use this command: hassio su logs
+-->
+
+**Description of problem:**
16  .github/main.workflow  vendored  Normal file
@@ -0,0 +1,16 @@
+workflow "tox" {
+  on = "push"
+  resolves = [
+    "Python 3.7",
+    "Json Files",
+  ]
+}
+
+action "Python 3.7" {
+  uses = "home-assistant/actions/py37-tox@master"
+}
+
+action "Json Files" {
+  uses = "home-assistant/actions/jq@master"
+  args = "**/*.json"
+}
13  .github/move.yml  vendored  Normal file
@@ -0,0 +1,13 @@
+# Configuration for move-issues - https://github.com/dessant/move-issues
+
+# Delete the command comment. Ignored when the comment also contains other content
+deleteCommand: true
+# Close the source issue after moving
+closeSourceIssue: true
+# Lock the source issue after moving
+lockSourceIssue: false
+# Set custom aliases for targets
+# aliases:
+#   r: repo
+#   or: owner/repo
+
4  .github/release-drafter.yml  vendored  Normal file
@@ -0,0 +1,4 @@
+template: |
+  ## What's Changed
+
+  $CHANGES
3  .gitignore  vendored
@@ -90,3 +90,6 @@ ENV/
 
 # pylint
 .pylint.d/
+
+# VS Code
+.vscode/
1  .gitmodules  vendored
@@ -1,3 +1,4 @@
 [submodule "home-assistant-polymer"]
   path = home-assistant-polymer
   url = https://github.com/home-assistant/home-assistant-polymer
+  branch = dev
12  .travis.yml
@@ -1,12 +0,0 @@
-sudo: false
-matrix:
-  fast_finish: true
-  include:
-    - python: "3.6"
-
-cache:
-  directories:
-    - $HOME/.cache/pip
-install: pip install -U tox
-language: python
-script: tox
498  API.md
@@ -1,10 +1,10 @@
-# Hass.io Server
+# Hass.io

 ## Hass.io RESTful API

 Interface for Home Assistant to control things from supervisor.

-On error:
+On error / Code 400:

 ```json
 {
@@ -13,7 +13,7 @@ On error:
 }
 ```

-On success:
+On success / Code 200:

 ```json
 {
@@ -22,9 +22,14 @@ On success:
 }
 ```

+To access the API you need to set the `X-HASSIO-KEY` header; the token is available to add-ons/Home Assistant in the `HASSIO_TOKEN` environment variable.
+
 ### Hass.io

 - GET `/supervisor/ping`

+This API call doesn't need a token.
+
 - GET `/supervisor/info`

 The add-ons listed in `addons` are only the installed ones.
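As an illustration of the token header described above, here is a minimal Python sketch; the in-cluster base URL `http://hassio` and the `requests` dependency are assumptions, not part of this diff:

```python
import os

import requests  # assumption: HTTP client available in the add-on environment

# The Supervisor exposes the token to add-ons/Home Assistant as HASSIO_TOKEN.
headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

# /supervisor/ping needs no token; /supervisor/info does.
requests.get("http://hassio/supervisor/ping")
info = requests.get("http://hassio/supervisor/info", headers=headers).json()

# Per the envelope documented above, payloads live under "data".
print(info["result"], info["data"])
```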
@@ -34,8 +39,9 @@ The add-ons listed in `addons` are only the installed ones.
     "version": "INSTALL_VERSION",
     "last_version": "LAST_VERSION",
     "arch": "armhf|aarch64|i386|amd64",
-    "beta_channel": "true|false",
+    "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
+    "wait_boot": "int",
     "addons": [
         {
             "name": "xy bla",
@@ -44,6 +50,7 @@ The add-ons listed in `addons` are only the installed ones.
             "repository": "12345678|null",
             "version": "LAST_VERSION",
             "installed": "INSTALL_VERSION",
+            "icon": "bool",
             "logo": "bool",
             "state": "started|stopped",
         }
@@ -68,8 +75,9 @@ Optional:

 ```json
 {
-    "beta_channel": "true|false",
+    "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
+    "wait_boot": "int",
     "addons_repositories": [
         "REPO_URL"
     ]
@@ -84,44 +92,20 @@ Reload addons/version.

 Output is the raw docker log.

-### Security
-
-- GET `/security/info`
-
+- GET `/supervisor/stats`
 ```json
 {
-    "initialize": "bool",
-    "totp": "bool"
+    "cpu_percent": 0.0,
+    "memory_usage": 283123,
+    "memory_limit": 329392,
+    "network_tx": 0,
+    "network_rx": 0,
+    "blk_read": 0,
+    "blk_write": 0
 }
 ```

-- POST `/security/options`
-
-```json
-{
-    "password": "xy"
-}
-```
-
-- POST `/security/totp`
-
-```json
-{
-    "password": "xy"
-}
-```
-
-Return QR-Code
-
-- POST `/security/session`
-```json
-{
-    "password": "xy",
-    "totp": "null|123456"
-}
-```
-
-### Backup/Snapshot
+### Snapshot

 - GET `/snapshots`

@@ -131,7 +115,9 @@
         {
             "slug": "SLUG",
             "date": "ISO",
-            "name": "Custom name"
+            "name": "Custom name",
+            "type": "full|partial",
+            "protected": "bool"
         }
     ]
 }
@@ -139,11 +125,28 @@

 - POST `/snapshots/reload`

+- POST `/snapshots/new/upload`
+
+return:
+```json
+{
+    "slug": ""
+}
+```
+
 - POST `/snapshots/new/full`

 ```json
 {
-    "name": "Optional"
+    "name": "Optional",
+    "password": "Optional"
 }
 ```

+return:
+```json
+{
+    "slug": ""
+}
+```
+
@@ -153,7 +156,15 @@
 {
     "name": "Optional",
     "addons": ["ADDON_SLUG"],
-    "folders": ["FOLDER_NAME"]
+    "folders": ["FOLDER_NAME"],
+    "password": "Optional"
 }
 ```

+return:
+```json
+{
+    "slug": ""
+}
+```
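A minimal sketch of driving the snapshot-creation endpoints above (same assumed base URL and token handling as in the earlier sketch):

```python
import os

import requests  # assumption: same client conventions as the earlier sketch

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

# Create a password-protected full snapshot; the reply carries the new slug.
resp = requests.post(
    "http://hassio/snapshots/new/full",
    headers=headers,
    json={"name": "nightly", "password": "example-secret"},
)
slug = resp.json()["data"]["slug"]

# The slug can later be passed to /snapshots/{slug}/restore/full.
print("created snapshot", slug)
```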
@@ -168,15 +179,14 @@
     "name": "custom snapshot name / description",
     "date": "ISO",
     "size": "SIZE_IN_MB",
-    "homeassistant": {
-        "version": "INSTALLED_HASS_VERSION",
-        "devices": []
-    },
+    "protected": "bool",
+    "homeassistant": "version",
     "addons": [
         {
             "slug": "ADDON_SLUG",
             "name": "NAME",
-            "version": "INSTALLED_VERSION"
+            "version": "INSTALLED_VERSION",
+            "size": "SIZE_IN_MB"
         }
     ],
     "repositories": ["URL"],
@@ -185,36 +195,47 @@
 ```

 - POST `/snapshots/{slug}/remove`

 - GET `/snapshots/{slug}/download`

 - POST `/snapshots/{slug}/restore/full`

+```json
+{
+    "password": "Optional"
+}
+```
+
 - POST `/snapshots/{slug}/restore/partial`

 ```json
 {
     "homeassistant": "bool",
     "addons": ["ADDON_SLUG"],
-    "folders": ["FOLDER_NAME"]
+    "folders": ["FOLDER_NAME"],
+    "password": "Optional"
 }
 ```

 ### Host

-- POST `/host/reload`
-
 - POST `/host/shutdown`

 - POST `/host/reboot`

 - GET `/host/info`

 ```json
 {
-    "type": "",
-    "version": "",
-    "last_version": "",
-    "features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"],
-    "hostname": "",
-    "os": "",
-    "audio": {
-        "input": "0,0",
-        "output": "0,0"
-    }
+    "hostname": "hostname|null",
+    "features": ["shutdown", "reboot", "hostname", "services", "hassos"],
+    "operating_system": "HassOS XY|Ubuntu 16.4|null",
+    "kernel": "4.15.7|null",
+    "chassis": "specific|null",
+    "deployment": "stable|beta|dev|null",
+    "cpe": "xy|null",
 }
 ```
@@ -222,54 +243,98 @@

 ```json
 {
-    "audio_input": "0,0",
-    "audio_output": "0,0"
+    "hostname": "",
 }
 ```

-- POST `/host/update`
+- POST `/host/reload`

-Optional:
+#### Services

+- GET `/host/services`
 ```json
 {
-    "version": "VERSION"
+    "services": [
+        {
+            "name": "xy.service",
+            "description": "XY ...",
+            "state": "active|"
+        }
+    ]
 }
 ```

-- GET `/host/hardware`
+- POST `/host/service/{unit}/stop`
+
+- POST `/host/service/{unit}/start`
+
+- POST `/host/service/{unit}/reload`
+
+### HassOS
+
+- GET `/hassos/info`
+```json
+{
+    "version": "2.3",
+    "version_cli": "7",
+    "version_latest": "2.4",
+    "version_cli_latest": "8",
+    "board": "ova|rpi"
+}
+```
+
+- POST `/hassos/update`
+```json
+{
+    "version": "optional"
+}
+```
+
+- POST `/hassos/update/cli`
+```json
+{
+    "version": "optional"
+}
+```
+
+- POST `/hassos/config/sync`
+
+Load host configs from a USB stick.
+
+### Hardware
+
+- GET `/hardware/info`
 ```json
 {
     "serial": ["/dev/xy"],
     "input": ["Input device name"],
     "disk": ["/dev/sdax"],
+    "gpio": ["gpiochip0", "gpiochip100"],
     "audio": {
         "CARD_ID": {
             "name": "xy",
             "type": "microphone",
-            "devices": {
-                "DEV_ID": "type of device"
-            }
+            "devices": [
+                "chan_id": "channel ID",
+                "chan_type": "type of device"
+            ]
         }
     }
 }
 ```

-### Network
-
-- GET `/network/info`
-
+- GET `/hardware/audio`
 ```json
 {
-    "hostname": ""
-}
-```
-
-- POST `/network/options`
-
-```json
-{
-    "hostname": "",
+    "audio": {
+        "input": {
+            "0,0": "Mic"
+        },
+        "output": {
+            "1,0": "Jack",
+            "1,1": "HDMI"
+        }
+    }
 }
 ```
@@ -281,9 +346,15 @@ Optional:
 {
     "version": "INSTALL_VERSION",
     "last_version": "LAST_VERSION",
-    "devices": [""],
     "arch": "arch",
     "machine": "Image machine type",
     "image": "str",
-    "custom": "bool -> if custom image"
+    "custom": "bool -> if custom image",
+    "boot": "bool",
+    "port": 8123,
+    "ssl": "bool",
+    "watchdog": "bool",
+    "wait_boot": 600
 }
 ```
@@ -302,21 +373,53 @@
 Output is the raw Docker log.

 - POST `/homeassistant/restart`
-- POST `/homeassistant/options`
+- POST `/homeassistant/check`
+- POST `/homeassistant/start`
+- POST `/homeassistant/stop`
+- POST `/homeassistant/rebuild`
+
+- POST `/homeassistant/options`

 ```json
 {
-    "devices": [],
     "image": "Optional|null",
-    "last_version": "Optional for custom image|null"
+    "last_version": "Optional for custom image|null",
+    "port": "port for access hass",
+    "ssl": "bool",
+    "password": "",
+    "refresh_token": "",
+    "watchdog": "bool",
+    "wait_boot": 600
 }
 ```

+Setting `image` and `last_version` to `null` resets these options.
+
 - POST/GET `/homeassistant/api`

-Proxy to real home-assistant instance.
+Proxy to the real Home Assistant instance.

+- GET `/homeassistant/websocket`
+
+Proxy to the real websocket instance.
+
+- GET `/homeassistant/stats`
+```json
+{
+    "cpu_percent": 0.0,
+    "memory_usage": 283123,
+    "memory_limit": 329392,
+    "network_tx": 0,
+    "network_rx": 0,
+    "blk_read": 0,
+    "blk_write": 0
+}
+```
+
 ### RESTful for API addons

+If an add-on wants to call itself, it can use `/addons/self/...`.
+
 - GET `/addons`

 Get all available addons.
@@ -328,18 +431,15 @@ Get all available addons.
             "name": "xy bla",
             "slug": "xy",
             "description": "description",
+            "arch": ["armhf", "aarch64", "i386", "amd64"],
             "repository": "core|local|REP_ID",
             "version": "LAST_VERSION",
             "installed": "none|INSTALL_VERSION",
             "detached": "bool",
+            "available": "bool",
+            "build": "bool",
-            "privileged": ["NET_ADMIN", "SYS_ADMIN"],
-            "devices": ["/dev/xy"],
             "url": "null|url",
-            "logo": "bool",
-            "audio": "bool",
-            "hassio_api": "bool"
+            "icon": "bool",
+            "logo": "bool"
         }
     ],
     "repositories": [
@@ -360,10 +460,15 @@ Get all available addons.
 ```json
 {
     "name": "xy bla",
     "slug": "xdssd_xybla",
     "description": "description",
+    "long_description": "null|markdown",
+    "auto_update": "bool",
     "url": "null|url of addon",
     "detached": "bool",
+    "available": "bool",
     "arch": ["armhf", "aarch64", "i386", "amd64"],
+    "machine": "[raspberrypi2, tinker]",
     "repository": "12345678|null",
     "version": "null|VERSION_INSTALLED",
     "last_version": "LAST_VERSION",
@@ -373,19 +478,43 @@ Get all available addons.
     "options": "{}",
     "network": "{}|null",
     "host_network": "bool",
+    "host_pid": "bool",
     "host_ipc": "bool",
     "host_dbus": "bool",
     "privileged": ["NET_ADMIN", "SYS_ADMIN"],
+    "apparmor": "disable|default|profile",
     "devices": ["/dev/xy"],
+    "auto_uart": "bool",
     "icon": "bool",
     "logo": "bool",
     "changelog": "bool",
     "hassio_api": "bool",
+    "hassio_role": "default|homeassistant|manager|admin",
     "homeassistant_api": "bool",
+    "auth_api": "bool",
+    "full_access": "bool",
+    "protected": "bool",
+    "rating": "1-6",
     "stdin": "bool",
     "webui": "null|http(s)://[HOST]:port/xy/zx",
     "gpio": "bool",
+    "kernel_modules": "bool",
     "devicetree": "bool",
     "docker_api": "bool",
     "audio": "bool",
     "audio_input": "null|0,0",
-    "audio_output": "null|0,0"
+    "audio_output": "null|0,0",
+    "services_role": "['service:access']",
+    "discovery": "['service']"
 }
 ```

+- GET `/addons/{addon}/icon`
+
 - GET `/addons/{addon}/logo`

 - GET `/addons/{addon}/changelog`

 - POST `/addons/{addon}/options`

 ```json
@@ -401,7 +530,17 @@ Get all available addons.
 }
 ```

-For reset custom network/audio settings, set it `null`.
+To reset custom network/audio/options, set them to `null`.
+
+- POST `/addons/{addon}/security`
+
+This function is not callable by an add-on itself.
+
+```json
+{
+    "protected": "bool",
+}
+```

 - POST `/addons/{addon}/start`
@@ -409,71 +548,148 @@ To reset custom network/audio/options, set them to `null`.

 - POST `/addons/{addon}/install`

 Optional:

 ```json
 {
     "version": "VERSION"
 }
 ```

 - POST `/addons/{addon}/uninstall`

 - POST `/addons/{addon}/update`

 Optional:

 ```json
 {
     "version": "VERSION"
 }
 ```

 - GET `/addons/{addon}/logs`

 Output is the raw Docker log.

 - POST `/addons/{addon}/restart`

-## Host Control
+- POST `/addons/{addon}/rebuild`

-Communicate over UNIX socket with a host daemon.
+Only supported for local build add-ons.

-- commands
+- POST `/addons/{addon}/stdin`

-```
-# info
--> {'type', 'version', 'last_version', 'features', 'hostname'}
-# reboot
-# shutdown
-# host-update [v]
+Write data to add-on stdin.

-# hostname xy
-
-# network info
--> {}
-# network wlan ssd xy
-# network wlan password xy
-# network int ip xy
-# network int netmask xy
-# network int route xy
+- GET `/addons/{addon}/stats`
+```json
+{
+    "cpu_percent": 0.0,
+    "memory_usage": 283123,
+    "memory_limit": 329392,
+    "network_tx": 0,
+    "network_rx": 0,
+    "blk_read": 0,
+    "blk_write": 0
+}
+```

-Features:
+### discovery

-- shutdown
-- reboot
-- update
-- hostname
-- network_info
-- network_control
-
-Answer:
-```
-{}|OK|ERROR|WRONG
-```
+- GET `/discovery`
+```json
+{
+    "discovery": [
+        {
+            "addon": "slug",
+            "service": "name",
+            "uuid": "uuid",
+            "config": {}
+        }
+    ]
+}
+```

-- {}: json
-- OK: call was successful
-- ERROR: error on call
-- WRONG: not supported
+- GET `/discovery/{UUID}`
+```json
+{
+    "addon": "slug",
+    "service": "name",
+    "uuid": "uuid",
+    "config": {}
+}
+```

+- POST `/discovery`
+```json
+{
+    "service": "name",
+    "config": {}
+}
+```
+
+return:
+```json
+{
+    "uuid": "uuid"
+}
+```
+
+- DEL `/discovery/{UUID}`
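A hedged sketch of the discovery round trip documented above; the service payload shown is a made-up example, not taken from this diff:

```python
import os

import requests  # assumption: same client conventions as the earlier sketches

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

# Register a discovery payload; the Supervisor answers with its UUID.
resp = requests.post(
    "http://hassio/discovery",
    headers=headers,
    json={"service": "mqtt", "config": {"host": "example-broker", "port": 1883}},
)
uuid = resp.json()["data"]["uuid"]

# The record can be read back or removed again under its UUID.
requests.delete(f"http://hassio/discovery/{uuid}", headers=headers)
```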
+### Services
+
+- GET `/services`
+```json
+{
+    "services": [
+        {
+            "slug": "name",
+            "available": "bool",
+            "providers": "list"
+        }
+    ]
+}
+```
+
+#### MQTT
+
+- GET `/services/mqtt`
+```json
+{
+    "addon": "name",
+    "host": "xy",
+    "port": "8883",
+    "ssl": "bool",
+    "username": "optional",
+    "password": "optional",
+    "protocol": "3.1.1"
+}
+```
+
+- POST `/services/mqtt`
+```json
+{
+    "host": "xy",
+    "port": "8883",
+    "ssl": "bool|optional",
+    "username": "optional",
+    "password": "optional",
+    "protocol": "3.1.1"
+}
+```
+
+- DEL `/services/mqtt`
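A minimal sketch of an add-on announcing its MQTT service through this endpoint; every value below is a placeholder:

```python
import os

import requests  # assumption: same client conventions as the earlier sketches

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

# A broker add-on publishes its connection details so Home Assistant and
# other add-ons can discover them later via GET /services/mqtt.
requests.post(
    "http://hassio/services/mqtt",
    headers=headers,
    json={
        "host": "example-broker",  # placeholder hostname
        "port": "8883",
        "ssl": True,
        "username": "addons",
        "password": "example-secret",
        "protocol": "3.1.1",
    },
)
```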
+### Misc
+
+- GET `/info`
+```json
+{
+    "supervisor": "version",
+    "homeassistant": "version",
+    "hassos": "null|version",
+    "hostname": "name",
+    "machine": "type",
+    "arch": "arch",
+    "supported_arch": ["arch1", "arch2"],
+    "channel": "stable|beta|dev"
+}
+```
+
+### Auth / SSO API
+
+You can use the Home Assistant user system; the Supervisor handles this auth system.
+
+You can call POST `/auth`.
+
+We support:
+- JSON `{ "user|name": "...", "password": "..." }`
+- application/x-www-form-urlencoded `user|name=...&password=...`
+- BasicAuth
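A hedged sketch of the three request styles listed above; the credentials are placeholders:

```python
import os

import requests  # assumption: same client conventions as the earlier sketches

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}
creds = {"user": "demo", "password": "example"}  # placeholder credentials

# 1. JSON body
requests.post("http://hassio/auth", headers=headers, json=creds)

# 2. application/x-www-form-urlencoded body
requests.post("http://hassio/auth", headers=headers, data=creds)

# 3. HTTP BasicAuth
requests.post("http://hassio/auth", headers=headers,
              auth=(creds["user"], creds["password"]))
```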
33  Dockerfile  Normal file
@@ -0,0 +1,33 @@
+ARG BUILD_FROM
+FROM $BUILD_FROM
+
+# Install base
+RUN apk add --no-cache \
+    openssl \
+    libffi \
+    musl \
+    git \
+    socat \
+    glib \
+    libstdc++ \
+    eudev-libs
+
+# Install requirements
+COPY requirements.txt /usr/src/
+RUN apk add --no-cache --virtual .build-dependencies \
+    make \
+    g++ \
+    openssl-dev \
+    libffi-dev \
+    musl-dev \
+    && export MAKEFLAGS="-j$(nproc)" \
+    && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
+    && apk del .build-dependencies \
+    && rm -f /usr/src/requirements.txt
+
+# Install HassIO
+COPY . /usr/src/hassio
+RUN pip3 install --no-cache-dir /usr/src/hassio \
+    && rm -rf /usr/src/hassio
+
+CMD [ "python3", "-m", "hassio" ]
26  README.md
@@ -1,14 +1,28 @@
 # Hass.io

-### First private cloud solution for home automation.
+## First private cloud solution for home automation

-Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.
+Hass.io is a Docker-based system for managing your Home Assistant installation
+and related applications. The system is controlled via Home Assistant which
+communicates with the Supervisor. The Supervisor provides an API to manage the
+installation. This includes changing network settings or installing
+and updating software.

 ![]()

-- [Hass.io Addons](https://github.com/home-assistant/hassio-addons)
-- [Hass.io Build](https://github.com/home-assistant/hassio-build)
-
 ## Installation

-Installation instructions can be found at [https://home-assistant.io/hassio](https://home-assistant.io/hassio).
+Installation instructions can be found at <https://home-assistant.io/hassio>.

+## Development
+
+The development of the supervisor is a bit tricky. Not difficult but tricky.
+
+- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-build/tree/master/builder
+- Go into a HassOS device or VM and pull your supervisor.
+- Set the developer mode in updater.json
+- Tag it as `homeassistant/xy-hassio-supervisor:latest`
+- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
+- Test your changes
+
+For small bugfixes or improvements, make a PR. For significant changes, open an RFC first.
@@ -1 +1 @@
-"""Init file for HassIO."""
+"""Init file for Hass.io."""
@@ -1,45 +1,59 @@
-"""Main file for HassIO."""
+"""Main file for Hass.io."""
 import asyncio
+from concurrent.futures import ThreadPoolExecutor
 import logging
 import sys

-import hassio.bootstrap as bootstrap
-import hassio.core as core
+from hassio import bootstrap

 _LOGGER = logging.getLogger(__name__)


+def initialize_event_loop():
+    """Attempt to use uvloop."""
+    try:
+        import uvloop
+        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+    except ImportError:
+        pass
+
+    return asyncio.get_event_loop()
+
+
 # pylint: disable=invalid-name
 if __name__ == "__main__":
     bootstrap.initialize_logging()

-    if not bootstrap.check_environment():
-        exit(1)
+    # Init async event loop
+    loop = initialize_event_loop()

-    loop = asyncio.get_event_loop()
+    # Check if all information are available to setup Hass.io
+    if not bootstrap.check_environment():
+        sys.exit(1)

+    # init executor pool
+    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
+    loop.set_default_executor(executor)

-    _LOGGER.info("Initialize Hassio setup")
-    config = bootstrap.initialize_system_data()
-    hassio = core.HassIO(loop, config)
+    _LOGGER.info("Initialize Hass.io setup")
+    coresys = loop.run_until_complete(bootstrap.initialize_coresys())

-    bootstrap.migrate_system_env(config)
+    bootstrap.migrate_system_env(coresys)

-    _LOGGER.info("Run Hassio setup")
-    loop.run_until_complete(hassio.setup())
+    _LOGGER.info("Setup HassIO")
+    loop.run_until_complete(coresys.core.setup())

-    _LOGGER.info("Start Hassio")
-    loop.call_soon_threadsafe(loop.create_task, hassio.start())
-    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)
+    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
+    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)

-    _LOGGER.info("Run Hassio loop")
-    loop.run_forever()
-
-    _LOGGER.info("Cleanup system")
+    try:
+        _LOGGER.info("Run Hass.io")
+        loop.run_forever()
+    finally:
+        _LOGGER.info("Stopping Hass.io")
+        loop.run_until_complete(coresys.core.stop())
+        executor.shutdown(wait=False)
+        loop.close()

-    _LOGGER.info("Close Hassio")
-    sys.exit(hassio.exit_code)
+    _LOGGER.info("Close Hass.io")
+    sys.exit(0)
@@ -1,60 +1,72 @@
-"""Init file for HassIO addons."""
+"""Init file for Hass.io add-ons."""
 import asyncio
 import logging

 from .addon import Addon
 from .repository import Repository
-from .data import Data
-from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
+from .data import AddonsData
+from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
+from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)

 BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))


-class AddonManager(object):
-    """Manage addons inside HassIO."""
+class AddonManager(CoreSysAttributes):
+    """Manage add-ons inside Hass.io."""

-    def __init__(self, config, loop, dock):
-        """Initialize docker base wrapper."""
-        self.loop = loop
-        self.config = config
-        self.dock = dock
-        self.data = Data(config)
-        self.addons = {}
-        self.repositories = {}
+    def __init__(self, coresys):
+        """Initialize Docker base wrapper."""
+        self.coresys = coresys
+        self.data = AddonsData(coresys)
+        self.addons_obj = {}
+        self.repositories_obj = {}

     @property
     def list_addons(self):
-        """Return a list of all addons."""
-        return list(self.addons.values())
+        """Return a list of all add-ons."""
+        return list(self.addons_obj.values())
+
+    @property
+    def list_installed(self):
+        """Return a list of installed add-ons."""
+        return [addon for addon in self.addons_obj.values()
+                if addon.is_installed]

     @property
     def list_repositories(self):
-        """Return list of addon repositories."""
-        return list(self.repositories.values())
+        """Return list of add-on repositories."""
+        return list(self.repositories_obj.values())

     def get(self, addon_slug):
-        """Return a adddon from slug."""
-        return self.addons.get(addon_slug)
+        """Return an add-on from slug."""
+        return self.addons_obj.get(addon_slug)

-    async def prepare(self):
-        """Startup addon management."""
+    def from_token(self, token):
+        """Return an add-on from Hass.io token."""
+        for addon in self.list_addons:
+            if addon.is_installed and token == addon.hassio_token:
+                return addon
+        return None
+
+    async def load(self):
+        """Start up add-on management."""
         self.data.reload()

-        # init hassio built-in repositories
+        # Init Hass.io built-in repositories
         repositories = \
-            set(self.config.addons_repositories) | BUILTIN_REPOSITORIES
+            set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES

-        # init custom repositories & load addons
+        # Init custom repositories and load add-ons
         await self.load_repositories(repositories)

     async def reload(self):
-        """Update addons from repo and reload list."""
+        """Update add-ons from repository and reload list."""
         tasks = [repository.update() for repository in
-                 self.repositories.values()]
+                 self.repositories_obj.values()]
         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks)

         # read data from repositories
         self.data.reload()
@@ -65,69 +77,82 @@ class AddonManager(object):
     async def load_repositories(self, list_repositories):
         """Add a new custom repository."""
         new_rep = set(list_repositories)
-        old_rep = set(self.repositories)
+        old_rep = set(self.repositories_obj)

         # add new repository
         async def _add_repository(url):
             """Helper function to async add repository."""
-            repository = Repository(self.config, self.loop, self.data, url)
+            repository = Repository(self.coresys, url)
             if not await repository.load():
                 _LOGGER.error("Can't load from repository %s", url)
                 return
-            self.repositories[url] = repository
+            self.repositories_obj[url] = repository

             # don't add built-in repository to config
             if url not in BUILTIN_REPOSITORIES:
-                self.config.add_addon_repository(url)
+                self.sys_config.add_addon_repository(url)

         tasks = [_add_repository(url) for url in new_rep - old_rep]
         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks)

         # del new repository
         for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
-            self.repositories.pop(url).remove()
-            self.config.drop_addon_repository(url)
+            self.repositories_obj.pop(url).remove()
+            self.sys_config.drop_addon_repository(url)

         # update data
         self.data.reload()
         await self.load_addons()

     async def load_addons(self):
-        """Update/add internal addon store."""
+        """Update/add internal add-on store."""
         all_addons = set(self.data.system) | set(self.data.cache)

         # calc diff
-        add_addons = all_addons - set(self.addons)
-        del_addons = set(self.addons) - all_addons
+        add_addons = all_addons - set(self.addons_obj)
+        del_addons = set(self.addons_obj) - all_addons

-        _LOGGER.info("Load addons: %d all - %d new - %d remove",
+        _LOGGER.info("Load add-ons: %d all - %d new - %d remove",
                      len(all_addons), len(add_addons), len(del_addons))

         # new addons
         tasks = []
         for addon_slug in add_addons:
-            addon = Addon(
-                self.config, self.loop, self.dock, self.data, addon_slug)
+            addon = Addon(self.coresys, addon_slug)

             tasks.append(addon.load())
-            self.addons[addon_slug] = addon
+            self.addons_obj[addon_slug] = addon

         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks)

         # remove
         for addon_slug in del_addons:
-            self.addons.pop(addon_slug)
+            self.addons_obj.pop(addon_slug)

-    async def auto_boot(self, stage):
-        """Boot addons with mode auto."""
+    async def boot(self, stage):
+        """Boot add-ons with mode auto."""
         tasks = []
-        for addon in self.addons.values():
+        for addon in self.addons_obj.values():
             if addon.is_installed and addon.boot == BOOT_AUTO and \
                     addon.startup == stage:
                 tasks.append(addon.start())

-        _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
+        _LOGGER.info("Startup %s run %d add-ons", stage, len(tasks))
         if tasks:
-            await asyncio.wait(tasks, loop=self.loop)
+            await asyncio.wait(tasks)
+            await asyncio.sleep(self.sys_config.wait_boot)
+
+    async def shutdown(self, stage):
+        """Shutdown add-ons."""
+        tasks = []
+        for addon in self.addons_obj.values():
+            if addon.is_installed and \
+                    await addon.state() == STATE_STARTED and \
+                    addon.startup == stage:
+                tasks.append(addon.stop())
+
+        _LOGGER.info("Shutdown %s stop %d add-ons", stage, len(tasks))
+        if tasks:
+            await asyncio.wait(tasks)
File diff suppressed because it is too large
82  hassio/addons/build.py  Normal file
@@ -0,0 +1,82 @@
+"""Hass.io add-on build environment."""
+from __future__ import annotations
+from pathlib import Path
+from typing import TYPE_CHECKING, Dict
+
+from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
+from ..coresys import CoreSys, CoreSysAttributes
+from ..utils.json import JsonConfig
+from .validate import SCHEMA_BUILD_CONFIG
+
+if TYPE_CHECKING:
+    from .addon import Addon
+
+
+class AddonBuild(JsonConfig, CoreSysAttributes):
+    """Handle build options for add-ons."""
+
+    def __init__(self, coresys: CoreSys, slug: str) -> None:
+        """Initialize Hass.io add-on builder."""
+        self.coresys: CoreSys = coresys
+        self._id: str = slug
+
+        super().__init__(
+            Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
+
+    def save_data(self):
+        """Ignore save function."""
+
+    @property
+    def addon(self) -> Addon:
+        """Return add-on of build data."""
+        return self.sys_addons.get(self._id)
+
+    @property
+    def base_image(self) -> str:
+        """Base image for this add-on."""
+        return self._data[ATTR_BUILD_FROM].get(
+            self.sys_arch.default,
+            f"homeassistant/{self.sys_arch.default}-base:latest")
+
+    @property
+    def squash(self) -> bool:
+        """Return True or False if squash is active."""
+        return self._data[ATTR_SQUASH]
+
+    @property
+    def additional_args(self) -> Dict[str, str]:
+        """Return additional Docker build arguments."""
+        return self._data[ATTR_ARGS]
+
+    def get_docker_args(self, version):
+        """Create a dict with Docker build arguments."""
+        args = {
+            'path': str(self.addon.path_location),
+            'tag': f"{self.addon.image}:{version}",
+            'pull': True,
+            'forcerm': True,
+            'squash': self.squash,
+            'labels': {
+                'io.hass.version': version,
+                'io.hass.arch': self.sys_arch.default,
+                'io.hass.type': META_ADDON,
+                'io.hass.name': self._fix_label('name'),
+                'io.hass.description': self._fix_label('description'),
+            },
+            'buildargs': {
+                'BUILD_FROM': self.base_image,
+                'BUILD_VERSION': version,
+                'BUILD_ARCH': self.sys_arch.default,
+                **self.additional_args,
+            }
+        }
+
+        if self.addon.url:
+            args['labels']['io.hass.url'] = self.addon.url
+
+        return args
+
+    def _fix_label(self, label_name: str) -> str:
+        """Remove characters that are not supported."""
+        label = getattr(self.addon, label_name, "")
+        return label.replace("'", "")
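The dict returned by `get_docker_args()` lines up with the keyword arguments of docker-py's `images.build()` (path, tag, pull, forcerm, squash, labels, buildargs). A minimal sketch of a caller, assuming a docker-py client; the helper below is hypothetical and not part of this diff:

```python
import docker  # assumption: docker-py, the Docker client library


def build_addon_image(build_env, version: str):
    """Build an add-on image from an AddonBuild-style args dict.

    `build_env` is assumed to provide get_docker_args() as defined above.
    """
    client = docker.from_env()

    # images.build() returns the built image and the build log generator.
    image, _log = client.images.build(**build_env.get_docker_args(version))
    return image
```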
@@ -1,12 +1,12 @@
 {
     "local": {
-        "name": "Local Add-Ons",
+        "name": "Local add-ons",
         "url": "https://home-assistant.io/hassio",
-        "maintainer": "By our self"
+        "maintainer": "you"
     },
     "core": {
-        "name": "Built-in Add-Ons",
+        "name": "Official add-ons",
         "url": "https://home-assistant.io/addons",
-        "maintainer": "Home Assistant authors"
+        "maintainer": "Home Assistant"
     }
 }
@@ -1,81 +1,78 @@
-"""Init file for HassIO addons."""
-import copy
+"""Init file for Hass.io add-on data."""
 import logging
-import json
 from pathlib import Path
-import re

 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from .util import extract_hash_from_path
-from .validate import (
-    SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG,
-    MAP_VOLUME)
 from ..const import (
-    FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
-    REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
-from ..tools import JsonConfig, read_json_file
+    ATTR_LOCATON,
+    ATTR_REPOSITORY,
+    ATTR_SLUG,
+    ATTR_SYSTEM,
+    ATTR_USER,
+    FILE_HASSIO_ADDONS,
+    REPOSITORY_CORE,
+    REPOSITORY_LOCAL,
+)
+from ..coresys import CoreSysAttributes
+from ..exceptions import JsonFileError
+from ..utils.json import JsonConfig, read_json_file
+from .utils import extract_hash_from_path
+from .validate import SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG

 _LOGGER = logging.getLogger(__name__)

-RE_VOLUME = re.compile(MAP_VOLUME)
-

-class Data(JsonConfig):
-    """Hold data for addons inside HassIO."""
+class AddonsData(JsonConfig, CoreSysAttributes):
+    """Hold data for Add-ons inside Hass.io."""

-    def __init__(self, config):
+    def __init__(self, coresys):
         """Initialize data holder."""
-        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDON_FILE)
-        self.config = config
+        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
+        self.coresys = coresys
         self._repositories = {}
         self._cache = {}

     @property
     def user(self):
-        """Return local addon user data."""
+        """Return local add-on user data."""
         return self._data[ATTR_USER]

     @property
     def system(self):
-        """Return local addon data."""
+        """Return local add-on data."""
         return self._data[ATTR_SYSTEM]

     @property
     def cache(self):
-        """Return addon data from cache/repositories."""
+        """Return add-on data from cache/repositories."""
         return self._cache

     @property
     def repositories(self):
-        """Return addon data from repositories."""
+        """Return add-on data from repositories."""
         return self._repositories

     def reload(self):
-        """Read data from addons repository."""
+        """Read data from add-on repository."""
         self._cache = {}
         self._repositories = {}

         # read core repository
-        self._read_addons_folder(
-            self.config.path_addons_core, REPOSITORY_CORE)
+        self._read_addons_folder(self.sys_config.path_addons_core, REPOSITORY_CORE)

         # read local repository
-        self._read_addons_folder(
-            self.config.path_addons_local, REPOSITORY_LOCAL)
+        self._read_addons_folder(self.sys_config.path_addons_local, REPOSITORY_LOCAL)

         # add built-in repositories information
         self._set_builtin_repositories()

         # read custom git repositories
-        for repository_element in self.config.path_addons_git.iterdir():
+        for repository_element in self.sys_config.path_addons_git.iterdir():
             if repository_element.is_dir():
                 self._read_git_repository(repository_element)

-        # update local data
-        self._merge_config()
-
     def _read_git_repository(self, path):
         """Process a custom repository folder."""
         slug = extract_hash_from_path(path)
@@ -83,15 +80,12 @@ class Data(JsonConfig):
         # exists repository json
         repository_file = Path(path, "repository.json")
         try:
-            repository_info = SCHEMA_REPOSITORY_CONFIG(
-                read_json_file(repository_file))
-        except (OSError, json.JSONDecodeError):
-            _LOGGER.warning("Can't read repository information from %s",
-                            repository_file)
+            repository_info = SCHEMA_REPOSITORY_CONFIG(read_json_file(repository_file))
+        except JsonFileError:
+            _LOGGER.warning(
+                "Can't read repository information from %s", repository_file
+            )
             return

         except vol.Invalid:
             _LOGGER.warning("Repository parse error %s", repository_file)
             return
@@ -101,65 +95,42 @@ class Data(JsonConfig):
         self._read_addons_folder(path, slug)

     def _read_addons_folder(self, path, repository):
-        """Read data from addons folder."""
+        """Read data from add-ons folder."""
         for addon in path.glob("**/config.json"):
             try:
                 addon_config = read_json_file(addon)
+            except JsonFileError:
+                _LOGGER.warning("Can't read %s from repository %s", addon, repository)
+                continue

-                # validate
+            # validate
+            try:
                 addon_config = SCHEMA_ADDON_CONFIG(addon_config)
+            except vol.Invalid as ex:
+                _LOGGER.warning(
+                    "Can't read %s: %s", addon, humanize_error(addon_config, ex)
+                )
+                continue

-                # Generate slug
-                addon_slug = "{}_{}".format(
-                    repository, addon_config[ATTR_SLUG])
+            # Generate slug
+            addon_slug = "{}_{}".format(repository, addon_config[ATTR_SLUG])

-                # store
-                addon_config[ATTR_REPOSITORY] = repository
-                addon_config[ATTR_LOCATON] = str(addon.parent)
-                self._cache[addon_slug] = addon_config
-
-            except (OSError, json.JSONDecodeError):
-                _LOGGER.warning("Can't read %s", addon)
-
-            except vol.Invalid as ex:
-                _LOGGER.warning("Can't read %s -> %s", addon,
-                                humanize_error(addon_config, ex))
+            # store
+            addon_config[ATTR_REPOSITORY] = repository
+            addon_config[ATTR_LOCATON] = str(addon.parent)
+            self._cache[addon_slug] = addon_config

     def _set_builtin_repositories(self):
         """Add local built-in repository into dataset."""
         try:
-            builtin_file = Path(__file__).parent.joinpath('built-in.json')
+            builtin_file = Path(__file__).parent.joinpath("built-in.json")
             builtin_data = read_json_file(builtin_file)
-        except (OSError, json.JSONDecodeError) as err:
-            _LOGGER.warning("Can't read built-in.json -> %s", err)
+        except JsonFileError:
+            _LOGGER.warning("Can't read built-in json")
             return

         # core repository
-        self._repositories[REPOSITORY_CORE] = \
-            builtin_data[REPOSITORY_CORE]
+        self._repositories[REPOSITORY_CORE] = builtin_data[REPOSITORY_CORE]

         # local repository
-        self._repositories[REPOSITORY_LOCAL] = \
-            builtin_data[REPOSITORY_LOCAL]
-
-    def _merge_config(self):
-        """Update local config if they have update.
-
-        It need to be the same version as the local version is for merge.
-        """
-        have_change = False
-
-        for addon in set(self.system):
-            # detached
-            if addon not in self._cache:
-                continue
-
-            cache = self._cache[addon]
-            data = self.system[addon]
-            if data[ATTR_VERSION] == cache[ATTR_VERSION]:
-                if data != cache:
-                    self.system[addon] = copy.deepcopy(cache)
-                    have_change = True
-
-        if have_change:
-            self.save()
+        self._repositories[REPOSITORY_LOCAL] = builtin_data[REPOSITORY_LOCAL]
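_read_addons_folder() leans on pathlib's recursive glob to find every add-on manifest below a repository. For reference, a standalone sketch of that scan-and-namespace loop (the path is illustrative):

import json
from pathlib import Path

repository = "core"
repository_path = Path("/data/addons/core")  # illustrative location

for config in repository_path.glob("**/config.json"):
    try:
        addon_config = json.loads(config.read_text())
    except (OSError, json.JSONDecodeError):
        print(f"Can't read {config}")
        continue

    # Slugs are namespaced by repository, e.g. "core_ssh"
    print("{}_{}".format(repository, addon_config.get("slug")))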
@@ -1,4 +1,4 @@
-"""Init file for HassIO addons git."""
+"""Init file for Hass.io add-on Git."""
 import asyncio
 import logging
 import functools as ft
@@ -7,104 +7,149 @@ import shutil

 import git

-from .util import get_hash_from_repository
-from ..const import URL_HASSIO_ADDONS
+from .utils import get_hash_from_repository
+from ..const import URL_HASSIO_ADDONS, ATTR_URL, ATTR_BRANCH
+from ..coresys import CoreSysAttributes
+from ..validate import RE_REPOSITORY

 _LOGGER = logging.getLogger(__name__)


-class GitRepo(object):
-    """Manage addons git repo."""
+class GitRepo(CoreSysAttributes):
+    """Manage Add-on Git repository."""

-    def __init__(self, config, loop, path, url):
-        """Initialize git base wrapper."""
-        self.config = config
-        self.loop = loop
+    def __init__(self, coresys, path, url):
+        """Initialize Git base wrapper."""
+        self.coresys = coresys
         self.repo = None
         self.path = path
-        self.url = url
-        self._lock = asyncio.Lock(loop=loop)
+        self.lock = asyncio.Lock(loop=coresys.loop)
+
+        self._data = RE_REPOSITORY.match(url).groupdict()
+
+    @property
+    def url(self):
+        """Return repository URL."""
+        return self._data[ATTR_URL]
+
+    @property
+    def branch(self):
+        """Return repository branch."""
+        return self._data[ATTR_BRANCH]

     async def load(self):
-        """Init git addon repo."""
+        """Init Git add-on repository."""
         if not self.path.is_dir():
             return await self.clone()

-        async with self._lock:
+        async with self.lock:
             try:
-                _LOGGER.info("Load addon %s repository", self.path)
-                self.repo = await self.loop.run_in_executor(
-                    None, git.Repo, str(self.path))
+                _LOGGER.info("Load add-on %s repository", self.path)
+                self.repo = await self.sys_run_in_executor(
+                    git.Repo, str(self.path))

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                     git.GitCommandError) as err:
                 _LOGGER.error("Can't load %s repo: %s.", self.path, err)
+                self._remove()
                 return False

         return True

     async def clone(self):
-        """Clone git addon repo."""
-        async with self._lock:
+        """Clone git add-on repository."""
+        async with self.lock:
+            git_args = {
+                attribute: value
+                for attribute, value in (
+                    ('recursive', True),
+                    ('branch', self.branch),
+                    ('depth', 1),
+                    ('shallow-submodules', True)
+                ) if value is not None
+            }
+
             try:
-                _LOGGER.info("Clone addon %s repository", self.url)
-                self.repo = await self.loop.run_in_executor(
-                    None, ft.partial(
-                        git.Repo.clone_from, self.url, str(self.path),
-                        recursive=True))
+                _LOGGER.info("Clone add-on %s repository", self.url)
+                self.repo = await self.sys_run_in_executor(ft.partial(
+                    git.Repo.clone_from, self.url, str(self.path),
+                    **git_args
+                ))

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                     git.GitCommandError) as err:
-                _LOGGER.error("Can't clone %s repo: %s.", self.url, err)
+                _LOGGER.error("Can't clone %s repository: %s.", self.url, err)
+                self._remove()
                 return False

         return True

     async def pull(self):
-        """Pull git addon repo."""
-        if self._lock.locked():
-            _LOGGER.warning("It is already a task in progress.")
+        """Pull Git add-on repo."""
+        if self.lock.locked():
+            _LOGGER.warning("It is already a task in progress")
             return False

-        async with self._lock:
+        async with self.lock:
+            _LOGGER.info("Update add-on %s repository", self.url)
+            branch = self.repo.active_branch.name
+
             try:
-                _LOGGER.info("Pull addon %s repository", self.url)
-                await self.loop.run_in_executor(
-                    None, self.repo.remotes.origin.pull)
+                # Download data
+                await self.sys_run_in_executor(ft.partial(
+                    self.repo.remotes.origin.fetch, **{
+                        'update-shallow': True,
+                        'depth': 1,
+                    }))
+
+                # Jump on top of that
+                await self.sys_run_in_executor(ft.partial(
+                    self.repo.git.reset, f"origin/{branch}", hard=True))
+
+                # Cleanup old data
+                await self.sys_run_in_executor(ft.partial(
+                    self.repo.git.clean, "-xdf"))

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
-                    git.exc.GitCommandError) as err:
-                _LOGGER.error("Can't pull %s repo: %s.", self.url, err)
+                    git.GitCommandError) as err:
+                _LOGGER.error("Can't update %s repo: %s.", self.url, err)
                 return False

         return True

-
-class GitRepoHassIO(GitRepo):
-    """HassIO addons repository."""
-
-    def __init__(self, config, loop):
-        """Initialize git hassio addon repository."""
-        super().__init__(
-            config, loop, config.path_addons_core, URL_HASSIO_ADDONS)
-
-
-class GitRepoCustom(GitRepo):
-    """Custom addons repository."""
-
-    def __init__(self, config, loop, url):
-        """Initialize git hassio addon repository."""
-        path = Path(config.path_addons_git, get_hash_from_repository(url))
-
-        super().__init__(config, loop, path, url)
-
-    def remove(self):
-        """Remove a custom addon."""
-        if self.path.is_dir():
-            _LOGGER.info("Remove custom addon repository %s", self.url)
+    def _remove(self):
+        """Remove a repository."""
+        if not self.path.is_dir():
+            return
+
+        def log_err(funct, path, _):
+            """Log error."""
+            _LOGGER.warning("Can't remove %s", path)
+
+        shutil.rmtree(str(self.path), onerror=log_err)
+
+
+class GitRepoHassIO(GitRepo):
+    """Hass.io add-ons repository."""
+
+    def __init__(self, coresys):
+        """Initialize Git Hass.io add-on repository."""
+        super().__init__(
+            coresys, coresys.config.path_addons_core, URL_HASSIO_ADDONS)
+
+
+class GitRepoCustom(GitRepo):
+    """Custom add-ons repository."""
+
+    def __init__(self, coresys, url):
+        """Initialize custom Git Hass.io add-on repository."""
+        path = Path(
+            coresys.config.path_addons_git,
+            get_hash_from_repository(url))
+
+        super().__init__(coresys, path, url)
+
+    def remove(self):
+        """Remove a custom repository."""
+        _LOGGER.info("Remove custom add-on repository %s", self.url)
+        self._remove()
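A detail worth calling out in the clone() rewrite above: GitPython forwards extra keyword arguments of Repo.clone_from to the git CLI as flags, and an option such as shallow-submodules contains a dash, so it is not a valid Python identifier and can only be supplied by unpacking a dict. A small sketch of the same pattern in isolation (URL and target path are illustrative):

import git

git_args = {
    key: value
    for key, value in (
        ('recursive', True),           # --recursive
        ('branch', 'master'),          # --branch master
        ('depth', 1),                  # --depth 1 (shallow clone)
        ('shallow-submodules', True),  # --shallow-submodules
    ) if value is not None
}

repo = git.Repo.clone_from(
    "https://github.com/hassio-addons/repository",  # illustrative URL
    "/tmp/example-repo",
    **git_args,
)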
@@ -1,18 +1,20 @@
-"""Represent a HassIO repository."""
+"""Represent a Hass.io repository."""
 from .git import GitRepoHassIO, GitRepoCustom
-from .util import get_hash_from_repository
+from .utils import get_hash_from_repository
 from ..const import (
     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)
+from ..coresys import CoreSysAttributes
+from ..exceptions import APIError

 UNKNOWN = 'unknown'


-class Repository(object):
-    """Repository in HassIO."""
+class Repository(CoreSysAttributes):
+    """Repository in Hass.io."""

-    def __init__(self, config, loop, data, repository):
+    def __init__(self, coresys, repository):
         """Initialize repository object."""
-        self.data = data
+        self.coresys = coresys
         self.source = None
         self.git = None
@@ -20,16 +22,16 @@ class Repository(object):
             self._id = repository
         elif repository == REPOSITORY_CORE:
             self._id = repository
-            self.git = GitRepoHassIO(config, loop)
+            self.git = GitRepoHassIO(coresys)
         else:
             self._id = get_hash_from_repository(repository)
-            self.git = GitRepoCustom(config, loop, repository)
+            self.git = GitRepoCustom(coresys, repository)
             self.source = repository

     @property
     def _mesh(self):
         """Return data struct repository."""
-        return self.data.repositories.get(self._id, {})
+        return self.sys_addons.data.repositories.get(self._id, {})

     @property
     def slug(self):
@@ -43,7 +45,7 @@ class Repository(object):

     @property
     def url(self):
-        """Return url of repository."""
+        """Return URL of repository."""
         return self._mesh.get(ATTR_URL, self.source)

     @property
@@ -58,14 +60,14 @@ class Repository(object):
         return True

     async def update(self):
-        """Update addon repository."""
+        """Update add-on repository."""
         if self.git:
             return await self.git.pull()
         return True

     def remove(self):
-        """Remove addon repository."""
+        """Remove add-on repository."""
         if self._id in (REPOSITORY_CORE, REPOSITORY_LOCAL):
-            raise RuntimeError("Can't remove built-in repositories!")
+            raise APIError("Can't remove built-in repositories!")

         self.git.remove()
@@ -1,35 +0,0 @@
"""Util addons functions."""
import hashlib
import logging
import re

RE_SHA1 = re.compile(r"[a-f0-9]{8}")

_LOGGER = logging.getLogger(__name__)


def get_hash_from_repository(name):
    """Generate a hash from repository."""
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def extract_hash_from_path(path):
    """Extract repo id from path."""
    repo_dir = path.parts[-1]

    if not RE_SHA1.match(repo_dir):
        return get_hash_from_repository(repo_dir)
    return repo_dir


def check_installed(method):
    """Wrap function with check if addon is installed."""
    async def wrap_check(addon, *args, **kwargs):
        """Return False if not installed or the function."""
        if not addon.is_installed:
            _LOGGER.error("Addon %s is not installed", addon.slug)
            return False
        return await method(addon, *args, **kwargs)

    return wrap_check
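check_installed (carried over into the new utils.py below) is a plain decorator around a coroutine method. A self-contained sketch of how it guards a call; DummyAddon is purely a stand-in for a real Addon object:

import asyncio
import logging

_LOGGER = logging.getLogger(__name__)


def check_installed(method):
    """Wrap function with check if add-on is installed."""
    async def wrap_check(addon, *args, **kwargs):
        """Return False if not installed or the function."""
        if not addon.is_installed:
            _LOGGER.error("Addon %s is not installed", addon.slug)
            return False
        return await method(addon, *args, **kwargs)
    return wrap_check


class DummyAddon:
    """Illustrative stand-in for a real Addon."""
    slug = "example"
    is_installed = False

    @check_installed
    async def start(self):
        return "started"


print(asyncio.run(DummyAddon().start()))  # -> False, with an error logged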
hassio/addons/utils.py (Normal file, 130 lines)
@@ -0,0 +1,130 @@
"""Util add-ons functions."""
from __future__ import annotations

import asyncio
import hashlib
import logging
from pathlib import Path
import re
from typing import TYPE_CHECKING

from ..const import (
    PRIVILEGED_DAC_READ_SEARCH,
    PRIVILEGED_NET_ADMIN,
    PRIVILEGED_SYS_ADMIN,
    PRIVILEGED_SYS_MODULE,
    PRIVILEGED_SYS_PTRACE,
    PRIVILEGED_SYS_RAWIO,
    ROLE_ADMIN,
    ROLE_MANAGER,
    SECURITY_DISABLE,
    SECURITY_PROFILE,
)

if TYPE_CHECKING:
    from .addon import Addon

RE_SHA1 = re.compile(r"[a-f0-9]{8}")
_LOGGER = logging.getLogger(__name__)


def rating_security(addon: Addon) -> int:
    """Return 1-6 for security rating.

    1 = not secure
    6 = high secure
    """
    rating = 5

    # AppArmor
    if addon.apparmor == SECURITY_DISABLE:
        rating += -1
    elif addon.apparmor == SECURITY_PROFILE:
        rating += 1

    # Home Assistant Login
    if addon.access_auth_api:
        rating += 1

    # Privileged options
    if any(
        privilege in addon.privileged
        for privilege in (
            PRIVILEGED_NET_ADMIN,
            PRIVILEGED_SYS_ADMIN,
            PRIVILEGED_SYS_RAWIO,
            PRIVILEGED_SYS_PTRACE,
            PRIVILEGED_SYS_MODULE,
            PRIVILEGED_DAC_READ_SEARCH,
        )
    ):
        rating += -1

    # API Hass.io role
    if addon.hassio_role == ROLE_MANAGER:
        rating += -1
    elif addon.hassio_role == ROLE_ADMIN:
        rating += -2

    # Not secure Networking
    if addon.host_network:
        rating += -1

    # Insecure PID namespace
    if addon.host_pid:
        rating += -2

    # Full Access
    if addon.with_full_access:
        rating += -2

    # Docker Access
    if addon.access_docker_api:
        rating = 1

    return max(min(6, rating), 1)


def get_hash_from_repository(name: str) -> str:
    """Generate a hash from repository."""
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def extract_hash_from_path(path: Path) -> str:
    """Extract repo id from path."""
    repository_dir = path.parts[-1]

    if not RE_SHA1.match(repository_dir):
        return get_hash_from_repository(repository_dir)
    return repository_dir


def check_installed(method):
    """Wrap function with check if add-on is installed."""

    async def wrap_check(addon, *args, **kwargs):
        """Return False if not installed or the function."""
        if not addon.is_installed:
            _LOGGER.error("Addon %s is not installed", addon.slug)
            return False
        return await method(addon, *args, **kwargs)

    return wrap_check


async def remove_data(folder: Path) -> None:
    """Remove folder and reset privileged."""
    try:
        proc = await asyncio.create_subprocess_exec(
            "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
        )

        _, error_msg = await proc.communicate()
    except OSError as err:
        error_msg = str(err)
    else:
        if proc.returncode == 0:
            return

    _LOGGER.error("Can't remove Add-on Data: %s", error_msg)
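The repository hashing used throughout this module is small enough to show end to end: the repository URL is lower-cased, SHA1-hashed, and truncated to eight hex characters, which is why custom repository folders carry names like 'a0d7b954'. A quick sketch (the URL is illustrative):

import hashlib


def get_hash_from_repository(name: str) -> str:
    """Generate a hash from repository."""
    return hashlib.sha1(name.lower().encode()).hexdigest()[:8]


print(get_hash_from_repository("https://github.com/hassio-addons/repository"))
# -> a deterministic 8-character hex slug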
@@ -1,21 +1,35 @@
-"""Validate addons options schema."""
+"""Validate add-ons options schema."""
+import logging
 import re
+import uuid

 import voluptuous as vol

 from ..const import (
-    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
-    ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE,
-    STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE,
-    BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER,
-    ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
-    ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
-    ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
-    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
-    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT,
-    ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API)
-from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL
+    ARCH_ALL, ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_ARGS,
+    ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_AUTH_API,
+    ATTR_AUTO_UART, ATTR_AUTO_UPDATE, ATTR_BOOT, ATTR_BUILD_FROM,
+    ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
+    ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
+    ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
+    ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
+    ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE,
+    ATTR_MAINTAINER, ATTR_MAP, ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS,
+    ATTR_PORTS, ATTR_PRIVILEGED, ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA,
+    ATTR_SERVICES, ATTR_SLUG, ATTR_SQUASH, ATTR_STARTUP, ATTR_STATE,
+    ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT, ATTR_TMPFS, ATTR_URL, ATTR_USER,
+    ATTR_UUID, ATTR_VERSION, ATTR_WEBUI, BOOT_AUTO, BOOT_MANUAL,
+    PRIVILEGED_ALL, ROLE_ALL, ROLE_DEFAULT, STARTUP_ALL, STARTUP_APPLICATION,
+    STARTUP_SERVICES, STATE_STARTED, STATE_STOPPED)
+from ..discovery.validate import valid_discovery_service
+from ..validate import (
+    ALSA_DEVICE, DOCKER_PORTS, NETWORK_PORT, SHA256, UUID_MATCH)

+_LOGGER = logging.getLogger(__name__)

-MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$"
+RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
+RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")

 V_STR = 'str'
 V_INT = 'int'
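One subtlety in this hunk: the old MAP_VOLUME pattern carried a stray colon in its rights group, (rw|:ro), so a mapping such as 'ssl:ro' never matched (only the malformed 'ssl::ro' would have). A quick check against the corrected pattern:

import re

RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")

for volume in ("config", "share:rw", "ssl:ro", "backup:xx"):
    match = RE_VOLUME.match(volume)
    if match:
        # group(2) is None when no rights suffix is given
        print(volume, "->", match.group(1), match.group(2))
    else:
        print(volume, "-> invalid")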
@@ -24,22 +38,29 @@ V_BOOL = 'bool'
 V_EMAIL = 'email'
 V_URL = 'url'
 V_PORT = 'port'
+V_MATCH = 'match'

-ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL, V_PORT])
+RE_SCHEMA_ELEMENT = re.compile(
+    r"^(?:"
+    r"|str|bool|email|url|port"
+    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
+    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
+    r"|match\((?P<match>.*)\)"
+    r")\??$"
+)

-ARCH_ALL = [
-    ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
-]
+RE_DOCKER_IMAGE = re.compile(
+    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
+RE_DOCKER_IMAGE_BUILD = re.compile(
+    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")

-STARTUP_ALL = [
-    STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
-    STARTUP_APPLICATION
-]
+SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)

-PRIVILEGED_ALL = [
-    "NET_ADMIN",
-    "SYS_ADMIN",
-    "SYS_RAWIO"
-]
+MACHINE_ALL = [
+    'intel-nuc', 'odroid-c2', 'odroid-xu', 'orangepi-prime', 'qemux86',
+    'qemux86-64', 'qemuarm', 'qemuarm-64', 'raspberrypi', 'raspberrypi2',
+    'raspberrypi3', 'raspberrypi3-64', 'tinker',
+]
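RE_SCHEMA_ELEMENT folds the old flat ADDON_ELEMENT type list into one expression that also carries optional range arguments and a trailing '?' for optional keys. A short sketch of what it accepts:

import re

RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|str|bool|email|url|port"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r")\??$"
)

for element in ("str", "int(0,100)", "float(,9.9)", "port?", "match(^.+$)"):
    match = RE_SCHEMA_ELEMENT.match(element)
    print(element, "->", match.groupdict() if match else "invalid")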
@@ -58,34 +79,58 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
     vol.Required(ATTR_VERSION): vol.Coerce(str),
     vol.Required(ATTR_SLUG): vol.Coerce(str),
     vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
-    vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
+    vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
     vol.Optional(ATTR_URL): vol.Url(),
+    vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
     vol.Required(ATTR_STARTUP):
         vol.All(_simple_startup, vol.In(STARTUP_ALL)),
     vol.Required(ATTR_BOOT):
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_PORTS): DOCKER_PORTS,
     vol.Optional(ATTR_WEBUI):
-        vol.Match(r"^(?:https?):\/\/\[HOST\]:\[PORT:\d+\].*$"),
+        vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
     vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
     vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
+    vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
     vol.Optional(ATTR_TMPFS):
         vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
-    vol.Optional(ATTR_MAP, default=[]): [vol.Match(MAP_VOLUME)],
+    vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
     vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
     vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
+    vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
+    vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
     vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
+    vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
+    vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
+    vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
     vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
+    vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
+    vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
+    vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
+    vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
     vol.Required(ATTR_OPTIONS): dict,
     vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
-        vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
-            vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
-        ], vol.Schema({vol.Coerce(str): ADDON_ELEMENT}))
+        vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
+            vol.Any(
+                SCHEMA_ELEMENT,
+                {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
+            ),
+        ], vol.Schema({
+            vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
+        }))
     }), False),
-    vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
+    vol.Optional(ATTR_IMAGE):
+        vol.Match(RE_DOCKER_IMAGE),
     vol.Optional(ATTR_TIMEOUT, default=10):
-        vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
-}, extra=vol.ALLOW_EXTRA)
+        vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
+}, extra=vol.REMOVE_EXTRA)


 # pylint: disable=no-value-for-parameter
@@ -93,20 +138,36 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
     vol.Required(ATTR_NAME): vol.Coerce(str),
     vol.Optional(ATTR_URL): vol.Url(),
     vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
-}, extra=vol.ALLOW_EXTRA)
+}, extra=vol.REMOVE_EXTRA)


+# pylint: disable=no-value-for-parameter
+SCHEMA_BUILD_CONFIG = vol.Schema({
+    vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema({
+        vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD),
+    }),
+    vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
+    vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
+        vol.Coerce(str): vol.Coerce(str)
+    }),
+}, extra=vol.REMOVE_EXTRA)
+
+
 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_USER = vol.Schema({
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-    vol.Optional(ATTR_OPTIONS, default={}): dict,
     vol.Optional(ATTR_IMAGE): vol.Coerce(str),
+    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
+    vol.Optional(ATTR_ACCESS_TOKEN): SHA256,
+    vol.Optional(ATTR_OPTIONS, default=dict): dict,
     vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
     vol.Optional(ATTR_BOOT):
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
-    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
-    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
-})
+    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
+    vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
+    vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
+}, extra=vol.REMOVE_EXTRA)


 SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
@@ -115,11 +176,11 @@ SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
 })


-SCHEMA_ADDON_FILE = vol.Schema({
-    vol.Optional(ATTR_USER, default={}): {
+SCHEMA_ADDONS_FILE = vol.Schema({
+    vol.Optional(ATTR_USER, default=dict): {
         vol.Coerce(str): SCHEMA_ADDON_USER,
     },
-    vol.Optional(ATTR_SYSTEM, default={}): {
+    vol.Optional(ATTR_SYSTEM, default=dict): {
         vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
     }
 })
@@ -130,19 +191,21 @@ SCHEMA_ADDON_SNAPSHOT = vol.Schema({
     vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
     vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-})
+}, extra=vol.REMOVE_EXTRA)


 def validate_options(raw_schema):
     """Validate schema."""
     def validate(struct):
-        """Create schema validator for addons options."""
+        """Create schema validator for add-ons options."""
         options = {}

         # read options
         for key, value in struct.items():
+            # Ignore unknown options / remove from list
             if key not in raw_schema:
-                raise vol.Invalid("Unknown options {}.".format(key))
+                _LOGGER.warning("Unknown options %s", key)
+                continue

             typ = raw_schema[key]
             try:
@@ -156,41 +219,50 @@ def validate_options(raw_schema):
                 # normal value
                 options[key] = _single_validate(typ, value, key)
             except (IndexError, KeyError):
-                raise vol.Invalid(
-                    "Type error for {}.".format(key)) from None
+                raise vol.Invalid(f"Type error for {key}") from None

+        _check_missing_options(raw_schema, options, 'root')
         return options

     return validate


 # pylint: disable=no-value-for-parameter
+# pylint: disable=inconsistent-return-statements
 def _single_validate(typ, value, key):
     """Validate a single element."""
-    try:
-        # if required argument
-        if value is None:
-            raise vol.Invalid("Missing required option '{}'.".format(key))
+    # if required argument
+    if value is None:
+        raise vol.Invalid(f"Missing required option '{key}'")
+
+    # parse extend data from type
+    match = RE_SCHEMA_ELEMENT.match(typ)
+
+    # prepare range
+    range_args = {}
+    for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
+        group_value = match.group(group_name)
+        if group_value:
+            range_args[group_name[2:]] = float(group_value)

-        if typ == V_STR:
-            return str(value)
-        elif typ == V_INT:
-            return int(value)
-        elif typ == V_FLOAT:
-            return float(value)
-        elif typ == V_BOOL:
-            return vol.Boolean()(value)
-        elif typ == V_EMAIL:
-            return vol.Email()(value)
-        elif typ == V_URL:
-            return vol.Url()(value)
-        elif typ == V_PORT:
-            return NETWORK_PORT(value)
+    if typ.startswith(V_STR):
+        return str(value)
+    elif typ.startswith(V_INT):
+        return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
+    elif typ.startswith(V_FLOAT):
+        return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
+    elif typ.startswith(V_BOOL):
+        return vol.Boolean()(value)
+    elif typ.startswith(V_EMAIL):
+        return vol.Email()(value)
+    elif typ.startswith(V_URL):
+        return vol.Url()(value)
+    elif typ.startswith(V_PORT):
+        return NETWORK_PORT(value)
+    elif typ.startswith(V_MATCH):
+        return vol.Match(match.group('match'))(str(value))

-        raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
-    except ValueError:
-        raise vol.Invalid(
-            "Type {} error for '{}' on {}.".format(typ, value, key)) from None
+    raise vol.Invalid(f"Fatal error for {key} type {typ}")
@@ -198,17 +270,10 @@ def _nested_validate_list(typ, data_list, key):
     options = []

     for element in data_list:
-        # dict list
+        # Nested?
         if isinstance(typ, dict):
-            c_options = {}
-            for c_key, c_value in element.items():
-                if c_key not in typ:
-                    raise vol.Invalid(
-                        "Unknown nested options {}".format(c_key))
-
-                c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
+            c_options = _nested_validate_dict(typ, element, key)
             options.append(c_options)
-        # normal list
         else:
             options.append(_single_validate(typ, element, key))
@@ -220,9 +285,27 @@ def _nested_validate_dict(typ, data_dict, key):
     options = {}

     for c_key, c_value in data_dict.items():
+        # Ignore unknown options / remove from list
         if c_key not in typ:
-            raise vol.Invalid("Unknow nested dict options {}".format(c_key))
+            _LOGGER.warning("Unknown options %s", c_key)
+            continue

-        options[c_key] = _single_validate(typ[c_key], c_value, c_key)
+        # Nested?
+        if isinstance(typ[c_key], list):
+            options[c_key] = _nested_validate_list(typ[c_key][0],
+                                                   c_value, c_key)
+        else:
+            options[c_key] = _single_validate(typ[c_key], c_value, c_key)

+    _check_missing_options(typ, options, key)
     return options


+def _check_missing_options(origin, exists, root):
+    """Check if all options exist."""
+    missing = set(origin) - set(exists)
+    for miss_opt in missing:
+        if isinstance(origin[miss_opt], str) and \
+                origin[miss_opt].endswith("?"):
+            continue
+        raise vol.Invalid(f"Missing option {miss_opt} in {root}")
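The new _check_missing_options helper is what gives the trailing '?' its meaning: schema keys whose type string ends in '?' may be absent, all others must be present. A minimal standalone sketch of that rule:

import voluptuous as vol


def check_missing_options(origin, exists, root):
    """Raise unless every non-optional key of origin appears in exists."""
    missing = set(origin) - set(exists)
    for miss_opt in missing:
        if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
            continue  # trailing '?' marks the option as optional
        raise vol.Invalid(f"Missing option {miss_opt} in {root}")


raw_schema = {"host": "str", "port": "port", "password": "str?"}

check_missing_options(raw_schema, {"host": "a", "port": 80}, "root")  # passes
try:
    check_missing_options(raw_schema, {"host": "a"}, "root")
except vol.Invalid as err:
    print(err)  # Missing option port in root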
@@ -1,161 +1,284 @@
-"""Init file for HassIO rest api."""
+"""Init file for Hass.io RESTful API."""
 import logging
 from pathlib import Path
+from typing import Optional

 from aiohttp import web

+from ..coresys import CoreSys, CoreSysAttributes
 from .addons import APIAddons
+from .auth import APIAuth
+from .discovery import APIDiscovery
+from .hardware import APIHardware
+from .hassos import APIHassOS
 from .homeassistant import APIHomeAssistant
 from .host import APIHost
-from .network import APINetwork
-from .supervisor import APISupervisor
-from .security import APISecurity
+from .info import APIInfo
+from .proxy import APIProxy
+from .security import SecurityMiddleware
+from .services import APIServices
 from .snapshots import APISnapshots
+from .supervisor import APISupervisor

 _LOGGER = logging.getLogger(__name__)


-class RestAPI(object):
-    """Handle rest api for hassio."""
+class RestAPI(CoreSysAttributes):
+    """Handle RESTful API for Hass.io."""

-    def __init__(self, config, loop):
-        """Initialize docker base wrapper."""
-        self.config = config
-        self.loop = loop
-        self.webapp = web.Application(loop=self.loop)
+    def __init__(self, coresys: CoreSys):
+        """Initialize Docker base wrapper."""
+        self.coresys: CoreSys = coresys
+        self.security: SecurityMiddleware = SecurityMiddleware(coresys)
+        self.webapp: web.Application = web.Application(
+            middlewares=[self.security.token_validation])

         # service stuff
-        self._handler = None
-        self.server = None
+        self._runner: web.AppRunner = web.AppRunner(self.webapp)
+        self._site: Optional[web.TCPSite] = None

-    def register_host(self, host_control, hardware):
-        """Register hostcontrol function."""
-        api_host = APIHost(self.config, self.loop, host_control, hardware)
+    async def load(self) -> None:
+        """Register REST API Calls."""
+        self._register_supervisor()
+        self._register_host()
+        self._register_hassos()
+        self._register_hardware()
+        self._register_homeassistant()
+        self._register_proxy()
+        self._register_panel()
+        self._register_addons()
+        self._register_snapshots()
+        self._register_discovery()
+        self._register_services()
+        self._register_info()
+        self._register_auth()

-        self.webapp.router.add_get('/host/info', api_host.info)
-        self.webapp.router.add_get('/host/hardware', api_host.hardware)
-        self.webapp.router.add_post('/host/reboot', api_host.reboot)
-        self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
-        self.webapp.router.add_post('/host/update', api_host.update)
-        self.webapp.router.add_post('/host/options', api_host.options)
+    def _register_host(self) -> None:
+        """Register hostcontrol functions."""
+        api_host = APIHost()
+        api_host.coresys = self.coresys

-    def register_network(self, host_control):
-        """Register network function."""
-        api_net = APINetwork(self.config, self.loop, host_control)
+        self.webapp.add_routes([
+            web.get('/host/info', api_host.info),
+            web.post('/host/reboot', api_host.reboot),
+            web.post('/host/shutdown', api_host.shutdown),
+            web.post('/host/reload', api_host.reload),
+            web.post('/host/options', api_host.options),
+            web.get('/host/services', api_host.services),
+            web.post('/host/services/{service}/stop', api_host.service_stop),
+            web.post('/host/services/{service}/start', api_host.service_start),
+            web.post('/host/services/{service}/restart',
+                     api_host.service_restart),
+            web.post('/host/services/{service}/reload',
+                     api_host.service_reload),
+        ])

-        self.webapp.router.add_get('/network/info', api_net.info)
-        self.webapp.router.add_post('/network/options', api_net.options)
+    def _register_hassos(self) -> None:
+        """Register HassOS functions."""
+        api_hassos = APIHassOS()
+        api_hassos.coresys = self.coresys

-    def register_supervisor(self, supervisor, snapshots, addons, host_control,
-                            updater):
-        """Register supervisor function."""
-        api_supervisor = APISupervisor(
-            self.config, self.loop, supervisor, snapshots, addons,
-            host_control, updater)
+        self.webapp.add_routes([
+            web.get('/hassos/info', api_hassos.info),
+            web.post('/hassos/update', api_hassos.update),
+            web.post('/hassos/update/cli', api_hassos.update_cli),
+            web.post('/hassos/config/sync', api_hassos.config_sync),
+        ])

-        self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
-        self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
-        self.webapp.router.add_post(
-            '/supervisor/update', api_supervisor.update)
-        self.webapp.router.add_post(
-            '/supervisor/reload', api_supervisor.reload)
-        self.webapp.router.add_post(
-            '/supervisor/options', api_supervisor.options)
-        self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)
+    def _register_hardware(self) -> None:
+        """Register hardware functions."""
+        api_hardware = APIHardware()
+        api_hardware.coresys = self.coresys

-    def register_homeassistant(self, dock_homeassistant):
-        """Register homeassistant function."""
-        api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)
+        self.webapp.add_routes([
+            web.get('/hardware/info', api_hardware.info),
+            web.get('/hardware/audio', api_hardware.audio),
+        ])

-        self.webapp.router.add_get('/homeassistant/info', api_hass.info)
-        self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
-        self.webapp.router.add_post('/homeassistant/options', api_hass.options)
-        self.webapp.router.add_post('/homeassistant/update', api_hass.update)
-        self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
-        self.webapp.router.add_post('/homeassistant/check', api_hass.check)
+    def _register_info(self) -> None:
+        """Register info functions."""
+        api_info = APIInfo()
+        api_info.coresys = self.coresys

-    def register_addons(self, addons):
-        """Register homeassistant function."""
-        api_addons = APIAddons(self.config, self.loop, addons)
+        self.webapp.add_routes([
+            web.get('/info', api_info.info),
+        ])

-        self.webapp.router.add_get('/addons', api_addons.list)
-        self.webapp.router.add_post('/addons/reload', api_addons.reload)
+    def _register_auth(self) -> None:
+        """Register auth functions."""
+        api_auth = APIAuth()
+        api_auth.coresys = self.coresys

-        self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
-        self.webapp.router.add_post(
-            '/addons/{addon}/install', api_addons.install)
-        self.webapp.router.add_post(
-            '/addons/{addon}/uninstall', api_addons.uninstall)
-        self.webapp.router.add_post('/addons/{addon}/start', api_addons.start)
-        self.webapp.router.add_post('/addons/{addon}/stop', api_addons.stop)
-        self.webapp.router.add_post(
-            '/addons/{addon}/restart', api_addons.restart)
-        self.webapp.router.add_post(
-            '/addons/{addon}/update', api_addons.update)
-        self.webapp.router.add_post(
-            '/addons/{addon}/options', api_addons.options)
-        self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
-        self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
+        self.webapp.add_routes([
+            web.post('/auth', api_auth.auth),
+        ])

-    def register_security(self):
-        """Register security function."""
-        api_security = APISecurity(self.config, self.loop)
+    def _register_supervisor(self) -> None:
+        """Register Supervisor functions."""
+        api_supervisor = APISupervisor()
+        api_supervisor.coresys = self.coresys

-        self.webapp.router.add_get('/security/info', api_security.info)
-        self.webapp.router.add_post('/security/options', api_security.options)
-        self.webapp.router.add_post('/security/totp', api_security.totp)
-        self.webapp.router.add_post('/security/session', api_security.session)
+        self.webapp.add_routes([
+            web.get('/supervisor/ping', api_supervisor.ping),
+            web.get('/supervisor/info', api_supervisor.info),
+            web.get('/supervisor/stats', api_supervisor.stats),
+            web.get('/supervisor/logs', api_supervisor.logs),
+            web.post('/supervisor/update', api_supervisor.update),
+            web.post('/supervisor/reload', api_supervisor.reload),
+            web.post('/supervisor/options', api_supervisor.options),
+        ])

-    def register_snapshots(self, snapshots):
-        """Register snapshots function."""
-        api_snapshots = APISnapshots(self.config, self.loop, snapshots)
+    def _register_homeassistant(self) -> None:
+        """Register Home Assistant functions."""
+        api_hass = APIHomeAssistant()
+        api_hass.coresys = self.coresys

-        self.webapp.router.add_get('/snapshots', api_snapshots.list)
-        self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)
+        self.webapp.add_routes([
+            web.get('/homeassistant/info', api_hass.info),
+            web.get('/homeassistant/logs', api_hass.logs),
+            web.get('/homeassistant/stats', api_hass.stats),
+            web.post('/homeassistant/options', api_hass.options),
+            web.post('/homeassistant/update', api_hass.update),
+            web.post('/homeassistant/restart', api_hass.restart),
+            web.post('/homeassistant/stop', api_hass.stop),
+            web.post('/homeassistant/start', api_hass.start),
+            web.post('/homeassistant/check', api_hass.check),
+            web.post('/homeassistant/rebuild', api_hass.rebuild),
+        ])

-        self.webapp.router.add_post(
-            '/snapshots/new/full', api_snapshots.snapshot_full)
-        self.webapp.router.add_post(
-            '/snapshots/new/partial', api_snapshots.snapshot_partial)
+    def _register_proxy(self) -> None:
+        """Register Home Assistant API Proxy."""
+        api_proxy = APIProxy()
+        api_proxy.coresys = self.coresys

-        self.webapp.router.add_get(
-            '/snapshots/{snapshot}/info', api_snapshots.info)
-        self.webapp.router.add_post(
-            '/snapshots/{snapshot}/remove', api_snapshots.remove)
-        self.webapp.router.add_post(
-            '/snapshots/{snapshot}/restore/full', api_snapshots.restore_full)
-        self.webapp.router.add_post(
-            '/snapshots/{snapshot}/restore/partial',
-            api_snapshots.restore_partial)
+        self.webapp.add_routes([
+            web.get('/homeassistant/api/websocket', api_proxy.websocket),
+            web.get('/homeassistant/websocket', api_proxy.websocket),
+            web.get('/homeassistant/api/stream', api_proxy.stream),
+            web.post('/homeassistant/api/{path:.+}', api_proxy.api),
+            web.get('/homeassistant/api/{path:.+}', api_proxy.api),
+            web.get('/homeassistant/api/', api_proxy.api),
+        ])

-    def register_panel(self):
-        """Register panel for homeassistant."""
-        panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html')
+    def _register_addons(self) -> None:
+        """Register Add-on functions."""
+        api_addons = APIAddons()
+        api_addons.coresys = self.coresys

-        def get_panel(request):
-            """Return file response with panel."""
-            return web.FileResponse(panel)
+        self.webapp.add_routes([
+            web.get('/addons', api_addons.list),
+            web.post('/addons/reload', api_addons.reload),
+            web.get('/addons/{addon}/info', api_addons.info),
+            web.post('/addons/{addon}/install', api_addons.install),
+            web.post('/addons/{addon}/uninstall', api_addons.uninstall),
+            web.post('/addons/{addon}/start', api_addons.start),
+            web.post('/addons/{addon}/stop', api_addons.stop),
+            web.post('/addons/{addon}/restart', api_addons.restart),
+            web.post('/addons/{addon}/update', api_addons.update),
+            web.post('/addons/{addon}/options', api_addons.options),
+            web.post('/addons/{addon}/rebuild', api_addons.rebuild),
+            web.get('/addons/{addon}/logs', api_addons.logs),
+            web.get('/addons/{addon}/icon', api_addons.icon),
+            web.get('/addons/{addon}/logo', api_addons.logo),
+            web.get('/addons/{addon}/changelog', api_addons.changelog),
+            web.post('/addons/{addon}/stdin', api_addons.stdin),
+            web.post('/addons/{addon}/security', api_addons.security),
+            web.get('/addons/{addon}/stats', api_addons.stats),
+        ])

-        self.webapp.router.add_get('/panel', get_panel)
+    def _register_snapshots(self) -> None:
+        """Register snapshots functions."""
+        api_snapshots = APISnapshots()
+        api_snapshots.coresys = self.coresys

-    async def start(self):
-        """Run rest api webserver."""
-        self._handler = self.webapp.make_handler(loop=self.loop)
+        self.webapp.add_routes([
+            web.get('/snapshots', api_snapshots.list),
+            web.post('/snapshots/reload', api_snapshots.reload),
+            web.post('/snapshots/new/full', api_snapshots.snapshot_full),
+            web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
+            web.post('/snapshots/new/upload', api_snapshots.upload),
+            web.get('/snapshots/{snapshot}/info', api_snapshots.info),
+            web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
+            web.post('/snapshots/{snapshot}/restore/full',
+                     api_snapshots.restore_full),
+            web.post('/snapshots/{snapshot}/restore/partial',
+                     api_snapshots.restore_partial),
+            web.get('/snapshots/{snapshot}/download', api_snapshots.download),
+        ])

+    def _register_services(self) -> None:
+        """Register services functions."""
+        api_services = APIServices()
+        api_services.coresys = self.coresys
+
+        self.webapp.add_routes([
+            web.get('/services', api_services.list),
+            web.get('/services/{service}', api_services.get_service),
+            web.post('/services/{service}', api_services.set_service),
+            web.delete('/services/{service}', api_services.del_service),
+        ])
+
+    def _register_discovery(self) -> None:
+        """Register discovery functions."""
+        api_discovery = APIDiscovery()
+        api_discovery.coresys = self.coresys
+
+        self.webapp.add_routes([
+            web.get('/discovery', api_discovery.list),
+            web.get('/discovery/{uuid}', api_discovery.get_discovery),
+            web.delete('/discovery/{uuid}', api_discovery.del_discovery),
+            web.post('/discovery', api_discovery.set_discovery),
+        ])
+
+    def _register_panel(self) -> None:
+        """Register panel for Home Assistant."""
+        panel_dir = Path(__file__).parent.joinpath("panel")
+
+        def create_response(panel_file):
+            """Create a function to generate a response."""
+            path = panel_dir.joinpath(f"{panel_file!s}.html")
+            return lambda request: web.FileResponse(path)
+
+        # This route is for backwards compatibility with HA < 0.58
+        self.webapp.add_routes(
+            [web.get('/panel', create_response('hassio-main-es5'))])
+
+        # This route is for backwards compatibility with HA 0.58 - 0.61
+        self.webapp.add_routes([
+            web.get('/panel_es5', create_response('hassio-main-es5')),
+            web.get('/panel_latest', create_response('hassio-main-latest')),
+        ])
+
+        # This route is for backwards compatibility with HA 0.62 - 0.70
+        self.webapp.add_routes([
+            web.get('/app-es5/index.html', create_response('index')),
+            web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
+        ])
+
+        # This route is for HA > 0.70
+        self.webapp.add_routes([web.static('/app', panel_dir)])
+
+    async def start(self) -> None:
+        """Run RESTful API webserver."""
+        await self._runner.setup()
+        self._site = web.TCPSite(
+            self._runner, host="0.0.0.0", port=80, shutdown_timeout=5)

         try:
-            self.server = await self.loop.create_server(
-                self._handler, "0.0.0.0", "80")
+            await self._site.start()
         except OSError as err:
-            _LOGGER.fatal(
-                "Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
+            _LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s",
+                          err)
+        else:
+            _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)

-    async def stop(self):
-        """Stop rest api webserver."""
-        if self.server:
-            self.server.close()
-            await self.server.wait_closed()
-        await self.webapp.shutdown()
-
-        if self._handler:
-            await self._handler.finish_connections(60)
-            await self.webapp.cleanup()
+    async def stop(self) -> None:
+        """Stop RESTful API webserver."""
+        if not self._site:
+            return
+
+        # Shutdown running API
+        await self._site.stop()
+        await self._runner.cleanup()
+
+        _LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)
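The start()/stop() rewrite in this hunk swaps the long-deprecated Application.make_handler()/loop.create_server() pair for aiohttp's runner API. The same pattern in isolation; host, port, and the handler are illustrative:

import asyncio
from aiohttp import web


async def ping(request):
    """Trivial handler."""
    return web.Response(text="pong")


async def main():
    app = web.Application()
    app.add_routes([web.get('/ping', ping)])

    # AppRunner/TCPSite replace make_handler()/create_server()
    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, host="127.0.0.1", port=8080, shutdown_timeout=5)
    await site.start()

    await asyncio.sleep(60)  # serve for a while

    # Orderly shutdown, mirroring RestAPI.stop()
    await site.stop()
    await runner.cleanup()


asyncio.run(main())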
@@ -1,11 +1,12 @@
-"""Init file for HassIO homeassistant rest api."""
+"""Init file for Hass.io Home Assistant RESTful API."""
 import asyncio
 import logging

 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from .util import api_process, api_process_raw, api_validate
+from .utils import api_process, api_process_raw, api_validate
+from ..addons.utils import rating_security
 from ..const import (
     ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
     ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
@@ -13,8 +14,18 @@ from ..const import (
     ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
     ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
     ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
-    BOOT_AUTO, BOOT_MANUAL, CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
-from ..validate import DOCKER_PORTS
+    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
+    ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
+    ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
+    ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
+    ATTR_DISCOVERY, ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API,
+    ATTR_FULL_ACCESS, ATTR_PROTECTED, ATTR_RATING, ATTR_HOST_PID,
+    ATTR_HASSIO_ROLE, ATTR_MACHINE, ATTR_AVAILABLE, ATTR_AUTH_API,
+    ATTR_KERNEL_MODULES,
+    CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT, REQUEST_FROM)
+from ..coresys import CoreSysAttributes
+from ..validate import DOCKER_PORTS, ALSA_DEVICE
+from ..exceptions import APIError

 _LOGGER = logging.getLogger(__name__)
@@ -27,62 +38,58 @@ SCHEMA_OPTIONS = vol.Schema({
     vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
     vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
+    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
+    vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
+})
+
+# pylint: disable=no-value-for-parameter
+SCHEMA_SECURITY = vol.Schema({
+    vol.Optional(ATTR_PROTECTED): vol.Boolean(),
 })


-class APIAddons(object):
-    """Handle rest api for addons functions."""
-
-    def __init__(self, config, loop, addons):
-        """Initialize homeassistant rest api part."""
-        self.config = config
-        self.loop = loop
-        self.addons = addons
+class APIAddons(CoreSysAttributes):
+    """Handle RESTful API for add-on functions."""

     def _extract_addon(self, request, check_installed=True):
-        """Return addon and if not exists trow a exception."""
-        addon = self.addons.get(request.match_info.get('addon'))
+        """Return add-on, throw an exception if it doesn't exist."""
+        addon_slug = request.match_info.get('addon')
+
+        # Lookup itself
+        if addon_slug == 'self':
+            return request.get(REQUEST_FROM)
+
+        addon = self.sys_addons.get(addon_slug)
         if not addon:
-            raise RuntimeError("Addon not exists")
+            raise APIError("Addon does not exist")

         if check_installed and not addon.is_installed:
-            raise RuntimeError("Addon is not installed")
+            raise APIError("Addon is not installed")

         return addon

     @staticmethod
     def _pretty_devices(addon):
         """Return a simplified device list."""
         dev_list = addon.devices
         if not dev_list:
             return
         return [row.split(':')[0] for row in dev_list]

     @api_process
     async def list(self, request):
-        """Return all addons / repositories ."""
+        """Return all add-ons or repositories."""
         data_addons = []
-        for addon in self.addons.list_addons:
+        for addon in self.sys_addons.list_addons:
             data_addons.append({
                 ATTR_NAME: addon.name,
                 ATTR_SLUG: addon.slug,
                 ATTR_DESCRIPTON: addon.description,
                 ATTR_VERSION: addon.last_version,
                 ATTR_INSTALLED: addon.version_installed,
                 ATTR_ARCH: addon.supported_arch,
+                ATTR_AVAILABLE: addon.available,
                 ATTR_DETACHED: addon.is_detached,
                 ATTR_REPOSITORY: addon.repository,
                 ATTR_BUILD: addon.need_build,
                 ATTR_PRIVILEGED: addon.privileged,
                 ATTR_DEVICES: self._pretty_devices(addon),
                 ATTR_URL: addon.url,
+                ATTR_ICON: addon.with_icon,
                 ATTR_LOGO: addon.with_logo,
                 ATTR_HASSIO_API: addon.use_hassio_api,
                 ATTR_AUDIO: addon.with_audio,
             })

         data_repositories = []
-        for repository in self.addons.list_repositories:
+        for repository in self.sys_addons.list_repositories:
             data_repositories.append({
                 ATTR_SLUG: repository.slug,
                 ATTR_NAME: repository.name,
@@ -98,47 +105,71 @@ class APIAddons(object):
|
||||
|
||||
@api_process
|
||||
async def reload(self, request):
|
||||
"""Reload all addons data."""
|
||||
await asyncio.shield(self.addons.reload(), loop=self.loop)
|
||||
"""Reload all add-on data."""
|
||||
await asyncio.shield(self.sys_addons.reload())
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return addon information."""
|
||||
"""Return add-on information."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
|
||||
return {
|
||||
ATTR_NAME: addon.name,
|
||||
ATTR_SLUG: addon.slug,
|
||||
ATTR_DESCRIPTON: addon.description,
|
||||
ATTR_LONG_DESCRIPTION: addon.long_description,
|
||||
ATTR_VERSION: addon.version_installed,
|
||||
ATTR_AUTO_UPDATE: addon.auto_update,
|
||||
ATTR_REPOSITORY: addon.repository,
|
||||
ATTR_LAST_VERSION: addon.last_version,
|
||||
ATTR_STATE: await addon.state(),
|
||||
ATTR_PROTECTED: addon.protected,
|
||||
ATTR_RATING: rating_security(addon),
|
||||
ATTR_BOOT: addon.boot,
|
||||
ATTR_OPTIONS: addon.options,
|
||||
ATTR_ARCH: addon.supported_arch,
|
||||
ATTR_MACHINE: addon.supported_machine,
|
||||
ATTR_URL: addon.url,
|
||||
ATTR_DETACHED: addon.is_detached,
|
||||
ATTR_AVAILABLE: addon.available,
|
||||
ATTR_BUILD: addon.need_build,
|
||||
ATTR_NETWORK: addon.ports,
|
||||
ATTR_HOST_NETWORK: addon.network_mode == 'host',
|
||||
ATTR_HOST_NETWORK: addon.host_network,
|
||||
ATTR_HOST_PID: addon.host_pid,
|
||||
ATTR_HOST_IPC: addon.host_ipc,
|
||||
ATTR_HOST_DBUS: addon.host_dbus,
|
||||
ATTR_PRIVILEGED: addon.privileged,
|
||||
ATTR_DEVICES: self._pretty_devices(addon),
|
||||
ATTR_FULL_ACCESS: addon.with_full_access,
|
||||
ATTR_APPARMOR: addon.apparmor,
|
||||
ATTR_DEVICES: _pretty_devices(addon),
|
||||
ATTR_ICON: addon.with_icon,
|
||||
ATTR_LOGO: addon.with_logo,
|
||||
ATTR_CHANGELOG: addon.with_changelog,
|
||||
ATTR_WEBUI: addon.webui,
|
||||
ATTR_HASSIO_API: addon.use_hassio_api,
|
||||
ATTR_STDIN: addon.with_stdin,
|
||||
ATTR_HASSIO_API: addon.access_hassio_api,
|
||||
ATTR_HASSIO_ROLE: addon.hassio_role,
|
||||
ATTR_AUTH_API: addon.access_auth_api,
|
||||
ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
|
||||
ATTR_GPIO: addon.with_gpio,
|
||||
ATTR_KERNEL_MODULES: addon.with_kernel_modules,
|
||||
ATTR_DEVICETREE: addon.with_devicetree,
|
||||
ATTR_DOCKER_API: addon.access_docker_api,
|
||||
ATTR_AUDIO: addon.with_audio,
|
||||
ATTR_AUDIO_INPUT: addon.audio_input,
|
||||
ATTR_AUDIO_OUTPUT: addon.audio_output,
|
||||
ATTR_SERVICES: _pretty_services(addon),
|
||||
ATTR_DISCOVERY: addon.discovery,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
"""Store user options for addon."""
|
||||
"""Store user options for add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
addon_schema = SCHEMA_OPTIONS.extend({
|
||||
vol.Optional(ATTR_OPTIONS): addon.schema,
|
||||
vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
|
||||
})
|
||||
|
||||
body = await api_validate(addon_schema, request)
|
||||
@@ -156,27 +187,56 @@ class APIAddons(object):
|
||||
if ATTR_AUDIO_OUTPUT in body:
|
||||
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
|
||||
|
||||
addon.save_data()
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def install(self, request):
|
||||
"""Install addon."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
version = body.get(ATTR_VERSION, addon.last_version)
|
||||
async def security(self, request):
|
||||
"""Store security options for add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
body = await api_validate(SCHEMA_SECURITY, request)
|
||||
|
||||
return await asyncio.shield(
|
||||
addon.install(version=version), loop=self.loop)
|
||||
if ATTR_PROTECTED in body:
|
||||
_LOGGER.warning("Protected flag changing for %s!", addon.slug)
|
||||
addon.protected = body[ATTR_PROTECTED]
|
||||
|
||||
addon.save_data()
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def stats(self, request):
|
||||
"""Return resource information."""
|
||||
addon = self._extract_addon(request)
|
||||
stats = await addon.stats()
|
||||
|
||||
if not stats:
|
||||
raise APIError("No stats available")
|
||||
|
||||
return {
|
||||
ATTR_CPU_PERCENT: stats.cpu_percent,
|
||||
ATTR_MEMORY_USAGE: stats.memory_usage,
|
||||
ATTR_MEMORY_LIMIT: stats.memory_limit,
|
||||
ATTR_NETWORK_RX: stats.network_rx,
|
||||
ATTR_NETWORK_TX: stats.network_tx,
|
||||
ATTR_BLK_READ: stats.blk_read,
|
||||
ATTR_BLK_WRITE: stats.blk_write,
|
||||
}
|
||||
|
||||
@api_process
|
||||
def install(self, request):
|
||||
"""Install add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
return asyncio.shield(addon.install())
|
||||
|
||||
@api_process
|
||||
def uninstall(self, request):
|
||||
"""Uninstall addon."""
|
||||
"""Uninstall add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return asyncio.shield(addon.uninstall(), loop=self.loop)
|
||||
return asyncio.shield(addon.uninstall())
|
||||
|
||||
@api_process
|
||||
def start(self, request):
|
||||
"""Start addon."""
|
||||
"""Start add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
# check options
|
||||
@@ -184,47 +244,99 @@ class APIAddons(object):
|
||||
try:
|
||||
addon.schema(options)
|
||||
except vol.Invalid as ex:
|
||||
raise RuntimeError(humanize_error(options, ex)) from None
|
||||
raise APIError(humanize_error(options, ex)) from None
|
||||
|
||||
return asyncio.shield(addon.start(), loop=self.loop)
|
||||
return asyncio.shield(addon.start())
|
||||
|
||||
@api_process
|
||||
def stop(self, request):
|
||||
"""Stop addon."""
|
||||
"""Stop add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return asyncio.shield(addon.stop(), loop=self.loop)
|
||||
return asyncio.shield(addon.stop())
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update addon."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
def update(self, request):
|
||||
"""Update add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
version = body.get(ATTR_VERSION, addon.last_version)
|
||||
|
||||
if version == addon.version_installed:
|
||||
raise RuntimeError("Version %s is already in use", version)
|
||||
if addon.last_version == addon.version_installed:
|
||||
raise APIError("No update available!")
|
||||
|
||||
return await asyncio.shield(
|
||||
addon.update(version=version), loop=self.loop)
|
||||
return asyncio.shield(addon.update())
|
||||
|
||||
@api_process
|
||||
def restart(self, request):
|
||||
"""Restart addon."""
|
||||
"""Restart add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return asyncio.shield(addon.restart(), loop=self.loop)
|
||||
return asyncio.shield(addon.restart())
|
||||
|
||||
@api_process
|
||||
def rebuild(self, request):
|
||||
"""Rebuild local build add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
if not addon.need_build:
|
||||
raise APIError("Only local build addons are supported")
|
||||
|
||||
return asyncio.shield(addon.rebuild())
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
"""Return logs from addon."""
|
||||
"""Return logs from add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return addon.logs()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_PNG)
|
||||
async def icon(self, request):
|
||||
"""Return icon from add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
if not addon.with_icon:
|
||||
raise APIError("No icon found!")
|
||||
|
||||
with addon.path_icon.open('rb') as png:
|
||||
return png.read()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_PNG)
|
||||
async def logo(self, request):
|
||||
"""Return logo from addon."""
|
||||
"""Return logo from add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
if not addon.with_logo:
|
||||
raise RuntimeError("No image found!")
|
||||
raise APIError("No logo found!")
|
||||
|
||||
with addon.path_logo.open('rb') as png:
|
||||
return png.read()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_TEXT)
|
||||
async def changelog(self, request):
|
||||
"""Return changelog from add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
if not addon.with_changelog:
|
||||
raise APIError("No changelog found!")
|
||||
|
||||
with addon.path_changelog.open('r') as changelog:
|
||||
return changelog.read()
|
||||
|
||||
@api_process
|
||||
async def stdin(self, request):
|
||||
"""Write to stdin of add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
if not addon.with_stdin:
|
||||
raise APIError("STDIN not supported by add-on")
|
||||
|
||||
data = await request.read()
|
||||
return await asyncio.shield(addon.write_stdin(data))
|
||||
|
||||
|
||||
def _pretty_devices(addon):
|
||||
"""Return a simplified device list."""
|
||||
dev_list = addon.devices
|
||||
if not dev_list:
|
||||
return None
|
||||
return [row.split(':')[0] for row in dev_list]
|
||||
|
||||
|
||||
def _pretty_services(addon):
|
||||
"""Return a simplified services role list."""
|
||||
services = []
|
||||
for name, access in addon.services_role.items():
|
||||
services.append(f"{name}:{access}")
|
||||
return services
|
||||
|
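The two module-level helpers above only reshape data for the API response. A standalone sketch of their effect (the device string and service role below are made-up sample values, not taken from this diff):

    # '/dev/ttyUSB0:/dev/ttyUSB0:rwm' is a typical Docker device mapping string
    dev_list = ['/dev/ttyUSB0:/dev/ttyUSB0:rwm']
    print([row.split(':')[0] for row in dev_list])  # ['/dev/ttyUSB0'] - host path only

    services_role = {'mqtt': 'want'}                # hypothetical role mapping
    print([f"{name}:{access}" for name, access in services_role.items()])  # ['mqtt:want']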
hassio/api/auth.py (new file, 61 lines)
@@ -0,0 +1,61 @@
"""Init file for Hass.io auth/SSO RESTful API."""
import logging

from aiohttp import BasicAuth
from aiohttp.web_exceptions import HTTPUnauthorized
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION, WWW_AUTHENTICATE

from .utils import api_process
from ..const import REQUEST_FROM, CONTENT_TYPE_JSON, CONTENT_TYPE_URL
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden

_LOGGER = logging.getLogger(__name__)


class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""

    def _process_basic(self, request, addon):
        """Process login request with basic auth.

        Return a coroutine.
        """
        auth = BasicAuth.decode(request.headers[AUTHORIZATION])
        return self.sys_auth.check_login(addon, auth.login, auth.password)

    def _process_dict(self, request, addon, data):
        """Process login with dict data.

        Return a coroutine.
        """
        username = data.get('username') or data.get('user')
        password = data.get('password')

        return self.sys_auth.check_login(addon, username, password)

    @api_process
    async def auth(self, request):
        """Process login request."""
        addon = request[REQUEST_FROM]

        if not addon.access_auth_api:
            raise APIForbidden("Can't use Home Assistant auth!")

        # BasicAuth
        if AUTHORIZATION in request.headers:
            return await self._process_basic(request, addon)

        # Json
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
            data = await request.json()
            return await self._process_dict(request, addon, data)

        # URL encoded
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
            data = await request.post()
            return await self._process_dict(request, addon, data)

        raise HTTPUnauthorized(headers={
            WWW_AUTHENTICATE: "Basic realm=\"Hass.io Authentication\""
        })
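From an add-on's point of view, this endpoint is exercised roughly as in the sketch below (a minimal illustration; the http://hassio/auth URL and the X-Hassio-Key token header are assumptions, not shown in this diff):

    import asyncio
    import aiohttp

    async def check_credentials(token, username, password):
        """Ask the Supervisor to validate Home Assistant credentials."""
        async with aiohttp.ClientSession() as session:
            # BasicAuth lands in the AUTHORIZATION header and is handled
            # by APIAuth._process_basic on the Supervisor side.
            async with session.post(
                    'http://hassio/auth',                       # assumed mount point
                    headers={'X-Hassio-Key': token},            # assumed token header
                    auth=aiohttp.BasicAuth(username, password)) as resp:
                return resp.status == 200

    # asyncio.run(check_credentials('<addon token>', 'user', 'secret'))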
hassio/api/discovery.py (new file, 100 lines)
@@ -0,0 +1,100 @@
"""Init file for Hass.io discovery RESTful API."""
import voluptuous as vol

from .utils import api_process, api_validate
from ..const import (
    ATTR_ADDON,
    ATTR_UUID,
    ATTR_CONFIG,
    ATTR_DISCOVERY,
    ATTR_SERVICE,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden
from ..discovery.validate import valid_discovery_service


SCHEMA_DISCOVERY = vol.Schema(
    {
        vol.Required(ATTR_SERVICE): valid_discovery_service,
        vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
    }
)


class APIDiscovery(CoreSysAttributes):
    """Handle RESTful API for discovery functions."""

    def _extract_message(self, request):
        """Extract discovery message from URL."""
        message = self.sys_discovery.get(request.match_info.get("uuid"))
        if not message:
            raise APIError("Discovery message not found")
        return message

    def _check_permission_ha(self, request):
        """Check permission for API call / Home Assistant."""
        if request[REQUEST_FROM] != self.sys_homeassistant:
            raise APIForbidden("Only HomeAssistant can use this API!")

    @api_process
    async def list(self, request):
        """Show registered services."""
        self._check_permission_ha(request)

        discovery = []
        for message in self.sys_discovery.list_messages:
            discovery.append(
                {
                    ATTR_ADDON: message.addon,
                    ATTR_SERVICE: message.service,
                    ATTR_UUID: message.uuid,
                    ATTR_CONFIG: message.config,
                }
            )

        return {ATTR_DISCOVERY: discovery}

    @api_process
    async def set_discovery(self, request):
        """Write data into a discovery pipeline."""
        body = await api_validate(SCHEMA_DISCOVERY, request)
        addon = request[REQUEST_FROM]

        # Access?
        if body[ATTR_SERVICE] not in addon.discovery:
            raise APIForbidden("Can't use discovery!")

        # Process discovery message
        message = self.sys_discovery.send(addon, **body)

        return {ATTR_UUID: message.uuid}

    @api_process
    async def get_discovery(self, request):
        """Read data from a discovery message."""
        message = self._extract_message(request)

        # HomeAssistant?
        self._check_permission_ha(request)

        return {
            ATTR_ADDON: message.addon,
            ATTR_SERVICE: message.service,
            ATTR_UUID: message.uuid,
            ATTR_CONFIG: message.config,
        }

    @api_process
    async def del_discovery(self, request):
        """Delete a discovery message."""
        message = self._extract_message(request)
        addon = request[REQUEST_FROM]

        # Permission
        if message.addon != addon.slug:
            raise APIForbidden("Can't remove discovery message")

        self.sys_discovery.remove(message)
        return True
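The body that set_discovery() accepts is small. A sketch of an equivalent payload validation (the 'mqtt' service name and config keys are illustrative, and the plain str below stands in for the real valid_discovery_service validator, which lives outside this diff):

    import voluptuous as vol

    SCHEMA_DEMO = vol.Schema({
        vol.Required('service'): str,            # stand-in for valid_discovery_service
        vol.Optional('config'): vol.Maybe(dict), # free-form dict, may also be None
    })

    body = SCHEMA_DEMO({
        'service': 'mqtt',
        'config': {'host': '172.30.32.1', 'port': 1883},
    })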
hassio/api/hardware.py (new file, 34 lines)
@@ -0,0 +1,34 @@
"""Init file for Hass.io hardware RESTful API."""
import logging

from .utils import api_process
from ..const import (
    ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


class APIHardware(CoreSysAttributes):
    """Handle RESTful API for hardware functions."""

    @api_process
    async def info(self, request):
        """Show hardware info."""
        return {
            ATTR_SERIAL: list(self.sys_hardware.serial_devices),
            ATTR_INPUT: list(self.sys_hardware.input_devices),
            ATTR_DISK: list(self.sys_hardware.disk_devices),
            ATTR_GPIO: list(self.sys_hardware.gpio_devices),
            ATTR_AUDIO: self.sys_hardware.audio_devices,
        }

    @api_process
    async def audio(self, request):
        """Show ALSA audio devices."""
        return {
            ATTR_AUDIO: {
                ATTR_INPUT: self.sys_host.alsa.input_devices,
                ATTR_OUTPUT: self.sys_host.alsa.output_devices,
            }
        }
hassio/api/hassos.py (new file, 53 lines)
@@ -0,0 +1,53 @@
"""Init file for Hass.io HassOS RESTful API."""
import asyncio
import logging

import voluptuous as vol

from .utils import api_process, api_validate
from ..const import (
    ATTR_VERSION, ATTR_BOARD, ATTR_VERSION_LATEST, ATTR_VERSION_CLI,
    ATTR_VERSION_CLI_LATEST)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})


class APIHassOS(CoreSysAttributes):
    """Handle RESTful API for HassOS functions."""

    @api_process
    async def info(self, request):
        """Return HassOS information."""
        return {
            ATTR_VERSION: self.sys_hassos.version,
            ATTR_VERSION_CLI: self.sys_hassos.version_cli,
            ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
            ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
            ATTR_BOARD: self.sys_hassos.board,
        }

    @api_process
    async def update(self, request):
        """Update HassOS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)

        await asyncio.shield(self.sys_hassos.update(version))

    @api_process
    async def update_cli(self, request):
        """Update HassOS CLI."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)

        await asyncio.shield(self.sys_hassos.update_cli(version))

    @api_process
    def config_sync(self, request):
        """Trigger config reload on HassOS."""
        return asyncio.shield(self.sys_hassos.config_sync())
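A pattern worth noting in these handlers is asyncio.shield() around long-running jobs: if the HTTP request that triggered an update is cancelled (for example, the client disconnects), the shielded operation still runs to completion. A self-contained sketch of that behaviour:

    import asyncio

    async def long_job():
        await asyncio.sleep(5)          # stands in for sys_hassos.update()
        print("job finished")

    async def handler():
        # Cancelling handler() does not cancel long_job(); only the outer
        # await is interrupted, the inner task keeps running.
        await asyncio.shield(long_job())

    # asyncio.run(handler())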
hassio/api/homeassistant.py
@@ -1,91 +1,165 @@
"""Init file for HassIO homeassistant rest api."""
"""Init file for Hass.io Home Assistant RESTful API."""
import asyncio
import logging
from typing import Coroutine, Dict, Any

import voluptuous as vol
from aiohttp import web

from .util import api_process, api_process_raw, api_validate
from ..const import (
    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
    CONTENT_TYPE_BINARY)
from ..validate import HASS_DEVICES
    ATTR_ARCH,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_BOOT,
    ATTR_CPU_PERCENT,
    ATTR_CUSTOM,
    ATTR_IMAGE,
    ATTR_LAST_VERSION,
    ATTR_MACHINE,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_PASSWORD,
    ATTR_PORT,
    ATTR_REFRESH_TOKEN,
    ATTR_SSL,
    ATTR_VERSION,
    ATTR_WAIT_BOOT,
    ATTR_WATCHDOG,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import DOCKER_IMAGE, NETWORK_PORT
from .utils import api_process, api_process_raw, api_validate

_LOGGER = logging.getLogger(__name__)

# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_BOOT): vol.Boolean(),
        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(vol.Coerce(str)),
        vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Any(None, DOCKER_IMAGE),
        vol.Optional(ATTR_PORT): NETWORK_PORT,
        vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_SSL): vol.Boolean(),
        vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
        vol.Optional(ATTR_WAIT_BOOT): vol.All(vol.Coerce(int), vol.Range(min=60)),
        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
    }
)

SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_DEVICES): HASS_DEVICES,
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
        vol.Any(None, vol.Coerce(str)),
})

SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APIHomeAssistant(object):
    """Handle rest api for homeassistant functions."""

    def __init__(self, config, loop, homeassistant):
        """Initialize homeassistant rest api part."""
        self.config = config
        self.loop = loop
        self.homeassistant = homeassistant
class APIHomeAssistant(CoreSysAttributes):
    """Handle RESTful API for Home Assistant functions."""

    @api_process
    async def info(self, request):
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return host information."""
        return {
            ATTR_VERSION: self.homeassistant.version,
            ATTR_LAST_VERSION: self.homeassistant.last_version,
            ATTR_IMAGE: self.homeassistant.image,
            ATTR_DEVICES: self.homeassistant.devices,
            ATTR_CUSTOM: self.homeassistant.is_custom_image,
            ATTR_VERSION: self.sys_homeassistant.version,
            ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
            ATTR_MACHINE: self.sys_homeassistant.machine,
            ATTR_ARCH: self.sys_homeassistant.arch,
            ATTR_IMAGE: self.sys_homeassistant.image,
            ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
            ATTR_BOOT: self.sys_homeassistant.boot,
            ATTR_PORT: self.sys_homeassistant.api_port,
            ATTR_SSL: self.sys_homeassistant.api_ssl,
            ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
            ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
        }

    @api_process
    async def options(self, request):
        """Set homeassistant options."""
    async def options(self, request: web.Request) -> None:
        """Set Home Assistant options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_DEVICES in body:
            self.homeassistant.devices = body[ATTR_DEVICES]
        if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
            self.sys_homeassistant.image = body[ATTR_IMAGE]
            self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]

        if ATTR_IMAGE in body:
            self.homeassistant.set_custom(
                body[ATTR_IMAGE], body[ATTR_LAST_VERSION])
        if ATTR_BOOT in body:
            self.sys_homeassistant.boot = body[ATTR_BOOT]

        return True
        if ATTR_PORT in body:
            self.sys_homeassistant.api_port = body[ATTR_PORT]

        if ATTR_PASSWORD in body:
            self.sys_homeassistant.api_password = body[ATTR_PASSWORD]
            self.sys_homeassistant.refresh_token = None

        if ATTR_SSL in body:
            self.sys_homeassistant.api_ssl = body[ATTR_SSL]

        if ATTR_WATCHDOG in body:
            self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]

        if ATTR_WAIT_BOOT in body:
            self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]

        if ATTR_REFRESH_TOKEN in body:
            self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]

        self.sys_homeassistant.save_data()

    @api_process
    async def update(self, request):
        """Update homeassistant."""
    async def stats(self, request: web.Request) -> Dict[Any, str]:
        """Return resource information."""
        stats = await self.sys_homeassistant.stats()
        if not stats:
            raise APIError("No stats available")

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update Home Assistant."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.homeassistant.last_version)
        version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)

        if version == self.homeassistant.version:
            raise RuntimeError("Version {} is already in use".format(version))

        return await asyncio.shield(
            self.homeassistant.update(version), loop=self.loop)
        await asyncio.shield(self.sys_homeassistant.update(version))

    @api_process
    def restart(self, request):
        """Restart homeassistant."""
        return asyncio.shield(self.homeassistant.restart(), loop=self.loop)
    def stop(self, request: web.Request) -> Coroutine:
        """Stop Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.stop())

    @api_process
    def start(self, request: web.Request) -> Coroutine:
        """Start Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.start())

    @api_process
    def restart(self, request: web.Request) -> Coroutine:
        """Restart Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.restart())

    @api_process
    def rebuild(self, request: web.Request) -> Coroutine:
        """Rebuild Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.rebuild())

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return homeassistant docker logs."""
        return self.homeassistant.logs()
    def logs(self, request: web.Request) -> Coroutine:
        """Return Home Assistant Docker logs."""
        return self.sys_homeassistant.logs()

    @api_process
    async def check(self, request):
        """Check config of homeassistant."""
        code, message = await self.homeassistant.check_config()
        if not code:
            raise RuntimeError(message)

        return True
    async def check(self, request: web.Request) -> None:
        """Check configuration of Home Assistant."""
        result = await self.sys_homeassistant.check_config()
        if not result.valid:
            raise APIError(result.log)
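The vol.Inclusive markers in the new SCHEMA_OPTIONS tie ATTR_IMAGE and ATTR_LAST_VERSION together: a custom image only makes sense with a pinned version, so both keys must be given together or not at all. A minimal demonstration of that voluptuous feature (key names here are illustrative):

    import voluptuous as vol

    schema = vol.Schema({
        vol.Inclusive('image', 'custom_hass'): vol.Maybe(vol.Coerce(str)),
        vol.Inclusive('last_version', 'custom_hass'): vol.Maybe(vol.Coerce(str)),
    })

    schema({'image': 'my/image', 'last_version': '0.80.0'})  # valid: both present
    schema({})                                               # valid: neither present
    # schema({'image': 'my/image'})  # raises vol.MultipleInvalid: group incomplete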
hassio/api/host.py
@@ -1,90 +1,101 @@
"""Init file for HassIO host rest api."""
"""Init file for Hass.io host RESTful API."""
import asyncio
import logging

import voluptuous as vol

from .util import api_process_hostcontrol, api_process, api_validate
from .utils import api_process, api_validate
from ..const import (
    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
    ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT)
from ..validate import ALSA_CHANNEL
    ATTR_HOSTNAME, ATTR_FEATURES, ATTR_KERNEL, ATTR_OPERATING_SYSTEM,
    ATTR_CHASSIS, ATTR_DEPLOYMENT, ATTR_STATE, ATTR_NAME, ATTR_DESCRIPTON,
    ATTR_SERVICES, ATTR_CPE)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
SERVICE = 'service'

SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
    vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})


class APIHost(object):
    """Handle rest api for host functions."""

    def __init__(self, config, loop, host_control, hardware):
        """Initialize host rest api part."""
        self.config = config
        self.loop = loop
        self.host_control = host_control
        self.local_hw = hardware
class APIHost(CoreSysAttributes):
    """Handle RESTful API for host functions."""

    @api_process
    async def info(self, request):
        """Return host information."""
        return {
            ATTR_TYPE: self.host_control.type,
            ATTR_VERSION: self.host_control.version,
            ATTR_LAST_VERSION: self.host_control.last_version,
            ATTR_FEATURES: self.host_control.features,
            ATTR_HOSTNAME: self.host_control.hostname,
            ATTR_OS: self.host_control.os_info,
            ATTR_CHASSIS: self.sys_host.info.chassis,
            ATTR_CPE: self.sys_host.info.cpe,
            ATTR_FEATURES: self.sys_host.supperted_features,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
            ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
            ATTR_DEPLOYMENT: self.sys_host.info.deployment,
            ATTR_KERNEL: self.sys_host.info.kernel,
        }

    @api_process
    async def options(self, request):
        """Process host options."""
        """Edit host settings."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_AUDIO_OUTPUT in body:
            self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
        if ATTR_AUDIO_INPUT in body:
            self.config.audio_input = body[ATTR_AUDIO_INPUT]

        return True

    @api_process_hostcontrol
    def reboot(self, request):
        """Reboot host."""
        return self.host_control.reboot()

    @api_process_hostcontrol
    def shutdown(self, request):
        """Poweroff host."""
        return self.host_control.shutdown()

    @api_process_hostcontrol
    async def update(self, request):
        """Update host OS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.host_control.last_version)

        if version == self.host_control.version:
            raise RuntimeError("Version {} is already in use".format(version))

        return await asyncio.shield(
            self.host_control.update(version=version), loop=self.loop)
        # hostname
        if ATTR_HOSTNAME in body:
            await asyncio.shield(
                self.sys_host.control.set_hostname(body[ATTR_HOSTNAME]))

    @api_process
    async def hardware(self, request):
        """Return local hardware infos."""
    def reboot(self, request):
        """Reboot host."""
        return asyncio.shield(self.sys_host.control.reboot())

    @api_process
    def shutdown(self, request):
        """Poweroff host."""
        return asyncio.shield(self.sys_host.control.shutdown())

    @api_process
    def reload(self, request):
        """Reload host data."""
        return asyncio.shield(self.sys_host.reload())

    @api_process
    async def services(self, request):
        """Return list of available services."""
        services = []
        for unit in self.sys_host.services:
            services.append({
                ATTR_NAME: unit.name,
                ATTR_DESCRIPTON: unit.description,
                ATTR_STATE: unit.state,
            })

        return {
            ATTR_SERIAL: self.local_hw.serial_devices,
            ATTR_INPUT: self.local_hw.input_devices,
            ATTR_DISK: self.local_hw.disk_devices,
            ATTR_AUDIO: self.local_hw.audio_devices,
            ATTR_SERVICES: services
        }

    @api_process
    def service_start(self, request):
        """Start a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.start(unit))

    @api_process
    def service_stop(self, request):
        """Stop a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.stop(unit))

    @api_process
    def service_reload(self, request):
        """Reload a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.reload(unit))

    @api_process
    def service_restart(self, request):
        """Restart a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.restart(unit))
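The service_* handlers all rely on an aiohttp route placeholder: whatever matches {service} in the route pattern is what request.match_info.get(SERVICE) returns. A self-contained sketch of the mechanism (the route path and unit name are illustrative; the real routes are wired up elsewhere in the Supervisor's API setup):

    from aiohttp import web

    async def service_start(request):
        unit = request.match_info.get('service')   # e.g. 'systemd-timesyncd.service'
        return web.json_response({'result': 'ok', 'unit': unit})

    app = web.Application()
    app.router.add_post('/host/services/{service}/start', service_start)
    # web.run_app(app)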
hassio/api/info.py (new file, 28 lines)
@@ -0,0 +1,28 @@
"""Init file for Hass.io info RESTful API."""
import logging

from ..const import (ATTR_ARCH, ATTR_CHANNEL, ATTR_HASSOS, ATTR_HOMEASSISTANT,
                     ATTR_HOSTNAME, ATTR_MACHINE, ATTR_SUPERVISOR,
                     ATTR_SUPPORTED_ARCH)
from ..coresys import CoreSysAttributes
from .utils import api_process

_LOGGER = logging.getLogger(__name__)


class APIInfo(CoreSysAttributes):
    """Handle RESTful API for info functions."""

    @api_process
    async def info(self, request):
        """Show system info."""
        return {
            ATTR_SUPERVISOR: self.sys_supervisor.version,
            ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
            ATTR_HASSOS: self.sys_hassos.version,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
            ATTR_MACHINE: self.sys_machine,
            ATTR_ARCH: self.sys_arch.default,
            ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
            ATTR_CHANNEL: self.sys_updater.channel,
        }
hassio/api/network.py (deleted file, 43 lines)
@@ -1,43 +0,0 @@
"""Init file for HassIO network rest api."""
import logging

import voluptuous as vol

from .util import api_process, api_process_hostcontrol, api_validate
from ..const import ATTR_HOSTNAME

_LOGGER = logging.getLogger(__name__)


SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})


class APINetwork(object):
    """Handle rest api for network functions."""

    def __init__(self, config, loop, host_control):
        """Initialize network rest api part."""
        self.config = config
        self.loop = loop
        self.host_control = host_control

    @api_process
    async def info(self, request):
        """Show network settings."""
        return {
            ATTR_HOSTNAME: self.host_control.hostname,
        }

    @api_process_hostcontrol
    async def options(self, request):
        """Edit network settings."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        # hostname
        if ATTR_HOSTNAME in body:
            if self.host_control.hostname != body[ATTR_HOSTNAME]:
                await self.host_control.set_hostname(body[ATTR_HOSTNAME])

        return True
New files under hassio/api/panel/ (built frontend assets; the compare view suppresses these diffs as too long or binary):

chunk.088b1034e27d00ee9329.js (1 line) + .js.gz
chunk.2c1fb1dea4fa88f96920.js (3 lines) + .js.LICENSE (142 lines) + .js.gz + .js.map
chunk.6ff2deda34a647d6051c.js (1 line) + .js.gz
chunk.75766aa821239c9936dc.js (1 line) + .js.gz
chunk.b74ddf4cacc7d5de8a55.js (1 line, minified webpack chunk) + .js.gz
chunk.d33e783375f0db186ab5.js (1 line) + .js.gz
chunk.e7d34dbf975fad4b7776.js (3 lines) + .js.LICENSE (20 lines) + .js.gz + .js.map
entrypoint.js (1 line, minified webpack runtime) + .js.gz + .js.map
hassio-app.html (3 lines) + .html.gz
hassio-main-es5.html (72 lines) + .html.gz
hassio-main-latest.html (72 lines) + .html.gz

The .js.LICENSE files carry the standard Polymer Project (BSD), Google Apache-2.0, and MIT license headers bundled by the build; the chunk .js.map files shown are empty source-map stubs.

hassio/api/panel/index.html (new file, 38 lines)
@@ -0,0 +1,38 @@
<!doctype html>
<html>
<head>
  <meta charset="utf-8">
  <title>Hass.io</title>
  <meta name='viewport' content='width=device-width, user-scalable=no'>
  <style>
    body {
      height: 100vh;
      margin: 0;
      padding: 0;
    }
  </style>
  <script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
</head>
<body>
  <hassio-app></hassio-app>
  <script>
    function addScript(src) {
      var e = document.createElement('script');
      e.src = src;
      document.write(e.outerHTML);
    }
    var webComponentsSupported = (
      'customElements' in window &&
      'import' in document.createElement('link') &&
      'content' in document.createElement('template'));
    if (!webComponentsSupported) {
      addScript('/static/webcomponents-lite.js');
    }
  </script>
  <!--
    Disabled while we make Home Assistant able to serve the right files.
    <script src="./app.js"></script>
  -->
  <link rel='import' href='./hassio-app.html'>
</body>
</html>
hassio/api/panel/index.html.gz (binary, not shown)
hassio/api/proxy.py (new file, 253 lines)
@@ -0,0 +1,253 @@
"""Utils for Home Assistant Proxy."""
import asyncio
from contextlib import asynccontextmanager
import logging

import aiohttp
from aiohttp import web
from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized
from aiohttp.client_exceptions import ClientConnectorError
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION
import async_timeout

from ..const import HEADER_HA_ACCESS
from ..coresys import CoreSysAttributes
from ..exceptions import (
    HomeAssistantAuthError, HomeAssistantAPIError, APIError)

_LOGGER = logging.getLogger(__name__)


class APIProxy(CoreSysAttributes):
    """API Proxy for Home Assistant."""

    def _check_access(self, request):
        """Check the Hass.io token."""
        if AUTHORIZATION in request.headers:
            bearer = request.headers[AUTHORIZATION]
            hassio_token = bearer.split(' ')[-1]
        else:
            hassio_token = request.headers.get(HEADER_HA_ACCESS)

        addon = self.sys_addons.from_token(hassio_token)
        if not addon:
            _LOGGER.warning("Unknown Home Assistant API access!")
        elif not addon.access_homeassistant_api:
            _LOGGER.warning("Not permitted API access: %s", addon.slug)
        else:
            _LOGGER.info("%s access from %s", request.path, addon.slug)
            return

        raise HTTPUnauthorized()

    @asynccontextmanager
    async def _api_client(self, request, path, timeout=300):
        """Return a client request with proxy origin for Home Assistant."""
        try:
            # read data
            with async_timeout.timeout(30):
                data = await request.read()

            if data:
                content_type = request.content_type
            else:
                content_type = None

            async with self.sys_homeassistant.make_request(
                    request.method.lower(), f'api/{path}',
                    content_type=content_type,
                    data=data,
                    timeout=timeout,
            ) as resp:
                yield resp
                return

        except HomeAssistantAuthError:
            _LOGGER.error("Authenticate error on API for request %s", path)
        except HomeAssistantAPIError:
            _LOGGER.error("Error on API for request %s", path)
        except aiohttp.ClientError as err:
            _LOGGER.error("Client error on API %s request %s", path, err)
        except asyncio.TimeoutError:
            _LOGGER.error("Client timeout error on API request %s", path)

        raise HTTPBadGateway()

    async def stream(self, request):
        """Proxy HomeAssistant EventStream Requests."""
        self._check_access(request)

        _LOGGER.info("Home Assistant EventStream start")
        async with self._api_client(request, 'stream', timeout=None) as client:
            response = web.StreamResponse()
            response.content_type = request.headers.get(CONTENT_TYPE)
            try:
                await response.prepare(request)
                async for data in client.content:
                    await response.write(data)

            except (aiohttp.ClientError, aiohttp.ClientPayloadError):
                pass

            _LOGGER.info("Home Assistant EventStream close")
            return response

    async def api(self, request):
        """Proxy Home Assistant API Requests."""
        self._check_access(request)

        # Normal request
        path = request.match_info.get('path', '')
        async with self._api_client(request, path) as client:
            data = await client.read()
            return web.Response(
                body=data,
                status=client.status,
                content_type=client.content_type
            )

    async def _websocket_client(self):
        """Initialize a WebSocket API connection."""
        url = f"{self.sys_homeassistant.api_url}/api/websocket"

        try:
            client = await self.sys_websession_ssl.ws_connect(
                url, heartbeat=30, verify_ssl=False)

            # Handle authentication
            data = await client.receive_json()

            if data.get('type') == 'auth_ok':
                return client

            if data.get('type') != 'auth_required':
                # Invalid protocol
                _LOGGER.error(
                    "Got unexpected response from HA WebSocket: %s", data)
                raise APIError()

            if self.sys_homeassistant.refresh_token:
                await self.sys_homeassistant.ensure_access_token()
                await client.send_json({
                    'type': 'auth',
                    'access_token': self.sys_homeassistant.access_token,
                })
            else:
                await client.send_json({
                    'type': 'auth',
                    'api_password': self.sys_homeassistant.api_password,
                })

            data = await client.receive_json()

            if data.get('type') == 'auth_ok':
                return client

            # Renew the token if it is invalid
            if data.get('type') == 'invalid_auth' and self.sys_homeassistant.refresh_token:
                self.sys_homeassistant.access_token = None
                return await self._websocket_client()

            raise HomeAssistantAuthError()

        except (RuntimeError, ValueError, ClientConnectorError) as err:
            _LOGGER.error("Client error on WebSocket API %s.", err)
        except HomeAssistantAuthError:
            _LOGGER.error("Failed authentication to Home Assistant WebSocket")

        raise APIError()

    async def websocket(self, request):
        """Initialize a WebSocket API connection."""
        _LOGGER.info("Home Assistant WebSocket API request initialize")

        # init server
        server = web.WebSocketResponse(heartbeat=30)
        await server.prepare(request)

        # handle authentication
        try:
            await server.send_json({
                'type': 'auth_required',
                'ha_version': self.sys_homeassistant.version,
            })

            # Check API access
            response = await server.receive_json()
            hassio_token = response.get('api_password') or response.get('access_token')
            addon = self.sys_addons.from_token(hassio_token)

            if not addon or not addon.access_homeassistant_api:
                _LOGGER.warning("Unauthorized WebSocket access!")
                await server.send_json({
                    'type': 'auth_invalid',
                    'message': 'Invalid access',
                })
                return server

            _LOGGER.info("WebSocket access from %s", addon.slug)

            await server.send_json({
                'type': 'auth_ok',
                'ha_version': self.sys_homeassistant.version,
            })
        except (RuntimeError, ValueError) as err:
            _LOGGER.error("Can't initialize handshake: %s", err)
            return server

        # init connection to hass
        try:
            client = await self._websocket_client()
        except APIError:
            return server

        _LOGGER.info("Home Assistant WebSocket API request running")
        try:
            client_read = None
            server_read = None
            while not server.closed and not client.closed:
                if not client_read:
                    client_read = self.sys_create_task(
                        client.receive_str())
                if not server_read:
                    server_read = self.sys_create_task(
                        server.receive_str())

                # wait until data need to be processed
                await asyncio.wait(
                    [client_read, server_read],
                    return_when=asyncio.FIRST_COMPLETED
                )

                # server
                if server_read.done() and not client.closed:
                    server_read.exception()
                    await client.send_str(server_read.result())
                    server_read = None

                # client
                if client_read.done() and not server.closed:
                    client_read.exception()
                    await server.send_str(client_read.result())
                    client_read = None
||||
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
except (RuntimeError, ConnectionError, TypeError) as err:
|
||||
_LOGGER.info("Home Assistant WebSocket API error: %s", err)
|
||||
|
||||
finally:
|
||||
if client_read:
|
||||
client_read.cancel()
|
||||
if server_read:
|
||||
server_read.cancel()
|
||||
|
||||
# close connections
|
||||
if not client.closed:
|
||||
await client.close()
|
||||
if not server.closed:
|
||||
await server.close()
|
||||
|
||||
_LOGGER.info("Home Assistant WebSocket API connection is closed")
|
||||
return server
|
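The relay loop above keeps one pending receive_str() task per side, wakes on asyncio.FIRST_COMPLETED, forwards whichever message finished, and re-arms that side. A minimal standalone sketch of the same pattern (all names here are illustrative, not from the Supervisor code):

import asyncio

async def relay(a_recv, a_send, b_recv, b_send):
    """Pump messages between two endpoints until one side stops."""
    read_a = read_b = None
    try:
        while True:
            # (Re-)arm a read task for any side that has none pending
            if read_a is None:
                read_a = asyncio.ensure_future(a_recv())
            if read_b is None:
                read_b = asyncio.ensure_future(b_recv())

            # Wake up as soon as either side has a message
            await asyncio.wait([read_a, read_b],
                               return_when=asyncio.FIRST_COMPLETED)

            if read_a.done():
                await b_send(read_a.result())  # forward A -> B
                read_a = None
            if read_b.done():
                await a_send(read_b.result())  # forward B -> A
                read_b = None
    finally:
        # Never leak a pending reader task
        for task in (read_a, read_b):
            if task is not None:
                task.cancel()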
@@ -1,102 +1,140 @@
"""Init file for HassIO security rest api."""
from datetime import datetime, timedelta
import io
"""Handle security part of this API."""
import logging
import hashlib
import os
import re

from aiohttp import web
import voluptuous as vol
import pyotp
import pyqrcode
from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPUnauthorized, HTTPForbidden

from .util import api_process, api_validate, hash_password
from ..const import ATTR_INITIALIZE, ATTR_PASSWORD, ATTR_TOTP, ATTR_SESSION
from ..const import (
    HEADER_TOKEN, REQUEST_FROM, ROLE_ADMIN, ROLE_DEFAULT, ROLE_HOMEASSISTANT,
    ROLE_MANAGER, ROLE_BACKUP)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

SCHEMA_PASSWORD = vol.Schema({
    vol.Required(ATTR_PASSWORD): vol.Coerce(str),
})

SCHEMA_SESSION = SCHEMA_PASSWORD.extend({
    vol.Optional(ATTR_TOTP, default=None): vol.Coerce(str),
})

# Blocked at any time
BLACKLIST = re.compile(
    r"^(?:"
    r"|/homeassistant/api/hassio/.*"
    r")$"
)

# Free to call or have their own security concepts
NO_SECURITY_CHECK = re.compile(
    r"^(?:"
    r"|/homeassistant/api/.*"
    r"|/homeassistant/websocket"
    r"|/supervisor/ping"
    r")$"
)

# Can be called by every add-on
ADDONS_API_BYPASS = re.compile(
    r"^(?:"
    r"|/addons/self/(?!security|update)[^/]+"
    r"|/info"
    r"|/services.*"
    r"|/discovery.*"
    r"|/auth"
    r")$"
)

# Policy for role-based add-on API access
ADDONS_ROLE_ACCESS = {
    ROLE_DEFAULT: re.compile(
        r"^(?:"
        r"|/[^/]+/info"
        r"|/addons"
        r")$"
    ),
    ROLE_HOMEASSISTANT: re.compile(
        r"^(?:"
        r"|/homeassistant/.+"
        r")$"
    ),
    ROLE_BACKUP: re.compile(
        r"^(?:"
        r"|/snapshots.*"
        r")$"
    ),
    ROLE_MANAGER: re.compile(
        r"^(?:"
        r"|/homeassistant/.+"
        r"|/host/.+"
        r"|/hardware/.+"
        r"|/hassos/.+"
        r"|/supervisor/.+"
        r"|/addons(?:/[^/]+/(?!security).+)?"
        r"|/snapshots.*"
        r")$"
    ),
    ROLE_ADMIN: re.compile(
        r".*"
    ),
}


class APISecurity(object):
    """Handle rest api for security functions."""
class SecurityMiddleware(CoreSysAttributes):
    """Security middleware functions."""

    def __init__(self, config, loop):
        """Initialize security rest api part."""
        self.config = config
        self.loop = loop
    def __init__(self, coresys):
        """Initialize security middleware."""
        self.coresys = coresys

    def _check_password(self, body):
        """Check if the password is valid and security is initialized."""
        if not self.config.security_initialize:
            raise RuntimeError("First set a password")
    @middleware
    async def token_validation(self, request, handler):
        """Check security access of this layer."""
        request_from = None
        hassio_token = request.headers.get(HEADER_TOKEN)

        password = hash_password(body[ATTR_PASSWORD])
        if password != self.config.security_password:
            raise RuntimeError("Wrong password")
        # Blacklist
        if BLACKLIST.match(request.path):
            _LOGGER.warning("%s is blacklisted!", request.path)
            raise HTTPForbidden()

    @api_process
    async def info(self, request):
        """Return host information."""
        return {
            ATTR_INITIALIZE: self.config.security_initialize,
            ATTR_TOTP: self.config.security_totp is not None,
        }
        # Ignore security check
        if NO_SECURITY_CHECK.match(request.path):
            _LOGGER.debug("Passthrough %s", request.path)
            return await handler(request)

    @api_process
    async def options(self, request):
        """Set options / password."""
        body = await api_validate(SCHEMA_PASSWORD, request)
        # No token
        if not hassio_token:
            _LOGGER.warning("No API token provided for %s", request.path)
            raise HTTPUnauthorized()

        if self.config.security_initialize:
            raise RuntimeError("Password is already set!")
        # Home Assistant
        # The UUID check can be removed with version 131
        if hassio_token in (self.sys_homeassistant.uuid,
                            self.sys_homeassistant.hassio_token):
            _LOGGER.debug("%s access from Home Assistant", request.path)
            request_from = self.sys_homeassistant

        self.config.security_password = hash_password(body[ATTR_PASSWORD])
        self.config.security_initialize = True
        return True
        # Host
        if hassio_token == self.sys_machine_id:
            _LOGGER.debug("%s access from Host", request.path)
            request_from = self.sys_host

    @api_process
    async def totp(self, request):
        """Set and initialize TOTP."""
        body = await api_validate(SCHEMA_PASSWORD, request)
        self._check_password(body)
        # Add-on
        addon = None
        if hassio_token and not request_from:
            addon = self.sys_addons.from_token(hassio_token)

        # generate TOTP
        totp_init_key = pyotp.random_base32()
        totp = pyotp.TOTP(totp_init_key)
        # Check Add-on API access
        if addon and ADDONS_API_BYPASS.match(request.path):
            _LOGGER.debug("Passthrough %s from %s", request.path, addon.slug)
            request_from = addon
        elif addon and addon.access_hassio_api:
            # Check role
            if ADDONS_ROLE_ACCESS[addon.hassio_role].match(request.path):
                _LOGGER.info("%s access from %s", request.path, addon.slug)
                request_from = addon
            else:
                _LOGGER.warning("%s no role for %s", request.path, addon.slug)

        # init qrcode
        buff = io.BytesIO()
        if request_from:
            request[REQUEST_FROM] = request_from
            return await handler(request)

        qrcode = pyqrcode.create(totp.provisioning_uri("Hass.IO"))
        qrcode.svg(buff)

        # finish
        self.config.security_totp = totp_init_key
        return web.Response(body=buff.getvalue(), content_type='image/svg+xml')

    @api_process
    async def session(self, request):
        """Set and initialize a session."""
        body = await api_validate(SCHEMA_SESSION, request)
        self._check_password(body)

        # check TOTP
        if self.config.security_totp:
            totp = pyotp.TOTP(self.config.security_totp)
            if body[ATTR_TOTP] != totp.now():
                raise RuntimeError("Invalid TOTP token!")

        # create session
        valid_until = datetime.now() + timedelta(days=1)
        session = hashlib.sha256(os.urandom(54)).hexdigest()

        # store session
        self.config.add_security_session(session, valid_until)
        return {ATTR_SESSION: session}
        _LOGGER.error("Invalid token for access %s", request.path)
        raise HTTPForbidden()
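A hedged sketch of how the ADDONS_ROLE_ACCESS table above gates add-on requests; the request paths are illustrative:

# ROLE_MANAGER may reach host endpoints, ROLE_DEFAULT may not:
print(bool(ADDONS_ROLE_ACCESS[ROLE_MANAGER].match('/host/reboot')))        # True
print(bool(ADDONS_ROLE_ACCESS[ROLE_DEFAULT].match('/host/reboot')))        # False
print(bool(ADDONS_ROLE_ACCESS[ROLE_BACKUP].match('/snapshots/new/full')))  # True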
75 hassio/api/services.py Normal file
@@ -0,0 +1,75 @@
"""Init file for Hass.io services RESTful API."""

from .utils import api_process, api_validate
from ..const import (
    ATTR_AVAILABLE, ATTR_PROVIDERS, ATTR_SLUG, ATTR_SERVICES, REQUEST_FROM,
    PROVIDE_SERVICE)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden


class APIServices(CoreSysAttributes):
    """Handle RESTful API for services functions."""

    def _extract_service(self, request):
        """Return service, throw an exception if it doesn't exist."""
        service = self.sys_services.get(request.match_info.get('service'))
        if not service:
            raise APIError("Service does not exist")

        return service

    @api_process
    async def list(self, request):
        """Show registered services."""
        services = []
        for service in self.sys_services.list_services:
            services.append({
                ATTR_SLUG: service.slug,
                ATTR_AVAILABLE: service.enabled,
                ATTR_PROVIDERS: service.providers,
            })

        return {ATTR_SERVICES: services}

    @api_process
    async def set_service(self, request):
        """Write data into a service."""
        service = self._extract_service(request)
        body = await api_validate(service.schema, request)
        addon = request[REQUEST_FROM]

        _check_access(request, service.slug)
        service.set_service_data(addon, body)

    @api_process
    async def get_service(self, request):
        """Read data from a service."""
        service = self._extract_service(request)

        # Access
        _check_access(request, service.slug)

        if not service.enabled:
            raise APIError("Service not enabled")
        return service.get_service_data()

    @api_process
    async def del_service(self, request):
        """Delete data from a service."""
        service = self._extract_service(request)
        addon = request[REQUEST_FROM]

        # Access
        _check_access(request, service.slug, True)
        service.del_service_data(addon)


def _check_access(request, service, provide=False):
    """Raise an error if the rights are wrong."""
    addon = request[REQUEST_FROM]
    if not addon.services_role.get(service):
        raise APIForbidden(f"No access to {service} service!")

    if provide and addon.services_role.get(service) != PROVIDE_SERVICE:
        raise APIForbidden(f"No access to write {service} service!")
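A hedged sketch of the module-level access check above; the stand-in add-on class and the 'mqtt' slug are illustrative, not part of the Supervisor code:

class _FakeAddon:
    """Stand-in add-on object for illustration only."""
    services_role = {'mqtt': PROVIDE_SERVICE}   # this add-on provides mqtt

request = {REQUEST_FROM: _FakeAddon()}          # requests behave like dicts here
_check_access(request, 'mqtt')                  # read access passes
_check_access(request, 'mqtt', provide=True)    # write access passes too,
                                                # because the role is PROVIDE_SERVICE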
@@ -1,61 +1,72 @@
"""Init file for HassIO snapshot rest api."""
"""Init file for Hass.io snapshot RESTful API."""
import asyncio
import logging
from pathlib import Path
from tempfile import TemporaryDirectory

from aiohttp import web
import voluptuous as vol

from .util import api_process, api_validate
from .utils import api_process, api_validate
from ..snapshots.validate import ALL_FOLDERS
from ..const import (
    ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
    ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
    ATTR_DEVICES, ATTR_SNAPSHOTS)
    ATTR_SNAPSHOTS, ATTR_PASSWORD, ATTR_PROTECTED, CONTENT_TYPE_TAR)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError

_LOGGER = logging.getLogger(__name__)


# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema({
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
    vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS):
        vol.All([vol.Coerce(str)], vol.Unique()),
    vol.Optional(ATTR_FOLDERS):
        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
})

SCHEMA_RESTORE_FULL = vol.Schema({
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})

SCHEMA_SNAPSHOT_FULL = vol.Schema({
    vol.Optional(ATTR_NAME): vol.Coerce(str),
    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})

SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS):
        vol.All([vol.Coerce(str)], vol.Unique()),
    vol.Optional(ATTR_FOLDERS):
        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
})


class APISnapshots(object):
    """Handle rest api for snapshot functions."""

    def __init__(self, config, loop, snapshots):
        """Initialize network rest api part."""
        self.config = config
        self.loop = loop
        self.snapshots = snapshots
class APISnapshots(CoreSysAttributes):
    """Handle RESTful API for snapshot functions."""

    def _extract_snapshot(self, request):
        """Return addon and if not exists trow a exception."""
        snapshot = self.snapshots.get(request.match_info.get('snapshot'))
        """Return snapshot, throw an exception if it doesn't exist."""
        snapshot = self.sys_snapshots.get(request.match_info.get('snapshot'))
        if not snapshot:
            raise RuntimeError("Snapshot not exists")
            raise APIError("Snapshot does not exist")
        return snapshot

    @api_process
    async def list(self, request):
        """Return snapshot list."""
        data_snapshots = []
        for snapshot in self.snapshots.list_snapshots:
        for snapshot in self.sys_snapshots.list_snapshots:
            data_snapshots.append({
                ATTR_SLUG: snapshot.slug,
                ATTR_NAME: snapshot.name,
                ATTR_DATE: snapshot.date,
                ATTR_TYPE: snapshot.sys_type,
                ATTR_PROTECTED: snapshot.protected,
            })

        return {
@@ -65,7 +76,7 @@ class APISnapshots(object):
    @api_process
    async def reload(self, request):
        """Reload snapshot list."""
        await asyncio.shield(self.snapshots.reload(), loop=self.loop)
        await asyncio.shield(self.sys_snapshots.reload())
        return True

    @api_process
@@ -79,6 +90,7 @@ class APISnapshots(object):
                ATTR_SLUG: addon_data[ATTR_SLUG],
                ATTR_NAME: addon_data[ATTR_NAME],
                ATTR_VERSION: addon_data[ATTR_VERSION],
                ATTR_SIZE: addon_data[ATTR_SIZE],
            })

        return {
@@ -87,10 +99,8 @@ class APISnapshots(object):
            ATTR_NAME: snapshot.name,
            ATTR_DATE: snapshot.date,
            ATTR_SIZE: snapshot.size,
            ATTR_HOMEASSISTANT: {
                ATTR_VERSION: snapshot.homeassistant_version,
                ATTR_DEVICES: snapshot.homeassistant_devices,
            },
            ATTR_PROTECTED: snapshot.protected,
            ATTR_HOMEASSISTANT: snapshot.homeassistant_version,
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: snapshot.repositories,
            ATTR_FOLDERS: snapshot.folders,
@@ -100,36 +110,78 @@ class APISnapshots(object):
    async def snapshot_full(self, request):
        """Create a full snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
        return await asyncio.shield(
            self.snapshots.do_snapshot_full(**body), loop=self.loop)
        snapshot = await asyncio.shield(
            self.sys_snapshots.do_snapshot_full(**body))

        if snapshot:
            return {ATTR_SLUG: snapshot.slug}
        return False

    @api_process
    async def snapshot_partial(self, request):
        """Create a partial snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        return await asyncio.shield(
            self.snapshots.do_snapshot_partial(**body), loop=self.loop)
        snapshot = await asyncio.shield(
            self.sys_snapshots.do_snapshot_partial(**body))

        if snapshot:
            return {ATTR_SLUG: snapshot.slug}
        return False

    @api_process
    def restore_full(self, request):
    async def restore_full(self, request):
        """Full-Restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        return asyncio.shield(
            self.snapshots.do_restore_full(snapshot), loop=self.loop)
        body = await api_validate(SCHEMA_RESTORE_FULL, request)

        return await asyncio.shield(
            self.sys_snapshots.do_restore_full(snapshot, **body))

    @api_process
    async def restore_partial(self, request):
        """Partial-Restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)

        return await asyncio.shield(
            self.snapshots.do_restore_partial(snapshot, **body),
            loop=self.loop
        )
            self.sys_snapshots.do_restore_partial(snapshot, **body))

    @api_process
    async def remove(self, request):
        """Remove a snapshot."""
        snapshot = self._extract_snapshot(request)
        return self.snapshots.remove(snapshot)
        return self.sys_snapshots.remove(snapshot)

    async def download(self, request):
        """Download a snapshot file."""
        snapshot = self._extract_snapshot(request)

        _LOGGER.info("Download snapshot %s", snapshot.slug)
        response = web.FileResponse(snapshot.tarfile)
        response.content_type = CONTENT_TYPE_TAR
        return response

    @api_process
    async def upload(self, request):
        """Upload a snapshot file."""
        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
            tar_file = Path(temp_dir, f"snapshot.tar")

            try:
                with tar_file.open('wb') as snapshot:
                    async for data in request.content.iter_any():
                        snapshot.write(data)

            except OSError as err:
                _LOGGER.error("Can't write new snapshot file: %s", err)
                return False

            except asyncio.CancelledError:
                return False

            snapshot = await asyncio.shield(
                self.sys_snapshots.import_snapshot(tar_file))

        if snapshot:
            return {ATTR_SLUG: snapshot.slug}
        return False
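The upload handler above streams request.content chunk by chunk into a temporary tar file before importing it. A hedged client-side sketch of calling it; the endpoint path is an assumption, not taken from this diff:

import aiohttp

async def upload_snapshot(session: aiohttp.ClientSession, path: str):
    """Stream a local snapshot tar to the Supervisor upload endpoint."""
    with open(path, 'rb') as tar:
        async with session.post(
                'http://hassio/snapshots/new/upload', data=tar) as resp:
            return await resp.json()  # {"result": "ok", "data": {"slug": ...}}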
@@ -1,24 +1,29 @@
"""Init file for HassIO supervisor rest api."""
"""Init file for Hass.io Supervisor RESTful API."""
import asyncio
import logging

import voluptuous as vol

from .util import api_process, api_process_raw, api_validate
from .utils import api_process, api_process_raw, api_validate
from ..const import (
    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_ARCH,
    HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
    ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
    ATTR_STATE, CONTENT_TYPE_BINARY)
from ..validate import validate_timezone
    ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
    ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
    ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
from ..coresys import CoreSysAttributes
from ..validate import WAIT_BOOT, REPOSITORIES, CHANNELS
from ..exceptions import APIError
from ..utils.validate import validate_timezone

_LOGGER = logging.getLogger(__name__)

SCHEMA_OPTIONS = vol.Schema({
    # pylint: disable=no-value-for-parameter
    vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
    vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
    vol.Optional(ATTR_CHANNEL): CHANNELS,
    vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
    vol.Optional(ATTR_TIMEZONE): validate_timezone,
    vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
})

SCHEMA_VERSION = vol.Schema({
@@ -26,30 +31,19 @@ SCHEMA_VERSION = vol.Schema({
})


class APISupervisor(object):
    """Handle rest api for supervisor functions."""

    def __init__(self, config, loop, supervisor, snapshots, addons,
                 host_control, updater):
        """Initialize supervisor rest api part."""
        self.config = config
        self.loop = loop
        self.supervisor = supervisor
        self.addons = addons
        self.snapshots = snapshots
        self.host_control = host_control
        self.updater = updater
class APISupervisor(CoreSysAttributes):
    """Handle RESTful API for Supervisor functions."""

    @api_process
    async def ping(self, request):
        """Return ok for signal that the api is ready."""
        """Return ok for signal that the API is ready."""
        return True

    @api_process
    async def info(self, request):
        """Return host information."""
        list_addons = []
        for addon in self.addons.list_addons:
        for addon in self.sys_addons.list_addons:
            if addon.is_installed:
                list_addons.append({
                    ATTR_NAME: addon.name,
@@ -59,67 +53,86 @@ class APISupervisor(object):
                    ATTR_VERSION: addon.last_version,
                    ATTR_INSTALLED: addon.version_installed,
                    ATTR_REPOSITORY: addon.repository,
                    ATTR_ICON: addon.with_icon,
                    ATTR_LOGO: addon.with_logo,
                })

        return {
            ATTR_VERSION: HASSIO_VERSION,
            ATTR_LAST_VERSION: self.updater.version_hassio,
            ATTR_BETA_CHANNEL: self.updater.beta_channel,
            ATTR_ARCH: self.config.arch,
            ATTR_TIMEZONE: self.config.timezone,
            ATTR_LAST_VERSION: self.sys_updater.version_hassio,
            ATTR_CHANNEL: self.sys_updater.channel,
            ATTR_ARCH: self.sys_supervisor.arch,
            ATTR_WAIT_BOOT: self.sys_config.wait_boot,
            ATTR_TIMEZONE: self.sys_config.timezone,
            ATTR_ADDONS: list_addons,
            ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
            ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
        }

    @api_process
    async def options(self, request):
        """Set supervisor options."""
        """Set Supervisor options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_BETA_CHANNEL in body:
            self.updater.beta_channel = body[ATTR_BETA_CHANNEL]
        if ATTR_CHANNEL in body:
            self.sys_updater.channel = body[ATTR_CHANNEL]

        if ATTR_TIMEZONE in body:
            self.config.timezone = body[ATTR_TIMEZONE]
            self.sys_config.timezone = body[ATTR_TIMEZONE]

        if ATTR_WAIT_BOOT in body:
            self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]

        if ATTR_ADDONS_REPOSITORIES in body:
            new = set(body[ATTR_ADDONS_REPOSITORIES])
            await asyncio.shield(self.addons.load_repositories(new))
            await asyncio.shield(self.sys_addons.load_repositories(new))

        self.sys_updater.save_data()
        self.sys_config.save_data()
        return True

    @api_process
    async def stats(self, request):
        """Return resource information."""
        stats = await self.sys_supervisor.stats()
        if not stats:
            raise APIError("No stats available")

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request):
        """Update supervisor OS."""
        """Update Supervisor OS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.updater.version_hassio)
        version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)

        if version == self.supervisor.version:
            raise RuntimeError("Version {} is already in use".format(version))
        if version == self.sys_supervisor.version:
            raise APIError("Version {} is already in use".format(version))

        return await asyncio.shield(
            self.supervisor.update(version), loop=self.loop)
        return await asyncio.shield(self.sys_supervisor.update(version))

    @api_process
    async def reload(self, request):
        """Reload addons, config ect."""
        """Reload add-ons, configuration, etc."""
        tasks = [
            self.addons.reload(),
            self.snapshots.reload(),
            self.updater.fetch_data(),
            self.host_control.load()
            self.sys_updater.reload(),
        ]
        results, _ = await asyncio.shield(
            asyncio.wait(tasks, loop=self.loop), loop=self.loop)
        results, _ = await asyncio.shield(asyncio.wait(tasks))

        for result in results:
            if result.exception() is not None:
                raise RuntimeError("Some reload task fails!")
                raise APIError("Some reload task failed!")

        return True

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return supervisor docker logs."""
        return self.supervisor.logs()
        """Return supervisor Docker logs."""
        return self.sys_supervisor.logs()
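Every mutating endpoint above wraps its work in asyncio.shield, so the operation survives if the HTTP request that triggered it is cancelled (for example, the client disconnects mid-update). A standalone sketch of the difference:

import asyncio

async def critical_work():
    await asyncio.sleep(5)          # stands in for sys_supervisor.update()
    return 'updated'

async def handler():
    # Without shield, cancelling handler() would cancel critical_work() too;
    # with shield, only the outer await is cancelled and the work continues.
    return await asyncio.shield(critical_work())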
@@ -1,71 +1,51 @@
"""Init file for HassIO util for rest api."""
"""Init file for Hass.io util for RESTful API."""
import json
import hashlib
import logging

from aiohttp import web
from aiohttp.web_exceptions import HTTPServiceUnavailable
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..const import (
    JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
    CONTENT_TYPE_BINARY)
from ..exceptions import HassioError, APIError, APIForbidden

_LOGGER = logging.getLogger(__name__)


def json_loads(data):
    """Extract JSON from a string, with support for '' and None."""
    if not data:
        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
        return {}
        raise APIError("Invalid json")


def api_process(method):
    """Wrap function with true/false calls to rest api."""
    async def wrap_api(api, *args, **kwargs):
        """Return api information."""
        """Return API information."""
        try:
            answer = await method(api, *args, **kwargs)
        except RuntimeError as err:
        except (APIError, APIForbidden) as err:
            return api_return_error(message=str(err))
        except HassioError:
            return api_return_error(message="Unknown Error, see logs")

        if isinstance(answer, dict):
            return api_return_ok(data=answer)
        if isinstance(answer, web.Response):
            return answer
        elif answer:
            return api_return_ok()
        elif isinstance(answer, bool) and not answer:
            return api_return_error()
        return api_return_ok()

    return wrap_api


def api_process_hostcontrol(method):
    """Wrap HostControl calls to rest api."""
    async def wrap_hostcontrol(api, *args, **kwargs):
        """Return host information."""
        if not api.host_control.active:
            raise HTTPServiceUnavailable()

        try:
            answer = await method(api, *args, **kwargs)
        except RuntimeError as err:
            return api_return_error(message=str(err))

        if isinstance(answer, dict):
            return api_return_ok(data=answer)
        elif answer is None:
            return api_return_error("Function is not supported")
        elif answer:
            return api_return_ok()
        return api_return_error()

    return wrap_hostcontrol


def api_process_raw(content):
    """Wrap content_type into function."""
    def wrap_method(method):
@@ -75,9 +55,12 @@ def api_process_raw(content):
            try:
                msg_data = await method(api, *args, **kwargs)
                msg_type = content
            except RuntimeError as err:
            except (APIError, APIForbidden) as err:
                msg_data = str(err).encode()
                msg_type = CONTENT_TYPE_BINARY
            except HassioError:
                msg_data = b''
                msg_type = CONTENT_TYPE_BINARY

            return web.Response(body=msg_data, content_type=msg_type)

@@ -86,7 +69,7 @@ def api_process_raw(content):


def api_return_error(message=None):
    """Return a API error message."""
    """Return an API error message."""
    return web.json_response({
        JSON_RESULT: RESULT_ERROR,
        JSON_MESSAGE: message,
@@ -94,7 +77,7 @@ def api_return_error(message=None):


def api_return_ok(data=None):
    """Return a API ok answer."""
    """Return an API ok answer."""
    return web.json_response({
        JSON_RESULT: RESULT_OK,
        JSON_DATA: data or {},
@@ -107,12 +90,6 @@ async def api_validate(schema, request):
    try:
        data = schema(data)
    except vol.Invalid as ex:
        raise RuntimeError(humanize_error(data, ex)) from None
        raise APIError(humanize_error(data, ex)) from None

    return data


def hash_password(password):
    """Hash and salt our passwords."""
    key = ")*()*SALT_HASSIO2123{}6554547485HSKA!!*JSLAfdasda$".format(password)
    return hashlib.sha256(key.encode()).hexdigest()
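A hedged sketch of how the api_process decorator above is meant to be used; APIExample and its payloads are illustrative:

class APIExample:
    @api_process
    async def info(self, request):
        return {'ok': True}       # dict -> {"result": "ok", "data": {...}}

    @api_process
    async def failing(self, request):
        raise APIError('boom')    # -> {"result": "error", "message": "boom"}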
49 hassio/arch.json Normal file
@@ -0,0 +1,49 @@
{
    "raspberrypi": [
        "armhf"
    ],
    "raspberrypi2": [
        "armv7",
        "armhf"
    ],
    "raspberrypi3": [
        "armv7",
        "armhf"
    ],
    "raspberrypi3-64": [
        "aarch64",
        "armv7",
        "armhf"
    ],
    "tinker": [
        "armv7",
        "armhf"
    ],
    "odroid-c2": [
        "aarch64"
    ],
    "odroid-xu": [
        "armv7",
        "armhf"
    ],
    "orangepi-prime": [
        "aarch64"
    ],
    "qemux86": [
        "i386"
    ],
    "qemux86-64": [
        "amd64",
        "i386"
    ],
    "qemuarm": [
        "armhf"
    ],
    "qemuarm-64": [
        "aarch64"
    ],
    "intel-nuc": [
        "amd64",
        "i386"
    ]
}
65 hassio/arch.py Normal file
@@ -0,0 +1,65 @@
"""Handle arch for the underlying machine/platforms."""
import logging
from typing import List
from pathlib import Path

from .coresys import CoreSysAttributes, CoreSys
from .exceptions import HassioArchNotFound, JsonFileError
from .utils.json import read_json_file

_LOGGER = logging.getLogger(__name__)


class CpuArch(CoreSysAttributes):
    """Manage available architectures."""

    def __init__(self, coresys: CoreSys) -> None:
        """Initialize CPU architecture handler."""
        self.coresys = coresys
        self._supported_arch: List[str] = []
        self._default_arch: str

    @property
    def default(self) -> str:
        """Return system default arch."""
        return self._default_arch

    @property
    def supervisor(self) -> str:
        """Return supervisor arch."""
        return self.sys_supervisor.arch

    @property
    def supported(self) -> List[str]:
        """Return the arches supported by this CPU/machine."""
        return self._supported_arch

    async def load(self) -> None:
        """Load data and initialize default arch."""
        try:
            arch_data = read_json_file(Path(__file__).parent.joinpath("arch.json"))
        except JsonFileError:
            _LOGGER.warning("Can't read arch json")
            return

        # Evaluate current CPU/platform
        if not self.sys_machine or self.sys_machine not in arch_data:
            _LOGGER.warning("Can't detect underlying machine type!")
            self._default_arch = self.sys_supervisor.arch
            self._supported_arch.append(self.default)
            return

        # Use configs from arch.json
        self._supported_arch.extend(arch_data[self.sys_machine])
        self._default_arch = self.supported[0]

    def is_supported(self, arch_list: List[str]) -> bool:
        """Return True if any arch in the list is supported by this platform."""
        return not set(self.supported).isdisjoint(set(arch_list))

    def match(self, arch_list: List[str]) -> str:
        """Return the best match for this CPU/platform."""
        for self_arch in self.supported:
            if self_arch in arch_list:
                return self_arch
        raise HassioArchNotFound()
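A hedged usage sketch of the matching logic above, bypassing load() and seeding the private supported list directly for illustration (coresys construction is elided):

arch = CpuArch(coresys)
arch._supported_arch.extend(['armv7', 'armhf'])   # as on a raspberrypi2
print(arch.is_supported(['aarch64', 'armhf']))    # True: armhf overlaps
print(arch.match(['aarch64', 'armhf']))           # 'armhf', the first supported hit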
95 hassio/auth.py Normal file
@@ -0,0 +1,95 @@
"""Manage SSO for Add-ons with Home Assistant user."""
import logging
import hashlib

from .const import (
    FILE_HASSIO_AUTH, ATTR_PASSWORD, ATTR_USERNAME, ATTR_ADDON)
from .coresys import CoreSysAttributes
from .utils.json import JsonConfig
from .validate import SCHEMA_AUTH_CONFIG
from .exceptions import AuthError, HomeAssistantAPIError

_LOGGER = logging.getLogger(__name__)


class Auth(JsonConfig, CoreSysAttributes):
    """Manage SSO for Add-ons with Home Assistant user."""

    def __init__(self, coresys):
        """Initialize the Hass.io auth system."""
        super().__init__(FILE_HASSIO_AUTH, SCHEMA_AUTH_CONFIG)
        self.coresys = coresys

    def _check_cache(self, username, password):
        """Check password in cache."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) == password_h:
            _LOGGER.info("Cache hit for %s", username)
            return True

        _LOGGER.warning("No cache hit for %s", username)
        return False

    def _update_cache(self, username, password):
        """Cache a username/password pair."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) == password_h:
            return

        self._data[username_h] = password_h
        self.save_data()

    def _dismatch_cache(self, username, password):
        """Remove user from cache."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) != password_h:
            return

        self._data.pop(username_h, None)
        self.save_data()

    async def check_login(self, addon, username, password):
        """Check username login."""
        if password is None:
            _LOGGER.error("None as password is not supported!")
            raise AuthError()
        _LOGGER.info("Auth request from %s for %s", addon.slug, username)

        # Check API state
        if not await self.sys_homeassistant.check_api_state():
            _LOGGER.info("Home Assistant not running, check cache")
            return self._check_cache(username, password)

        try:
            async with self.sys_homeassistant.make_request(
                    'post', 'api/hassio_auth', json={
                        ATTR_USERNAME: username,
                        ATTR_PASSWORD: password,
                        ATTR_ADDON: addon.slug,
                    }) as req:

                if req.status == 200:
                    _LOGGER.info("Successful login from %s", username)
                    self._update_cache(username, password)
                    return True

                _LOGGER.warning("Wrong login from %s", username)
                self._dismatch_cache(username, password)
                return False
        except HomeAssistantAPIError:
            _LOGGER.error("Can't request auth on Home Assistant!")

        raise AuthError()


def _rehash(value, salt2=""):
    """Rehash a value."""
    for idx in range(1, 20):
        value = hashlib.sha256(f"{value}{idx}{salt2}".encode()).hexdigest()
    return value
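_rehash chains 19 rounds of SHA-256, mixing the round index and an optional second salt into each round. A quick illustration of the cache-key scheme; the values are examples:

user_key = _rehash('paulus')                  # cache key for the username
pass_key = _rehash('secret', salt2='paulus')  # password hash bound to the user
print(len(user_key))                          # 64: always a SHA-256 hex digest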
@@ -1,68 +1,127 @@
"""Bootstrap HassIO."""
"""Bootstrap Hass.io."""
import logging
import os
import signal
from pathlib import Path
import shutil
import signal

from colorlog import ColoredFormatter

from .addons import AddonManager
from .api import RestAPI
from .arch import CpuArch
from .auth import Auth
from .const import SOCKET_DOCKER
from .config import CoreConfig
from .core import HassIO
from .coresys import CoreSys
from .dbus import DBusManager
from .discovery import Discovery
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater

_LOGGER = logging.getLogger(__name__)

ENV_SHARE = "SUPERVISOR_SHARE"
ENV_NAME = "SUPERVISOR_NAME"
ENV_REPO = "HOMEASSISTANT_REPOSITORY"

def initialize_system_data():
    """Setup default config and create folders."""
    config = CoreConfig()
MACHINE_ID = Path("/etc/machine-id")

    # homeassistant config folder
    if not config.path_config.is_dir():
        _LOGGER.info(
            "Create Home-Assistant config folder %s", config.path_config)
        config.path_config.mkdir()

async def initialize_coresys():
    """Initialize Hass.io coresys/objects."""
    coresys = CoreSys()

    # Initialize core objects
    coresys.core = HassIO(coresys)
    coresys.arch = CpuArch(coresys)
    coresys.auth = Auth(coresys)
    coresys.updater = Updater(coresys)
    coresys.api = RestAPI(coresys)
    coresys.supervisor = Supervisor(coresys)
    coresys.homeassistant = HomeAssistant(coresys)
    coresys.addons = AddonManager(coresys)
    coresys.snapshots = SnapshotManager(coresys)
    coresys.host = HostManager(coresys)
    coresys.tasks = Tasks(coresys)
    coresys.services = ServiceManager(coresys)
    coresys.discovery = Discovery(coresys)
    coresys.dbus = DBusManager(coresys)
    coresys.hassos = HassOS(coresys)

    # bootstrap config
    initialize_system_data(coresys)

    # Set Machine/Host ID
    if MACHINE_ID.exists():
        coresys.machine_id = MACHINE_ID.read_text().strip()

    return coresys


def initialize_system_data(coresys):
    """Set up the default configuration and create folders."""
    config = coresys.config

    # Home Assistant configuration folder
    if not config.path_homeassistant.is_dir():
        _LOGGER.info("Create Home Assistant configuration folder %s",
                     config.path_homeassistant)
        config.path_homeassistant.mkdir()

    # hassio ssl folder
    if not config.path_ssl.is_dir():
        _LOGGER.info("Create hassio ssl folder %s", config.path_ssl)
        _LOGGER.info("Create Hass.io SSL/TLS folder %s", config.path_ssl)
        config.path_ssl.mkdir()

    # hassio addon data folder
    if not config.path_addons_data.is_dir():
        _LOGGER.info(
            "Create hassio addon data folder %s", config.path_addons_data)
        _LOGGER.info("Create Hass.io Add-on data folder %s",
                     config.path_addons_data)
        config.path_addons_data.mkdir(parents=True)

    if not config.path_addons_local.is_dir():
        _LOGGER.info("Create hassio addon local repository folder %s",
        _LOGGER.info("Create Hass.io Add-on local repository folder %s",
                     config.path_addons_local)
        config.path_addons_local.mkdir(parents=True)

    if not config.path_addons_git.is_dir():
        _LOGGER.info("Create hassio addon git repositories folder %s",
        _LOGGER.info("Create Hass.io Add-on git repositories folder %s",
                     config.path_addons_git)
        config.path_addons_git.mkdir(parents=True)

    # hassio tmp folder
    if not config.path_tmp.is_dir():
        _LOGGER.info("Create hassio temp folder %s", config.path_tmp)
        _LOGGER.info("Create Hass.io temp folder %s", config.path_tmp)
        config.path_tmp.mkdir(parents=True)

    # hassio backup folder
    if not config.path_backup.is_dir():
        _LOGGER.info("Create hassio backup folder %s", config.path_backup)
        _LOGGER.info("Create Hass.io backup folder %s", config.path_backup)
        config.path_backup.mkdir()

    # share folder
    if not config.path_share.is_dir():
        _LOGGER.info("Create hassio share folder %s", config.path_share)
        _LOGGER.info("Create Hass.io share folder %s", config.path_share)
        config.path_share.mkdir()

    # apparmor folder
    if not config.path_apparmor.is_dir():
        _LOGGER.info("Create Hass.io Apparmor folder %s", config.path_apparmor)
        config.path_apparmor.mkdir()

    return config


def migrate_system_env(config):
def migrate_system_env(coresys):
    """Clean up some stuff after an update."""
    config = coresys.config

    # hass.io 0.37 -> 0.38
    old_build = Path(config.path_hassio, "addons/build")
@@ -70,67 +129,78 @@ def migrate_system_env(config):
    try:
        old_build.rmdir()
    except OSError:
        _LOGGER.warning("Can't cleanup old addons build dir.")
        _LOGGER.warning("Can't cleanup old Add-on build directory")


def initialize_logging():
    """Set up the logging."""
    logging.basicConfig(level=logging.INFO)
    fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
           "[%(name)s] %(message)s")
    colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
    datefmt = '%y-%m-%d %H:%M:%S'
    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    colorfmt = f"%(log_color)s{fmt}%(reset)s"
    datefmt = "%y-%m-%d %H:%M:%S"

    # suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
    logging.getLogger().handlers[0].setFormatter(
        ColoredFormatter(
            colorfmt,
            datefmt=datefmt,
            reset=True,
            log_colors={
                'DEBUG': 'cyan',
                'INFO': 'green',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red',
            }
                "DEBUG": "cyan",
                "INFO": "green",
                "WARNING": "yellow",
                "ERROR": "red",
                "CRITICAL": "red",
            },
        ))


def check_environment():
    """Check that all needed environment variables exist."""
    for key in ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME',
                'HOMEASSISTANT_REPOSITORY'):
    # check environment variables
    for key in (ENV_SHARE, ENV_NAME, ENV_REPO):
        try:
            os.environ[key]
        except KeyError:
            _LOGGER.fatal("Can't find %s in env!", key)
            return False

    # check docker socket
    if not SOCKET_DOCKER.is_socket():
        _LOGGER.fatal("Can't find docker socket!")
        _LOGGER.fatal("Can't find Docker socket!")
        return False

    # check socat exec
    if not shutil.which("socat"):
        _LOGGER.fatal("Can't find socat!")
        return False

    # check gdbus exec
    if not shutil.which("gdbus"):
        _LOGGER.fatal("Can't find gdbus!")
        return False

    return True


def reg_signal(loop, hassio):
    """Register SIGTERM, SIGKILL to stop system."""
def reg_signal(loop):
    """Register SIGTERM, SIGHUP and SIGINT to stop the system."""
    try:
        loop.add_signal_handler(
            signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
        loop.add_signal_handler(signal.SIGTERM,
                                lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGTERM")

    try:
        loop.add_signal_handler(
            signal.SIGHUP, lambda: loop.create_task(hassio.stop()))
        loop.add_signal_handler(signal.SIGHUP,
                                lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGHUP")

    try:
        loop.add_signal_handler(
            signal.SIGINT, lambda: loop.create_task(hassio.stop()))
        loop.add_signal_handler(signal.SIGINT,
                                lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGINT")
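A hedged sketch of the bootstrap order the new code implies: build the coresys, then let reg_signal() arrange a clean loop.stop() on SIGTERM/SIGHUP/SIGINT. The main entry point itself is not part of this diff, so this flow is an assumption:

import asyncio

loop = asyncio.get_event_loop()
coresys = loop.run_until_complete(initialize_coresys())
reg_signal(loop)   # signals now stop the loop instead of killing the process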
192
hassio/config.py
192
hassio/config.py
@@ -1,21 +1,22 @@
|
||||
"""Bootstrap HassIO."""
|
||||
"""Bootstrap Hass.io."""
|
||||
from datetime import datetime
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path, PurePath
|
||||
|
||||
import pytz
|
||||
|
||||
from .const import (
|
||||
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
|
||||
ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_API_ENDPOINT,
|
||||
ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT)
|
||||
from .tools import JsonConfig
|
||||
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
|
||||
ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
|
||||
from .utils.dt import parse_datetime
|
||||
from .utils.json import JsonConfig
|
||||
from .validate import SCHEMA_HASSIO_CONFIG
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATETIME_FORMAT = "%Y%m%d %H:%M:%S"
|
||||
|
||||
HOMEASSISTANT_CONFIG = PurePath("homeassistant")
|
||||
HOMEASSISTANT_CONFIG = PurePath('homeassistant')
|
||||
|
||||
HASSIO_SSL = PurePath("ssl")
|
||||
|
||||
@@ -27,6 +28,11 @@ ADDONS_DATA = PurePath("addons/data")
|
||||
BACKUP_DATA = PurePath("backup")
|
||||
SHARE_DATA = PurePath("share")
|
||||
TMP_DATA = PurePath("tmp")
|
||||
APPARMOR_DATA = PurePath("apparmor")
|
||||
|
||||
DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()
|
||||
|
||||
RE_TIMEZONE = re.compile(r"time_zone: (?P<timezone>[\w/\-+]+)")
|
||||
|
||||
|
||||
class CoreConfig(JsonConfig):
|
||||
@@ -35,52 +41,79 @@ class CoreConfig(JsonConfig):
|
||||
def __init__(self):
|
||||
"""Initialize config object."""
|
||||
super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
|
||||
self.arch = None
|
||||
|
||||
@property
|
||||
def api_endpoint(self):
|
||||
"""Return IP address of api endpoint."""
|
||||
return self._data[ATTR_API_ENDPOINT]
|
||||
|
||||
@api_endpoint.setter
|
||||
def api_endpoint(self, value):
|
||||
"""Store IP address of api endpoint."""
|
||||
self._data[ATTR_API_ENDPOINT] = value
|
||||
|
||||
@property
|
||||
def timezone(self):
|
||||
"""Return system timezone."""
|
||||
config_file = Path(self.path_homeassistant, 'configuration.yaml')
|
||||
try:
|
||||
assert config_file.exists()
|
||||
configuration = config_file.read_text()
|
||||
|
||||
data = RE_TIMEZONE.search(configuration)
|
||||
assert data
|
||||
|
||||
timezone = data.group('timezone')
|
||||
pytz.timezone(timezone)
|
||||
except (pytz.exceptions.UnknownTimeZoneError, OSError, AssertionError):
|
||||
_LOGGER.debug("Can't parse Home Assistant timezone")
|
||||
return self._data[ATTR_TIMEZONE]
|
||||
|
||||
return timezone
|
||||
|
||||
@timezone.setter
|
||||
def timezone(self, value):
|
||||
"""Set system timezone."""
|
||||
self._data[ATTR_TIMEZONE] = value
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def wait_boot(self):
|
||||
"""Return wait time for auto boot stages."""
|
||||
return self._data[ATTR_WAIT_BOOT]
|
||||
|
||||
@wait_boot.setter
|
||||
def wait_boot(self, value):
|
||||
"""Set wait boot time."""
|
||||
self._data[ATTR_WAIT_BOOT] = value
|
||||
|
||||
@property
|
||||
def last_boot(self):
|
||||
"""Return last boot datetime."""
|
||||
boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)
|
||||
|
||||
boot_time = parse_datetime(boot_str)
|
||||
if not boot_time:
|
||||
return datetime.utcfromtimestamp(1)
|
||||
return boot_time
|
||||
|
||||
@last_boot.setter
|
||||
def last_boot(self, value):
|
||||
"""Set last boot datetime."""
|
||||
self._data[ATTR_LAST_BOOT] = value.isoformat()
|
||||
|
||||
@property
|
||||
def path_hassio(self):
|
||||
"""Return hassio data path."""
|
||||
"""Return Hass.io data path."""
|
||||
return HASSIO_DATA
|
||||
|
||||
@property
|
||||
def path_extern_hassio(self):
|
||||
"""Return hassio data path extern for docker."""
|
||||
"""Return Hass.io data path external for Docker."""
|
||||
return PurePath(os.environ['SUPERVISOR_SHARE'])
|
||||
|
||||
@property
|
||||
def path_extern_config(self):
|
||||
"""Return config path extern for docker."""
|
||||
def path_extern_homeassistant(self):
|
||||
"""Return config path external for Docker."""
|
||||
return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))
|
||||
|
||||
@property
|
||||
def path_config(self):
|
||||
def path_homeassistant(self):
|
||||
"""Return config path inside supervisor."""
|
||||
return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)
|
||||
|
||||
@property
|
||||
def path_extern_ssl(self):
|
||||
"""Return SSL path extern for docker."""
|
||||
"""Return SSL path external for Docker."""
|
||||
return str(PurePath(self.path_extern_hassio, HASSIO_SSL))
|
||||
|
||||
@property
|
||||
@@ -90,39 +123,44 @@ class CoreConfig(JsonConfig):
|
||||
|
||||
@property
|
||||
def path_addons_core(self):
|
||||
"""Return git path for core addons."""
|
||||
"""Return git path for core Add-ons."""
|
||||
return Path(HASSIO_DATA, ADDONS_CORE)
|
||||
|
||||
@property
|
||||
def path_addons_git(self):
|
||||
"""Return path for git addons."""
|
||||
"""Return path for Git Add-on."""
|
||||
return Path(HASSIO_DATA, ADDONS_GIT)
|
||||
|
||||
@property
|
||||
    def path_addons_local(self):
        """Return path for customs addons."""
        """Return path for custom Add-ons."""
        return Path(HASSIO_DATA, ADDONS_LOCAL)

    @property
    def path_extern_addons_local(self):
        """Return path for customs addons."""
        """Return path for custom Add-ons."""
        return PurePath(self.path_extern_hassio, ADDONS_LOCAL)

    @property
    def path_addons_data(self):
        """Return root addon data folder."""
        """Return root Add-on data folder."""
        return Path(HASSIO_DATA, ADDONS_DATA)

    @property
    def path_extern_addons_data(self):
        """Return root addon data folder extern for docker."""
        """Return root add-on data folder external for Docker."""
        return PurePath(self.path_extern_hassio, ADDONS_DATA)

    @property
    def path_tmp(self):
        """Return hass.io temp folder."""
        """Return Hass.io temp folder."""
        return Path(HASSIO_DATA, TMP_DATA)

    @property
    def path_extern_tmp(self):
        """Return Hass.io temp folder for Docker."""
        return PurePath(self.path_extern_hassio, TMP_DATA)

    @property
    def path_backup(self):
        """Return root backup data folder."""

@@ -130,7 +168,7 @@ class CoreConfig(JsonConfig):

    @property
    def path_extern_backup(self):
        """Return root backup data folder extern for docker."""
        """Return root backup data folder external for Docker."""
        return PurePath(self.path_extern_hassio, BACKUP_DATA)

    @property
@@ -138,14 +176,19 @@ class CoreConfig(JsonConfig):
        """Return root share data folder."""
        return Path(HASSIO_DATA, SHARE_DATA)

    @property
    def path_apparmor(self):
        """Return root Apparmor profile folder."""
        return Path(HASSIO_DATA, APPARMOR_DATA)

    @property
    def path_extern_share(self):
        """Return root share data folder extern for docker."""
        """Return root share data folder external for Docker."""
        return PurePath(self.path_extern_hassio, SHARE_DATA)

    @property
    def addons_repositories(self):
        """Return list of addons custom repositories."""
        """Return list of custom Add-on repositories."""
        return self._data[ATTR_ADDONS_CUSTOM_LIST]

    def add_addon_repository(self, repo):
@@ -154,7 +197,6 @@ class CoreConfig(JsonConfig):
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
        self.save()

    def drop_addon_repository(self, repo):
        """Remove a custom repository from list."""
@@ -162,79 +204,3 @@ class CoreConfig(JsonConfig):
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
        self.save()

    @property
    def security_initialize(self):
        """Return is security was initialize."""
        return self._data[ATTR_SECURITY]

    @security_initialize.setter
    def security_initialize(self, value):
        """Set is security initialize."""
        self._data[ATTR_SECURITY] = value
        self.save()

    @property
    def security_totp(self):
        """Return the TOTP key."""
        return self._data.get(ATTR_TOTP)

    @security_totp.setter
    def security_totp(self, value):
        """Set the TOTP key."""
        self._data[ATTR_TOTP] = value
        self.save()

    @property
    def security_password(self):
        """Return the password key."""
        return self._data.get(ATTR_PASSWORD)

    @security_password.setter
    def security_password(self, value):
        """Set the password key."""
        self._data[ATTR_PASSWORD] = value
        self.save()

    @property
    def security_sessions(self):
        """Return api sessions."""
        return {
            session: datetime.strptime(until, DATETIME_FORMAT) for
            session, until in self._data[ATTR_SESSIONS].items()
        }

    def add_security_session(self, session, valid):
        """Set the a new session."""
        self._data[ATTR_SESSIONS].update(
            {session: valid.strftime(DATETIME_FORMAT)}
        )
        self.save()

    def drop_security_session(self, session):
        """Delete the a session."""
        self._data[ATTR_SESSIONS].pop(session, None)
        self.save()

    @property
    def audio_output(self):
        """Return ALSA audio output card,dev."""
        return self._data.get(ATTR_AUDIO_OUTPUT)

    @audio_output.setter
    def audio_output(self, value):
        """Set ALSA audio output card,dev."""
        self._data[ATTR_AUDIO_OUTPUT] = value
        self.save()

    @property
    def audio_input(self):
        """Return ALSA audio input card,dev."""
        return self._data.get(ATTR_AUDIO_INPUT)

    @audio_input.setter
    def audio_input(self, value):
        """Set ALSA audio input card,dev."""
        self._data[ATTR_AUDIO_INPUT] = value
        self.save()
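A note on the paired properties above: the path_* variants are real Path objects inside the Supervisor container and are used for file I/O, while the path_extern_* variants are PurePath objects that only name host-side locations for Docker bind mounts (the host filesystem is not visible from inside the container, so no I/O methods are needed). A minimal sketch of how such a pair is typically consumed; the host root and bind target here are assumed values, not part of this diff:

from pathlib import Path, PurePath

HASSIO_DATA = Path("/data")                    # path as seen inside the container
EXTERN_HASSIO = PurePath("/usr/share/hassio")  # hypothetical host-side data root

addon_data = Path(HASSIO_DATA, "addons/data/my_addon")               # file I/O
extern_addon_data = PurePath(EXTERN_HASSIO, "addons/data/my_addon")  # Docker binds

# the extern path is what gets handed to the Docker daemon as a volume source
volumes = {str(extern_addon_data): {"bind": "/data", "mode": "rw"}}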
hassio/const.py (386 changes)
@@ -1,150 +1,290 @@
"""Const file for HassIO."""
"""Constants file for Hass.io."""
from pathlib import Path
from ipaddress import ip_network

HASSIO_VERSION = '0.57'
HASSIO_VERSION = "151"

URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                      'hassio/{}/version.json')
URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
URL_HASSIO_APPARMOR = "https://s3.amazonaws.com/hassio-version/apparmor.txt"

URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
URL_HASSOS_OTA = (
    "https://github.com/home-assistant/hassos/releases/download/"
    "{version}/hassos_{board}-{version}.raucb"
)

HASSIO_DATA = Path("/data")

RUN_UPDATE_INFO_TASKS = 28800
RUN_UPDATE_SUPERVISOR_TASKS = 29100
RUN_UPDATE_ADDONS_TASKS = 57600
RUN_RELOAD_ADDONS_TASKS = 28800
RUN_RELOAD_SNAPSHOTS_TASKS = 72000
RUN_WATCHDOG_HOMEASSISTANT = 15
RUN_CLEANUP_API_SESSIONS = 900

RESTART_EXIT_CODE = 100

FILE_HASSIO_AUTH = Path(HASSIO_DATA, "auth.json")
FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
FILE_HASSIO_DISCOVERY = Path(HASSIO_DATA, "discovery.json")

SOCKET_DOCKER = Path("/var/run/docker.sock")
SOCKET_HC = Path("/var/run/hassio-hc.sock")

LABEL_VERSION = 'io.hass.version'
LABEL_ARCH = 'io.hass.arch'
LABEL_TYPE = 'io.hass.type'
DOCKER_NETWORK = "hassio"
DOCKER_NETWORK_MASK = ip_network("172.30.32.0/23")
DOCKER_NETWORK_RANGE = ip_network("172.30.33.0/24")

META_ADDON = 'addon'
META_SUPERVISOR = 'supervisor'
META_HOMEASSISTANT = 'homeassistant'
LABEL_VERSION = "io.hass.version"
LABEL_ARCH = "io.hass.arch"
LABEL_TYPE = "io.hass.type"
LABEL_MACHINE = "io.hass.machine"

JSON_RESULT = 'result'
JSON_DATA = 'data'
JSON_MESSAGE = 'message'
META_ADDON = "addon"
META_SUPERVISOR = "supervisor"
META_HOMEASSISTANT = "homeassistant"

RESULT_ERROR = 'error'
RESULT_OK = 'ok'
JSON_RESULT = "result"
JSON_DATA = "data"
JSON_MESSAGE = "message"

CONTENT_TYPE_BINARY = 'application/octet-stream'
CONTENT_TYPE_PNG = 'image/png'
RESULT_ERROR = "error"
RESULT_OK = "ok"

ATTR_DATE = 'date'
ATTR_ARCH = 'arch'
ATTR_HOSTNAME = 'hostname'
ATTR_TIMEZONE = 'timezone'
ATTR_OS = 'os'
ATTR_TYPE = 'type'
ATTR_SOURCE = 'source'
ATTR_FEATURES = 'features'
ATTR_ADDONS = 'addons'
ATTR_VERSION = 'version'
ATTR_LAST_VERSION = 'last_version'
ATTR_BETA_CHANNEL = 'beta_channel'
ATTR_NAME = 'name'
ATTR_SLUG = 'slug'
ATTR_DESCRIPTON = 'description'
ATTR_STARTUP = 'startup'
ATTR_BOOT = 'boot'
ATTR_PORTS = 'ports'
ATTR_MAP = 'map'
ATTR_WEBUI = 'webui'
ATTR_OPTIONS = 'options'
ATTR_INSTALLED = 'installed'
ATTR_DETACHED = 'detached'
ATTR_STATE = 'state'
ATTR_SCHEMA = 'schema'
ATTR_IMAGE = 'image'
ATTR_LOGO = 'logo'
ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
ATTR_REPOSITORY = 'repository'
ATTR_REPOSITORIES = 'repositories'
ATTR_URL = 'url'
ATTR_MAINTAINER = 'maintainer'
ATTR_PASSWORD = 'password'
ATTR_TOTP = 'totp'
ATTR_INITIALIZE = 'initialize'
ATTR_SESSION = 'session'
ATTR_SESSIONS = 'sessions'
ATTR_LOCATON = 'location'
ATTR_BUILD = 'build'
ATTR_DEVICES = 'devices'
ATTR_ENVIRONMENT = 'environment'
ATTR_HOST_NETWORK = 'host_network'
ATTR_NETWORK = 'network'
ATTR_TMPFS = 'tmpfs'
ATTR_PRIVILEGED = 'privileged'
ATTR_USER = 'user'
ATTR_SYSTEM = 'system'
ATTR_SNAPSHOTS = 'snapshots'
ATTR_HOMEASSISTANT = 'homeassistant'
ATTR_HASSIO = 'hassio'
ATTR_HASSIO_API = 'hassio_api'
ATTR_FOLDERS = 'folders'
ATTR_SIZE = 'size'
ATTR_TYPE = 'type'
ATTR_TIMEOUT = 'timeout'
ATTR_AUTO_UPDATE = 'auto_update'
ATTR_CUSTOM = 'custom'
ATTR_AUDIO = 'audio'
ATTR_AUDIO_INPUT = 'audio_input'
ATTR_AUDIO_OUTPUT = 'audio_output'
ATTR_INPUT = 'input'
ATTR_OUTPUT = 'output'
ATTR_DISK = 'disk'
ATTR_SERIAL = 'serial'
ATTR_SECURITY = 'security'
ATTR_API_ENDPOINT = 'api_endpoint'
ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'
CONTENT_TYPE_BINARY = "application/octet-stream"
CONTENT_TYPE_PNG = "image/png"
CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
HEADER_HA_ACCESS = "x-ha-access"
HEADER_TOKEN = "x-hassio-key"

STARTUP_INITIALIZE = 'initialize'
STARTUP_SYSTEM = 'system'
STARTUP_SERVICES = 'services'
STARTUP_APPLICATION = 'application'
STARTUP_ONCE = 'once'
ENV_TOKEN = "HASSIO_TOKEN"
ENV_TIME = "TZ"

BOOT_AUTO = 'auto'
BOOT_MANUAL = 'manual'
REQUEST_FROM = "HASSIO_FROM"

STATE_STARTED = 'started'
STATE_STOPPED = 'stopped'
STATE_NONE = 'none'
ATTR_MACHINE = "machine"
ATTR_WAIT_BOOT = "wait_boot"
ATTR_DEPLOYMENT = "deployment"
ATTR_WATCHDOG = "watchdog"
ATTR_CHANGELOG = "changelog"
ATTR_DATE = "date"
ATTR_ARCH = "arch"
ATTR_LONG_DESCRIPTION = "long_description"
ATTR_HOSTNAME = "hostname"
ATTR_TIMEZONE = "timezone"
ATTR_ARGS = "args"
ATTR_OPERATING_SYSTEM = "operating_system"
ATTR_CHASSIS = "chassis"
ATTR_TYPE = "type"
ATTR_SOURCE = "source"
ATTR_FEATURES = "features"
ATTR_ADDONS = "addons"
ATTR_PROVIDERS = "providers"
ATTR_VERSION = "version"
ATTR_VERSION_LATEST = "version_latest"
ATTR_AUTO_UART = "auto_uart"
ATTR_LAST_BOOT = "last_boot"
ATTR_LAST_VERSION = "last_version"
ATTR_CHANNEL = "channel"
ATTR_NAME = "name"
ATTR_SLUG = "slug"
ATTR_DESCRIPTON = "description"
ATTR_STARTUP = "startup"
ATTR_BOOT = "boot"
ATTR_PORTS = "ports"
ATTR_PORT = "port"
ATTR_SSL = "ssl"
ATTR_MAP = "map"
ATTR_WEBUI = "webui"
ATTR_OPTIONS = "options"
ATTR_INSTALLED = "installed"
ATTR_DETACHED = "detached"
ATTR_STATE = "state"
ATTR_SCHEMA = "schema"
ATTR_IMAGE = "image"
ATTR_ICON = "icon"
ATTR_LOGO = "logo"
ATTR_STDIN = "stdin"
ATTR_ADDONS_REPOSITORIES = "addons_repositories"
ATTR_REPOSITORY = "repository"
ATTR_REPOSITORIES = "repositories"
ATTR_URL = "url"
ATTR_MAINTAINER = "maintainer"
ATTR_PASSWORD = "password"
ATTR_TOTP = "totp"
ATTR_INITIALIZE = "initialize"
ATTR_LOCATON = "location"
ATTR_BUILD = "build"
ATTR_DEVICES = "devices"
ATTR_ENVIRONMENT = "environment"
ATTR_HOST_NETWORK = "host_network"
ATTR_HOST_PID = "host_pid"
ATTR_HOST_IPC = "host_ipc"
ATTR_HOST_DBUS = "host_dbus"
ATTR_NETWORK = "network"
ATTR_TMPFS = "tmpfs"
ATTR_PRIVILEGED = "privileged"
ATTR_USER = "user"
ATTR_SYSTEM = "system"
ATTR_SNAPSHOTS = "snapshots"
ATTR_HOMEASSISTANT = "homeassistant"
ATTR_HASSIO = "hassio"
ATTR_HASSIO_API = "hassio_api"
ATTR_HOMEASSISTANT_API = "homeassistant_api"
ATTR_UUID = "uuid"
ATTR_FOLDERS = "folders"
ATTR_SIZE = "size"
ATTR_TYPE = "type"
ATTR_TIMEOUT = "timeout"
ATTR_AUTO_UPDATE = "auto_update"
ATTR_CUSTOM = "custom"
ATTR_AUDIO = "audio"
ATTR_AUDIO_INPUT = "audio_input"
ATTR_AUDIO_OUTPUT = "audio_output"
ATTR_INPUT = "input"
ATTR_OUTPUT = "output"
ATTR_DISK = "disk"
ATTR_SERIAL = "serial"
ATTR_SECURITY = "security"
ATTR_BUILD_FROM = "build_from"
ATTR_SQUASH = "squash"
ATTR_GPIO = "gpio"
ATTR_LEGACY = "legacy"
ATTR_ADDONS_CUSTOM_LIST = "addons_custom_list"
ATTR_CPU_PERCENT = "cpu_percent"
ATTR_NETWORK_RX = "network_rx"
ATTR_NETWORK_TX = "network_tx"
ATTR_MEMORY_LIMIT = "memory_limit"
ATTR_MEMORY_USAGE = "memory_usage"
ATTR_BLK_READ = "blk_read"
ATTR_BLK_WRITE = "blk_write"
ATTR_ADDON = "addon"
ATTR_AVAILABLE = "available"
ATTR_HOST = "host"
ATTR_USERNAME = "username"
ATTR_DISCOVERY = "discovery"
ATTR_CONFIG = "config"
ATTR_SERVICES = "services"
ATTR_SERVICE = "service"
ATTR_DISCOVERY = "discovery"
ATTR_PROTECTED = "protected"
ATTR_CRYPTO = "crypto"
ATTR_BRANCH = "branch"
ATTR_KERNEL = "kernel"
ATTR_APPARMOR = "apparmor"
ATTR_DEVICETREE = "devicetree"
ATTR_CPE = "cpe"
ATTR_BOARD = "board"
ATTR_HASSOS = "hassos"
ATTR_HASSOS_CLI = "hassos_cli"
ATTR_VERSION_CLI = "version_cli"
ATTR_VERSION_CLI_LATEST = "version_cli_latest"
ATTR_REFRESH_TOKEN = "refresh_token"
ATTR_ACCESS_TOKEN = "access_token"
ATTR_DOCKER_API = "docker_api"
ATTR_FULL_ACCESS = "full_access"
ATTR_PROTECTED = "protected"
ATTR_RATING = "rating"
ATTR_HASSIO_ROLE = "hassio_role"
ATTR_SUPERVISOR = "supervisor"
ATTR_AUTH_API = "auth_api"
ATTR_KERNEL_MODULES = "kernel_modules"
ATTR_SUPPORTED_ARCH = "supported_arch"

MAP_CONFIG = 'config'
MAP_SSL = 'ssl'
MAP_ADDONS = 'addons'
MAP_BACKUP = 'backup'
MAP_SHARE = 'share'
PROVIDE_SERVICE = "provide"
NEED_SERVICE = "need"
WANT_SERVICE = "want"

ARCH_ARMHF = 'armhf'
ARCH_AARCH64 = 'aarch64'
ARCH_AMD64 = 'amd64'
ARCH_I386 = 'i386'
STARTUP_INITIALIZE = "initialize"
STARTUP_SYSTEM = "system"
STARTUP_SERVICES = "services"
STARTUP_APPLICATION = "application"
STARTUP_ONCE = "once"

REPOSITORY_CORE = 'core'
REPOSITORY_LOCAL = 'local'
STARTUP_ALL = [
    STARTUP_ONCE,
    STARTUP_INITIALIZE,
    STARTUP_SYSTEM,
    STARTUP_SERVICES,
    STARTUP_APPLICATION,
]

FOLDER_HOMEASSISTANT = 'homeassistant'
FOLDER_SHARE = 'share'
FOLDER_ADDONS = 'addons/local'
FOLDER_SSL = 'ssl'
BOOT_AUTO = "auto"
BOOT_MANUAL = "manual"

SNAPSHOT_FULL = 'full'
SNAPSHOT_PARTIAL = 'partial'
STATE_STARTED = "started"
STATE_STOPPED = "stopped"
STATE_NONE = "none"

MAP_CONFIG = "config"
MAP_SSL = "ssl"
MAP_ADDONS = "addons"
MAP_BACKUP = "backup"
MAP_SHARE = "share"

ARCH_ARMHF = "armhf"
ARCH_ARMV7 = "armv7"
ARCH_AARCH64 = "aarch64"
ARCH_AMD64 = "amd64"
ARCH_I386 = "i386"

ARCH_ALL = [ARCH_ARMHF, ARCH_ARMV7, ARCH_AARCH64, ARCH_AMD64, ARCH_I386]

CHANNEL_STABLE = "stable"
CHANNEL_BETA = "beta"
CHANNEL_DEV = "dev"

REPOSITORY_CORE = "core"
REPOSITORY_LOCAL = "local"

FOLDER_HOMEASSISTANT = "homeassistant"
FOLDER_SHARE = "share"
FOLDER_ADDONS = "addons/local"
FOLDER_SSL = "ssl"

SNAPSHOT_FULL = "full"
SNAPSHOT_PARTIAL = "partial"

CRYPTO_AES128 = "aes128"

SECURITY_PROFILE = "profile"
SECURITY_DEFAULT = "default"
SECURITY_DISABLE = "disable"

PRIVILEGED_NET_ADMIN = "NET_ADMIN"
PRIVILEGED_SYS_ADMIN = "SYS_ADMIN"
PRIVILEGED_SYS_RAWIO = "SYS_RAWIO"
PRIVILEGED_IPC_LOCK = "IPC_LOCK"
PRIVILEGED_SYS_TIME = "SYS_TIME"
PRIVILEGED_SYS_NICE = "SYS_NICE"
PRIVILEGED_SYS_MODULE = "SYS_MODULE"
PRIVILEGED_SYS_RESOURCE = "SYS_RESOURCE"
PRIVILEGED_SYS_PTRACE = "SYS_PTRACE"
PRIVILEGED_DAC_READ_SEARCH = "DAC_READ_SEARCH"

PRIVILEGED_ALL = [
    PRIVILEGED_NET_ADMIN,
    PRIVILEGED_SYS_ADMIN,
    PRIVILEGED_SYS_RAWIO,
    PRIVILEGED_IPC_LOCK,
    PRIVILEGED_SYS_TIME,
    PRIVILEGED_SYS_NICE,
    PRIVILEGED_SYS_RESOURCE,
    PRIVILEGED_SYS_PTRACE,
    PRIVILEGED_SYS_MODULE,
    PRIVILEGED_DAC_READ_SEARCH,
]

FEATURES_SHUTDOWN = "shutdown"
FEATURES_REBOOT = "reboot"
FEATURES_HASSOS = "hassos"
FEATURES_HOSTNAME = "hostname"
FEATURES_SERVICES = "services"

ROLE_DEFAULT = "default"
ROLE_HOMEASSISTANT = "homeassistant"
ROLE_BACKUP = "backup"
ROLE_MANAGER = "manager"
ROLE_ADMIN = "admin"

ROLE_ALL = [ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_BACKUP, ROLE_MANAGER, ROLE_ADMIN]

CHAN_ID = "chan_id"
CHAN_TYPE = "chan_type"
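Two of the constants above carry behavior worth spelling out: the version URL is keyed by release channel, and STARTUP_ALL encodes the boot order for add-ons. A small sketch (pure standard-library Python, values copied from the listing above):

URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
CHANNEL_STABLE = "stable"

url = URL_HASSIO_VERSION.format(channel=CHANNEL_STABLE)
# -> https://s3.amazonaws.com/hassio-version/stable.json

STARTUP_ALL = ["once", "initialize", "system", "services", "application"]
assert STARTUP_ALL.index("system") < STARTUP_ALL.index("application")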
hassio/core.py (228 changes)
@@ -1,179 +1,151 @@
"""Main file for HassIO."""
"""Main file for Hass.io."""
from contextlib import suppress
import asyncio
import logging

import aiohttp
import docker
import async_timeout

from .addons import AddonManager
from .api import RestAPI
from .host_control import HostControl
from .coresys import CoreSysAttributes
from .const import (
    SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
    RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
    RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
    STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
    RUN_UPDATE_ADDONS_TASKS)
from .hardware import Hardware
from .homeassistant import HomeAssistant
from .scheduler import Scheduler
from .dock.supervisor import DockerSupervisor
from .snapshots import SnapshotsManager
from .updater import Updater
from .tasks import (
    hassio_update, homeassistant_watchdog, api_sessions_cleanup, addons_update)
from .tools import get_local_ip, fetch_timezone
    STARTUP_SYSTEM,
    STARTUP_SERVICES,
    STARTUP_APPLICATION,
    STARTUP_INITIALIZE,
)
from .exceptions import HassioError, HomeAssistantError

_LOGGER = logging.getLogger(__name__)


class HassIO(object):
    """Main object of hassio."""
class HassIO(CoreSysAttributes):
    """Main object of Hass.io."""

    def __init__(self, loop, config):
        """Initialize hassio object."""
        self.exit_code = 0
        self.loop = loop
        self.config = config
        self.websession = aiohttp.ClientSession(loop=loop)
        self.updater = Updater(config, loop, self.websession)
        self.scheduler = Scheduler(loop)
        self.api = RestAPI(config, loop)
        self.hardware = Hardware()
        self.dock = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')

        # init basic docker container
        self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)

        # init homeassistant
        self.homeassistant = HomeAssistant(
            config, loop, self.dock, self.updater)

        # init HostControl
        self.host_control = HostControl(loop)

        # init addon system
        self.addons = AddonManager(config, loop, self.dock)

        # init snapshot system
        self.snapshots = SnapshotsManager(
            config, loop, self.scheduler, self.addons, self.homeassistant)
    def __init__(self, coresys):
        """Initialize Hass.io object."""
        self.coresys = coresys

    async def setup(self):
        """Setup HassIO orchestration."""
        # supervisor
        if not await self.supervisor.attach():
            _LOGGER.fatal("Can't attach to supervisor docker container!")
        await self.supervisor.cleanup()
        # Load Supervisor
        await self.sys_supervisor.load()

        # set running arch
        self.config.arch = self.supervisor.arch
        # Load DBus
        await self.sys_dbus.load()

        # set api endpoint
        self.config.api_endpoint = await get_local_ip(self.loop)
        # Load Host
        await self.sys_host.load()

        # update timezone
        if self.config.timezone == 'UTC':
            self.config.timezone = await fetch_timezone(self.websession)
        # Load Home Assistant
        await self.sys_homeassistant.load()

        # hostcontrol
        await self.host_control.load()
        # Load CPU/Arch
        await self.sys_arch.load()

        # schedule update info tasks
        self.scheduler.register_task(
            self.host_control.load, RUN_UPDATE_INFO_TASKS)
        # Load HassOS
        await self.sys_hassos.load()

        # Load Add-ons
        await self.sys_addons.load()

        # rest api views
        self.api.register_host(self.host_control, self.hardware)
        self.api.register_network(self.host_control)
        self.api.register_supervisor(
            self.supervisor, self.snapshots, self.addons, self.host_control,
            self.updater)
        self.api.register_homeassistant(self.homeassistant)
        self.api.register_addons(self.addons)
        self.api.register_security()
        self.api.register_snapshots(self.snapshots)
        self.api.register_panel()
        await self.sys_api.load()

        # schedule api session cleanup
        self.scheduler.register_task(
            api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
            now=True)
        # load last available data
        await self.sys_updater.load()

        # Load homeassistant
        await self.homeassistant.prepare()
        # load last available data
        await self.sys_snapshots.load()

        # Load addons
        await self.addons.prepare()
        # load services
        await self.sys_services.load()

        # schedule addon update task
        self.scheduler.register_task(
            self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
        self.scheduler.register_task(
            addons_update(self.loop, self.addons), RUN_UPDATE_ADDONS_TASKS)
        # Load discovery
        await self.sys_discovery.load()

        # schedule self update task
        self.scheduler.register_task(
            hassio_update(self.supervisor, self.updater),
            RUN_UPDATE_SUPERVISOR_TASKS)

        # schedule snapshot update tasks
        self.scheduler.register_task(
            self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)

        # start addon mark as initialize
        await self.addons.auto_boot(STARTUP_INITIALIZE)
        # start dns forwarding
        self.sys_create_task(self.sys_dns.start())

    async def start(self):
        """Start HassIO orchestration."""
        """Start Hass.io orchestration."""
        # on release channel, try update itself
        # on beta channel, only read new versions
        await asyncio.wait(
            [hassio_update(self.supervisor, self.updater)()],
            loop=self.loop
        )
        if self.sys_supervisor.need_update:
            if self.sys_dev:
                _LOGGER.warning("Ignore Hass.io updates on dev!")
            elif await self.sys_supervisor.update():
                return

        # start api
        await self.api.start()
        _LOGGER.info("Start hassio api on %s", self.config.api_endpoint)
        await self.sys_api.start()

        # start addon mark as initialize
        await self.sys_addons.boot(STARTUP_INITIALIZE)

        try:
            # HomeAssistant is already running / supervisor have only reboot
            if await self.homeassistant.is_running():
                _LOGGER.info("HassIO reboot detected")
            if self.sys_hardware.last_boot == self.sys_config.last_boot:
                _LOGGER.info("Hass.io reboot detected")
                return

            # reset register services / discovery
            self.sys_services.reset()

            # start addon mark as system
            await self.addons.auto_boot(STARTUP_SYSTEM)
            await self.sys_addons.boot(STARTUP_SYSTEM)

            # start addon mark as services
            await self.addons.auto_boot(STARTUP_SERVICES)
            await self.sys_addons.boot(STARTUP_SERVICES)

            # run HomeAssistant
            await self.homeassistant.run()
            if self.sys_homeassistant.boot:
                with suppress(HomeAssistantError):
                    await self.sys_homeassistant.start()

            # start addon mark as application
            await self.addons.auto_boot(STARTUP_APPLICATION)
            await self.sys_addons.boot(STARTUP_APPLICATION)

            # store new last boot
            self.sys_config.last_boot = self.sys_hardware.last_boot
            self.sys_config.save_data()

        finally:
            # schedule homeassistant watchdog
            self.scheduler.register_task(
                homeassistant_watchdog(self.loop, self.homeassistant),
                RUN_WATCHDOG_HOMEASSISTANT)
            # Add core tasks into scheduler
            await self.sys_tasks.load()

            # If landingpage / run upgrade in background
            if self.homeassistant.version == 'landingpage':
                self.loop.create_task(self.homeassistant.install())
            if self.sys_homeassistant.version == "landingpage":
                self.sys_create_task(self.sys_homeassistant.install())

    async def stop(self, exit_code=0):
        _LOGGER.info("Hass.io is up and running")

    async def stop(self):
        """Stop a running orchestration."""
        # don't process scheduler anymore
        self.scheduler.suspend = True
        self.sys_scheduler.suspend = True

        # process stop tasks
        self.websession.close()
        await self.api.stop()
        # process async stop tasks
        try:
            with async_timeout.timeout(10):
                await asyncio.wait(
                    [
                        self.sys_api.stop(),
                        self.sys_dns.stop(),
                        self.sys_websession.close(),
                        self.sys_websession_ssl.close(),
                    ]
                )
        except asyncio.TimeoutError:
            _LOGGER.warning("Force Shutdown!")

        self.exit_code = exit_code
        self.loop.stop()
        _LOGGER.info("Hass.io is down")

    async def shutdown(self):
        """Shutdown all running containers in correct order."""
        await self.sys_addons.shutdown(STARTUP_APPLICATION)

        # Close Home Assistant
        with suppress(HassioError):
            await self.sys_homeassistant.stop()

        await self.sys_addons.shutdown(STARTUP_SERVICES)
        await self.sys_addons.shutdown(STARTUP_SYSTEM)
        await self.sys_addons.shutdown(STARTUP_INITIALIZE)
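The setup()/start() split above walks add-ons through fixed boot stages in strict order. A minimal sketch of that ordering, assuming a stand-in manager object exposing the same boot() coroutine (not the real AddonManager):

STARTUP_SYSTEM = "system"
STARTUP_SERVICES = "services"
STARTUP_APPLICATION = "application"

async def boot_addons(addons):
    """Boot add-ons stage by stage; each stage finishes before the next starts."""
    for stage in (STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION):
        await addons.boot(stage)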
hassio/coresys.py (new file, 455 lines)
@@ -0,0 +1,455 @@
"""Handle core shared data."""
from __future__ import annotations
import asyncio
from typing import TYPE_CHECKING

import aiohttp

from .config import CoreConfig
from .const import CHANNEL_DEV
from .docker import DockerAPI
from .misc.dns import DNSForward
from .misc.hardware import Hardware
from .misc.scheduler import Scheduler

if TYPE_CHECKING:
    from .addons import AddonManager
    from .api import RestAPI
    from .arch import CpuArch
    from .auth import Auth
    from .core import HassIO
    from .dbus import DBusManager
    from .discovery import Discovery
    from .hassos import HassOS
    from .homeassistant import HomeAssistant
    from .host import HostManager
    from .services import ServiceManager
    from .snapshots import SnapshotManager
    from .supervisor import Supervisor
    from .tasks import Tasks
    from .updater import Updater


class CoreSys:
    """Class that handle all shared data."""

    def __init__(self):
        """Initialize coresys."""
        # Static attributes
        self.machine_id: str = None

        # External objects
        self._loop: asyncio.BaseEventLoop = asyncio.get_running_loop()
        self._websession: aiohttp.ClientSession = aiohttp.ClientSession()
        self._websession_ssl: aiohttp.ClientSession = aiohttp.ClientSession(
            connector=aiohttp.TCPConnector(ssl=False))

        # Global objects
        self._config: CoreConfig = CoreConfig()
        self._hardware: Hardware = Hardware()
        self._docker: DockerAPI = DockerAPI()
        self._scheduler: Scheduler = Scheduler()
        self._dns: DNSForward = DNSForward()

        # Internal objects pointers
        self._core: HassIO = None
        self._arch: CpuArch = None
        self._auth: Auth = None
        self._homeassistant: HomeAssistant = None
        self._supervisor: Supervisor = None
        self._addons: AddonManager = None
        self._api: RestAPI = None
        self._updater: Updater = None
        self._snapshots: SnapshotManager = None
        self._tasks: Tasks = None
        self._host: HostManager = None
        self._dbus: DBusManager = None
        self._hassos: HassOS = None
        self._services: ServiceManager = None
        self._discovery: Discovery = None

    @property
    def machine(self) -> str:
        """Return running machine type of the Hass.io system."""
        if self._homeassistant:
            return self._homeassistant.machine
        return None

    @property
    def dev(self) -> str:
        """Return True if we run dev mode."""
        return self._updater.channel == CHANNEL_DEV

    @property
    def timezone(self) -> str:
        """Return timezone."""
        return self._config.timezone

    @property
    def loop(self) -> asyncio.BaseEventLoop:
        """Return loop object."""
        return self._loop

    @property
    def websession(self) -> aiohttp.ClientSession:
        """Return websession object."""
        return self._websession

    @property
    def websession_ssl(self) -> aiohttp.ClientSession:
        """Return websession object with disabled SSL."""
        return self._websession_ssl

    @property
    def config(self) -> CoreConfig:
        """Return CoreConfig object."""
        return self._config

    @property
    def hardware(self) -> Hardware:
        """Return Hardware object."""
        return self._hardware

    @property
    def docker(self) -> DockerAPI:
        """Return DockerAPI object."""
        return self._docker

    @property
    def scheduler(self) -> Scheduler:
        """Return Scheduler object."""
        return self._scheduler

    @property
    def dns(self) -> DNSForward:
        """Return DNSForward object."""
        return self._dns

    @property
    def core(self) -> HassIO:
        """Return HassIO object."""
        return self._core

    @core.setter
    def core(self, value: HassIO):
        """Set a Hass.io object."""
        if self._core:
            raise RuntimeError("Hass.io already set!")
        self._core = value

    @property
    def arch(self) -> CpuArch:
        """Return CpuArch object."""
        return self._arch

    @arch.setter
    def arch(self, value: CpuArch):
        """Set a CpuArch object."""
        if self._arch:
            raise RuntimeError("CpuArch already set!")
        self._arch = value

    @property
    def auth(self) -> Auth:
        """Return Auth object."""
        return self._auth

    @auth.setter
    def auth(self, value: Auth):
        """Set a Auth object."""
        if self._auth:
            raise RuntimeError("Auth already set!")
        self._auth = value

    @property
    def homeassistant(self) -> HomeAssistant:
        """Return Home Assistant object."""
        return self._homeassistant

    @homeassistant.setter
    def homeassistant(self, value: HomeAssistant):
        """Set a HomeAssistant object."""
        if self._homeassistant:
            raise RuntimeError("Home Assistant already set!")
        self._homeassistant = value

    @property
    def supervisor(self) -> Supervisor:
        """Return Supervisor object."""
        return self._supervisor

    @supervisor.setter
    def supervisor(self, value: Supervisor):
        """Set a Supervisor object."""
        if self._supervisor:
            raise RuntimeError("Supervisor already set!")
        self._supervisor = value

    @property
    def api(self) -> RestAPI:
        """Return API object."""
        return self._api

    @api.setter
    def api(self, value: RestAPI):
        """Set an API object."""
        if self._api:
            raise RuntimeError("API already set!")
        self._api = value

    @property
    def updater(self) -> Updater:
        """Return Updater object."""
        return self._updater

    @updater.setter
    def updater(self, value: Updater):
        """Set a Updater object."""
        if self._updater:
            raise RuntimeError("Updater already set!")
        self._updater = value

    @property
    def addons(self) -> AddonManager:
        """Return AddonManager object."""
        return self._addons

    @addons.setter
    def addons(self, value: AddonManager):
        """Set a AddonManager object."""
        if self._addons:
            raise RuntimeError("AddonManager already set!")
        self._addons = value

    @property
    def snapshots(self) -> SnapshotManager:
        """Return SnapshotManager object."""
        return self._snapshots

    @snapshots.setter
    def snapshots(self, value: SnapshotManager):
        """Set a SnapshotManager object."""
        if self._snapshots:
            raise RuntimeError("SnapshotsManager already set!")
        self._snapshots = value

    @property
    def tasks(self) -> Tasks:
        """Return Tasks object."""
        return self._tasks

    @tasks.setter
    def tasks(self, value: Tasks):
        """Set a Tasks object."""
        if self._tasks:
            raise RuntimeError("Tasks already set!")
        self._tasks = value

    @property
    def services(self) -> ServiceManager:
        """Return ServiceManager object."""
        return self._services

    @services.setter
    def services(self, value: ServiceManager):
        """Set a ServiceManager object."""
        if self._services:
            raise RuntimeError("Services already set!")
        self._services = value

    @property
    def discovery(self) -> Discovery:
        """Return Discovery object."""
        return self._discovery

    @discovery.setter
    def discovery(self, value: Discovery):
        """Set a Discovery object."""
        if self._discovery:
            raise RuntimeError("Discovery already set!")
        self._discovery = value

    @property
    def dbus(self) -> DBusManager:
        """Return DBusManager object."""
        return self._dbus

    @dbus.setter
    def dbus(self, value: DBusManager):
        """Set a DBusManager object."""
        if self._dbus:
            raise RuntimeError("DBusManager already set!")
        self._dbus = value

    @property
    def host(self) -> HostManager:
        """Return HostManager object."""
        return self._host

    @host.setter
    def host(self, value: HostManager):
        """Set a HostManager object."""
        if self._host:
            raise RuntimeError("HostManager already set!")
        self._host = value

    @property
    def hassos(self) -> HassOS:
        """Return HassOS object."""
        return self._hassos

    @hassos.setter
    def hassos(self, value: HassOS):
        """Set a HassOS object."""
        if self._hassos:
            raise RuntimeError("HassOS already set!")
        self._hassos = value


class CoreSysAttributes:
    """Inherit basic CoreSysAttributes."""

    coresys = None

    @property
    def sys_machine(self) -> str:
        """Return running machine type of the Hass.io system."""
        return self.coresys.machine

    @property
    def sys_dev(self) -> str:
        """Return True if we run dev mode."""
        return self.coresys.dev

    @property
    def sys_timezone(self) -> str:
        """Return timezone."""
        return self.coresys.timezone

    @property
    def sys_machine_id(self) -> str:
        """Return machine ID."""
        return self.coresys.machine_id

    @property
    def sys_loop(self) -> asyncio.BaseEventLoop:
        """Return loop object."""
        return self.coresys.loop

    @property
    def sys_websession(self) -> aiohttp.ClientSession:
        """Return websession object."""
        return self.coresys.websession

    @property
    def sys_websession_ssl(self) -> aiohttp.ClientSession:
        """Return websession object with disabled SSL."""
        return self.coresys.websession_ssl

    @property
    def sys_config(self) -> CoreConfig:
        """Return CoreConfig object."""
        return self.coresys.config

    @property
    def sys_hardware(self) -> Hardware:
        """Return Hardware object."""
        return self.coresys.hardware

    @property
    def sys_docker(self) -> DockerAPI:
        """Return DockerAPI object."""
        return self.coresys.docker

    @property
    def sys_scheduler(self) -> Scheduler:
        """Return Scheduler object."""
        return self.coresys.scheduler

    @property
    def sys_dns(self) -> DNSForward:
        """Return DNSForward object."""
        return self.coresys.dns

    @property
    def sys_core(self) -> HassIO:
        """Return HassIO object."""
        return self.coresys.core

    @property
    def sys_arch(self) -> CpuArch:
        """Return CpuArch object."""
        return self.coresys.arch

    @property
    def sys_auth(self) -> Auth:
        """Return Auth object."""
        return self.coresys.auth

    @property
    def sys_homeassistant(self) -> HomeAssistant:
        """Return Home Assistant object."""
        return self.coresys.homeassistant

    @property
    def sys_supervisor(self) -> Supervisor:
        """Return Supervisor object."""
        return self.coresys.supervisor

    @property
    def sys_api(self) -> RestAPI:
        """Return API object."""
        return self.coresys.api

    @property
    def sys_updater(self) -> Updater:
        """Return Updater object."""
        return self.coresys.updater

    @property
    def sys_addons(self) -> AddonManager:
        """Return AddonManager object."""
        return self.coresys.addons

    @property
    def sys_snapshots(self) -> SnapshotManager:
        """Return SnapshotManager object."""
        return self.coresys.snapshots

    @property
    def sys_tasks(self) -> Tasks:
        """Return Tasks object."""
        return self.coresys.tasks

    @property
    def sys_services(self) -> ServiceManager:
        """Return ServiceManager object."""
        return self.coresys.services

    @property
    def sys_discovery(self) -> Discovery:
        """Return Discovery object."""
        return self.coresys.discovery

    @property
    def sys_dbus(self) -> DBusManager:
        """Return DBusManager object."""
        return self.coresys.dbus

    @property
    def sys_host(self) -> HostManager:
        """Return HostManager object."""
        return self.coresys.host

    @property
    def sys_hassos(self) -> HassOS:
        """Return HassOS object."""
        return self.coresys.hassos

    def sys_run_in_executor(self, funct, *args) -> asyncio.Future:
        """Wrapper for executor pool."""
        return self.sys_loop.run_in_executor(None, funct, *args)

    def sys_create_task(self, coroutine) -> asyncio.Task:
        """Wrapper for async task."""
        return self.sys_loop.create_task(coroutine)
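How a component hooks into this shared-state pattern, as a hedged sketch: a class stores self.coresys in its constructor and inherits CoreSysAttributes (from the listing above) to get the sys_* accessors. The Monitor class here is hypothetical, not part of this diff:

class Monitor(CoreSysAttributes):
    """Hypothetical component wired onto the shared CoreSys instance."""

    def __init__(self, coresys):
        self.coresys = coresys

    def report(self):
        # Every sys_* property resolves through the single CoreSys object,
        # so all components observe the same machine/dev/timezone state.
        return f"machine={self.sys_machine} dev={self.sys_dev}"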
hassio/dbus/__init__.py (new file, 39 lines)
@@ -0,0 +1,39 @@
"""D-Bus interface objects."""

from .systemd import Systemd
from .hostname import Hostname
from .rauc import Rauc
from ..coresys import CoreSysAttributes


class DBusManager(CoreSysAttributes):
    """A DBus Interface handler."""

    def __init__(self, coresys):
        """Initialize D-Bus interface."""
        self.coresys = coresys

        self._systemd = Systemd()
        self._hostname = Hostname()
        self._rauc = Rauc()

    @property
    def systemd(self):
        """Return the systemd interface."""
        return self._systemd

    @property
    def hostname(self):
        """Return the hostname interface."""
        return self._hostname

    @property
    def rauc(self):
        """Return the rauc interface."""
        return self._rauc

    async def load(self):
        """Connect interfaces to D-Bus."""
        await self.systemd.connect()
        await self.hostname.connect()
        await self.rauc.connect()
hassio/dbus/hostname.py (new file, 39 lines)
@@ -0,0 +1,39 @@
"""D-Bus interface for hostname."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'org.freedesktop.hostname1'
DBUS_OBJECT = '/org/freedesktop/hostname1'


class Hostname(DBusInterface):
    """Handle D-Bus interface for hostname/system."""

    async def connect(self):
        """Connect to system's D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to hostname")

    @dbus_connected
    def set_static_hostname(self, hostname):
        """Change local hostname.

        Return a coroutine.
        """
        return self.dbus.SetStaticHostname(hostname, False)

    @dbus_connected
    def get_properties(self):
        """Return local host information.

        Return a coroutine.
        """
        return self.dbus.get_properties(DBUS_NAME)
hassio/dbus/interface.py (new file, 18 lines)
@@ -0,0 +1,18 @@
"""Interface class for D-Bus wrappers."""


class DBusInterface:
    """Handle D-Bus interface for hostname/system."""

    def __init__(self):
        """Initialize the D-Bus interface."""
        self.dbus = None

    @property
    def is_connected(self):
        """Return True if it is connected to D-Bus."""
        return self.dbus is not None

    async def connect(self):
        """Connect to D-Bus."""
        raise NotImplementedError()
hassio/dbus/rauc.py (new file, 55 lines)
@@ -0,0 +1,55 @@
"""D-Bus interface for rauc."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'de.pengutronix.rauc'
DBUS_OBJECT = '/'


class Rauc(DBusInterface):
    """Handle D-Bus interface for rauc."""

    async def connect(self):
        """Connect to D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to rauc")

    @dbus_connected
    def install(self, raucb_file):
        """Install rauc bundle file.

        Return a coroutine.
        """
        return self.dbus.Installer.Install(raucb_file)

    @dbus_connected
    def get_slot_status(self):
        """Get slot status.

        Return a coroutine.
        """
        return self.dbus.Installer.GetSlotStatus()

    @dbus_connected
    def get_properties(self):
        """Return rauc information.

        Return a coroutine.
        """
        return self.dbus.get_properties(f"{DBUS_NAME}.Installer")

    @dbus_connected
    def signal_completed(self):
        """Return a signal wrapper for completed signal.

        Return a coroutine.
        """
        return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
hassio/dbus/systemd.py (new file, 79 lines)
@@ -0,0 +1,79 @@
"""Interface to Systemd over D-Bus."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'org.freedesktop.systemd1'
DBUS_OBJECT = '/org/freedesktop/systemd1'


class Systemd(DBusInterface):
    """Systemd function handler."""

    async def connect(self):
        """Connect to D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to systemd")

    @dbus_connected
    def reboot(self):
        """Reboot host computer.

        Return a coroutine.
        """
        return self.dbus.Manager.Reboot()

    @dbus_connected
    def power_off(self):
        """Power off host computer.

        Return a coroutine.
        """
        return self.dbus.Manager.PowerOff()

    @dbus_connected
    def start_unit(self, unit, mode):
        """Start a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.StartUnit(unit, mode)

    @dbus_connected
    def stop_unit(self, unit, mode):
        """Stop a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.StopUnit(unit, mode)

    @dbus_connected
    def reload_unit(self, unit, mode):
        """Reload a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.ReloadOrRestartUnit(unit, mode)

    @dbus_connected
    def restart_unit(self, unit, mode):
        """Restart a systemd service unit.

        Return a coroutine.
        """
        return self.dbus.Manager.RestartUnit(unit, mode)

    @dbus_connected
    def list_units(self):
        """Return a list of available systemd services.

        Return a coroutine.
        """
        return self.dbus.Manager.ListUnits()
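The wrapped methods above return coroutines from the underlying D-Bus proxy rather than awaiting them, so callers await the returned object. A usage sketch; the unit name and "replace" mode are illustrative values, not from this diff:

async def restart_unit_example(systemd):
    """Restart a unit if the Systemd wrapper managed to connect."""
    if not systemd.is_connected:
        return
    await systemd.restart_unit("NetworkManager.service", "replace")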
hassio/dbus/utils.py (new file, 14 lines)
@@ -0,0 +1,14 @@
"""Utils for D-Bus."""

from ..exceptions import DBusNotConnectedError


def dbus_connected(method):
    """Decorator that checks if D-Bus is connected."""
    def wrap_dbus(api, *args, **kwargs):
        """Check the D-Bus connection before calling the method."""
        if api.dbus is None:
            raise DBusNotConnectedError()
        return method(api, *args, **kwargs)

    return wrap_dbus
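A self-contained sketch of what this guard buys: any wrapped method raises before touching D-Bus when connect() has not run. The Example class and the local exception stand in for the real ones:

class DBusNotConnectedError(Exception):
    """Stand-in for hassio.exceptions.DBusNotConnectedError."""

def dbus_connected(method):
    """Decorator that checks if D-Bus is connected."""
    def wrap_dbus(api, *args, **kwargs):
        if api.dbus is None:
            raise DBusNotConnectedError()
        return method(api, *args, **kwargs)
    return wrap_dbus

class Example:
    dbus = None  # connect() never ran

    @dbus_connected
    def ping(self):
        return "pong"

try:
    Example().ping()
except DBusNotConnectedError:
    print("guarded: connect() must succeed first")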
hassio/discovery/__init__.py (new file, 132 lines)
@@ -0,0 +1,132 @@
"""Handle discover message for Home Assistant."""
from __future__ import annotations

from contextlib import suppress
import logging
from typing import Any, Dict, List, Optional, TYPE_CHECKING
from uuid import uuid4, UUID

import attr
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..const import ATTR_CONFIG, ATTR_DISCOVERY, FILE_HASSIO_DISCOVERY
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import DiscoveryError, HomeAssistantAPIError
from ..utils.json import JsonConfig
from .validate import SCHEMA_DISCOVERY_CONFIG, valid_discovery_config

if TYPE_CHECKING:
    from ..addons.addon import Addon

_LOGGER = logging.getLogger(__name__)

CMD_NEW = "post"
CMD_DEL = "delete"


@attr.s
class Message:
    """Represent a single Discovery message."""

    addon: str = attr.ib()
    service: str = attr.ib()
    config: Dict[str, Any] = attr.ib(cmp=False)
    uuid: UUID = attr.ib(factory=lambda: uuid4().hex, cmp=False)


class Discovery(CoreSysAttributes, JsonConfig):
    """Home Assistant Discovery handler."""

    def __init__(self, coresys: CoreSys):
        """Initialize discovery handler."""
        super().__init__(FILE_HASSIO_DISCOVERY, SCHEMA_DISCOVERY_CONFIG)
        self.coresys: CoreSys = coresys
        self.message_obj: Dict[str, Message] = {}

    async def load(self) -> None:
        """Load existing discovery messages into storage."""
        messages = {}
        for message in self._data[ATTR_DISCOVERY]:
            discovery = Message(**message)
            messages[discovery.uuid] = discovery

        _LOGGER.info("Load %d messages", len(messages))
        self.message_obj = messages

    def save(self) -> None:
        """Write discovery messages into the data file."""
        messages: List[Dict[str, Any]] = []
        for message in self.list_messages:
            messages.append(attr.asdict(message))

        self._data[ATTR_DISCOVERY].clear()
        self._data[ATTR_DISCOVERY].extend(messages)
        self.save_data()

    def get(self, uuid: str) -> Optional[Message]:
        """Return discovery message."""
        return self.message_obj.get(uuid)

    @property
    def list_messages(self) -> List[Message]:
        """Return list of available discovery messages."""
        return list(self.message_obj.values())

    def send(self, addon: Addon, service: str, config: Dict[str, Any]) -> Message:
        """Send a discovery message to Home Assistant."""
        try:
            config = valid_discovery_config(service, config)
        except vol.Invalid as err:
            _LOGGER.error("Invalid discovery %s config", humanize_error(config, err))
            raise DiscoveryError() from None

        # Create message
        message = Message(addon.slug, service, config)

        # Already exists?
        for old_message in self.list_messages:
            if old_message != message:
                continue
            _LOGGER.info("Duplicate discovery message from %s", addon.slug)
            return old_message

        _LOGGER.info("Send discovery to Home Assistant %s from %s", service, addon.slug)
        self.message_obj[message.uuid] = message
        self.save()

        self.sys_create_task(self._push_discovery(message, CMD_NEW))
        return message

    def remove(self, message: Message) -> None:
        """Remove a discovery message from Home Assistant."""
        self.message_obj.pop(message.uuid, None)
        self.save()

        _LOGGER.info(
            "Delete discovery to Home Assistant %s from %s",
            message.service,
            message.addon,
        )
        self.sys_create_task(self._push_discovery(message, CMD_DEL))

    async def _push_discovery(self, message: Message, command: str) -> None:
        """Send a discovery request."""
        if not await self.sys_homeassistant.check_api_state():
            _LOGGER.info("Discovery %s message ignored", message.uuid)
            return

        data = attr.asdict(message)
        data.pop(ATTR_CONFIG)

        with suppress(HomeAssistantAPIError):
            async with self.sys_homeassistant.make_request(
                command,
                f"api/hassio_push/discovery/{message.uuid}",
                json=data,
                timeout=10,
            ):
                _LOGGER.info("Discovery %s message sent", message.uuid)
                return

        _LOGGER.warning("Discovery %s message failed", message.uuid)
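The add-on side of this flow, as an illustrative sketch: send() validates the payload against the per-service schema, dedupes, persists, and pushes in the background. The discovery instance, addon object, and config values here are assumed, not part of this diff:

config = {"host": "172.30.32.1", "port": 1883}
message = discovery.send(addon, "mqtt", config)  # validated, stored, pushed async
print(message.uuid)                              # handle used later for removal
discovery.remove(message)                        # schedules the delete push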
hassio/discovery/const.py (new file, 8 lines)
@@ -0,0 +1,8 @@
"""Discovery static data."""

ATTR_HOST = "host"
ATTR_PASSWORD = "password"
ATTR_PORT = "port"
ATTR_PROTOCOL = "protocol"
ATTR_SSL = "ssl"
ATTR_USERNAME = "username"
hassio/discovery/services/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
"""Discovery service modules."""
hassio/discovery/services/deconz.py (new file, 11 lines)
@@ -0,0 +1,11 @@
"""Discovery service for deCONZ."""
import voluptuous as vol

from hassio.validate import NETWORK_PORT

from ..const import ATTR_HOST, ATTR_PORT


SCHEMA = vol.Schema(
    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): NETWORK_PORT}
)
hassio/discovery/services/mqtt.py (new file, 27 lines)
@@ -0,0 +1,27 @@
"""Discovery service for MQTT."""
import voluptuous as vol

from hassio.validate import NETWORK_PORT

from ..const import (
    ATTR_HOST,
    ATTR_PASSWORD,
    ATTR_PORT,
    ATTR_PROTOCOL,
    ATTR_SSL,
    ATTR_USERNAME,
)

# pylint: disable=no-value-for-parameter
SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_HOST): vol.Coerce(str),
        vol.Required(ATTR_PORT): NETWORK_PORT,
        vol.Optional(ATTR_USERNAME): vol.Coerce(str),
        vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
        vol.Optional(ATTR_PROTOCOL, default="3.1.1"): vol.All(
            vol.Coerce(str), vol.In(["3.1", "3.1.1"])
        ),
    }
)
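Voluptuous fills the declared defaults during validation, so a minimal payload comes back completed. A quick check against the schema above (values illustrative):

validated = SCHEMA({"host": "172.30.32.1", "port": 1883})
assert validated["ssl"] is False           # default applied
assert validated["protocol"] == "3.1.1"    # default applied
# SCHEMA({"host": "x", "port": 1883, "protocol": "5.0"}) would raise vol.Invalid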
hassio/discovery/validate.py (new file, 47 lines)
@@ -0,0 +1,47 @@
"""Validate services schema."""
from pathlib import Path
from importlib import import_module

import voluptuous as vol

from ..const import ATTR_ADDON, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_UUID
from ..utils.validate import schema_or
from ..validate import UUID_MATCH


def valid_discovery_service(service):
    """Validate service name."""
    service_file = Path(__file__).parent.joinpath(f"services/{service}.py")
    if not service_file.exists():
        raise vol.Invalid(f"Service {service} not found")
    return service


def valid_discovery_config(service, config):
    """Validate service configuration."""
    try:
        service_mod = import_module(f".services.{service}", "hassio.discovery")
    except ImportError:
        raise vol.Invalid(f"Service {service} not found")

    return service_mod.SCHEMA(config)


SCHEMA_DISCOVERY = vol.Schema(
    [
        vol.Schema(
            {
                vol.Required(ATTR_UUID): UUID_MATCH,
                vol.Required(ATTR_ADDON): vol.Coerce(str),
                vol.Required(ATTR_SERVICE): valid_discovery_service,
                vol.Required(ATTR_CONFIG): vol.Maybe(dict),
            },
            extra=vol.REMOVE_EXTRA,
        )
    ]
)

SCHEMA_DISCOVERY_CONFIG = vol.Schema(
    {vol.Optional(ATTR_DISCOVERY, default=list): schema_or(SCHEMA_DISCOVERY)},
    extra=vol.REMOVE_EXTRA,
)
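Usage sketch for the validators above: the service name doubles as a module name under hassio/discovery/services/, so "mqtt" resolves to the schema shown earlier while an unknown name raises vol.Invalid. The config values are illustrative:

import voluptuous as vol

try:
    config = valid_discovery_config("mqtt", {"host": "core-mosquitto", "port": 1883})
except vol.Invalid as err:
    print(f"rejected: {err}")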
@@ -1,324 +0,0 @@
"""Init file for HassIO docker object."""
import asyncio
from contextlib import suppress
import logging

import docker

from .util import docker_process
from ..const import LABEL_VERSION, LABEL_ARCH

_LOGGER = logging.getLogger(__name__)


class DockerBase(object):
    """Docker hassio wrapper."""

    def __init__(self, config, loop, dock, image=None, timeout=30):
        """Initialize docker base wrapper."""
        self.config = config
        self.loop = loop
        self.dock = dock
        self.image = image
        self.timeout = timeout
        self.version = None
        self.arch = None
        self._lock = asyncio.Lock(loop=loop)

    @property
    def name(self):
        """Return name of docker container."""
        return None

    @property
    def in_progress(self):
        """Return True if a task is in progress."""
        return self._lock.locked()

    def process_metadata(self, metadata, force=False):
        """Read metadata and set it to object."""
        # read image
        if not self.image:
            self.image = metadata['Config']['Image']

        # read version
        need_version = force or not self.version
        if need_version and LABEL_VERSION in metadata['Config']['Labels']:
            self.version = metadata['Config']['Labels'][LABEL_VERSION]
        elif need_version:
            _LOGGER.warning("Can't read version from %s", self.name)

        # read arch
        need_arch = force or not self.arch
        if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
            self.arch = metadata['Config']['Labels'][LABEL_ARCH]

    @docker_process
    def install(self, tag):
        """Pull docker image."""
        return self.loop.run_in_executor(None, self._install, tag)

    def _install(self, tag):
        """Pull docker image.

        Need run inside executor.
        """
        try:
            _LOGGER.info("Pull image %s tag %s.", self.image, tag)
            image = self.dock.images.pull("{}:{}".format(self.image, tag))

            image.tag(self.image, tag='latest')
            self.process_metadata(image.attrs, force=True)
        except docker.errors.APIError as err:
            _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
            return False

        _LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
        return True

    def exists(self):
        """Return True if docker image exists in local repo."""
        return self.loop.run_in_executor(None, self._exists)

    def _exists(self):
        """Return True if docker image exists in local repo.

        Need run inside executor.
        """
        try:
            self.dock.images.get(self.image)
        except docker.errors.DockerException:
            return False

        return True

    def is_running(self):
        """Return True if docker is running.

        Return a Future.
        """
        return self.loop.run_in_executor(None, self._is_running)

    def _is_running(self):
        """Return True if docker is running.

        Need run inside executor.
        """
        try:
            container = self.dock.containers.get(self.name)
            image = self.dock.images.get(self.image)
        except docker.errors.DockerException:
            return False

        # container is not running
        if container.status != 'running':
            return False

        # we run on an old image, stop and start it
        if container.image.id != image.id:
            return False

        return True

    @docker_process
    def attach(self):
        """Attach to running docker container."""
        return self.loop.run_in_executor(None, self._attach)

    def _attach(self):
        """Attach to running docker container.

        Need run inside executor.
        """
        try:
            if self.image:
                obj_data = self.dock.images.get(self.image).attrs
            else:
                obj_data = self.dock.containers.get(self.name).attrs
        except docker.errors.DockerException:
            return False

        self.process_metadata(obj_data)
        _LOGGER.info(
            "Attach to image %s with version %s", self.image, self.version)

        return True

    @docker_process
    def run(self):
        """Run docker image."""
        return self.loop.run_in_executor(None, self._run)

    def _run(self):
        """Run docker image.

        Need run inside executor.
        """
        raise NotImplementedError()

    @docker_process
    def stop(self):
        """Stop/remove docker container."""
        return self.loop.run_in_executor(None, self._stop)

    def _stop(self):
        """Stop and remove docker container.

        Need run inside executor.
        """
        try:
            container = self.dock.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        if container.status == 'running':
            _LOGGER.info("Stop %s docker application", self.image)
            with suppress(docker.errors.DockerException):
                container.stop(timeout=self.timeout)

        with suppress(docker.errors.DockerException):
            _LOGGER.info("Clean %s docker application", self.image)
            container.remove(force=True)

        return True

    @docker_process
    def remove(self):
        """Remove docker images."""
        return self.loop.run_in_executor(None, self._remove)

    def _remove(self):
        """Remove docker images.

        Need run inside executor.
        """
        # cleanup container
        self._stop()

        _LOGGER.info(
            "Remove docker %s with latest and %s", self.image, self.version)

        try:
            with suppress(docker.errors.ImageNotFound):
                self.dock.images.remove(
                    image="{}:latest".format(self.image), force=True)

            with suppress(docker.errors.ImageNotFound):
                self.dock.images.remove(
                    image="{}:{}".format(self.image, self.version), force=True)

        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't remove image %s -> %s", self.image, err)
            return False

        # clean metadata
        self.version = None
        self.arch = None

        return True

    @docker_process
    def update(self, tag):
        """Update a docker image."""
        return self.loop.run_in_executor(None, self._update, tag)

    def _update(self, tag):
        """Update a docker image.

        Need run inside executor.
        """
        _LOGGER.info(
            "Update docker %s with %s:%s", self.version, self.image, tag)

        # update docker image
        if not self._install(tag):
            return False

        # stop container & cleanup
        self._stop()
        self._cleanup()

        return True

    @docker_process
    def logs(self):
        """Return docker logs of container."""
        return self.loop.run_in_executor(None, self._logs)

    def _logs(self):
        """Return docker logs of container.

        Need run inside executor.
        """
        try:
            container = self.dock.containers.get(self.name)
        except docker.errors.DockerException:
            return b""

        try:
            return container.logs(tail=100, stdout=True, stderr=True)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't grab logs from %s -> %s", self.image, err)

    @docker_process
    def restart(self):
        """Restart docker container."""
        return self.loop.run_in_executor(None, self._restart)

    def _restart(self):
        """Restart docker container.

        Need run inside executor.
        """
        try:
            container = self.dock.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        _LOGGER.info("Restart %s", self.image)

        try:
            container.restart(timeout=self.timeout)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't restart %s -> %s", self.image, err)
            return False

        return True

    @docker_process
    def cleanup(self):
        """Check if old version exists and cleanup."""
        return self.loop.run_in_executor(None, self._cleanup)

    def _cleanup(self):
        """Check if old version exists and cleanup.

        Need run inside executor.
        """
        try:
            latest = self.dock.images.get(self.image)
        except docker.errors.DockerException:
            _LOGGER.warning("Can't find %s for cleanup", self.image)
            return False

        for image in self.dock.images.list(name=self.image):
            if latest.id == image.id:
                continue

            with suppress(docker.errors.DockerException):
                _LOGGER.info("Cleanup docker images: %s", image.tags)
                self.dock.images.remove(image.id, force=True)

        return True

    @docker_process
    def execute_command(self, command):
        """Create a temporary container and run command."""
        return self.loop.run_in_executor(None, self._execute_command, command)

    def _execute_command(self, command):
        """Create a temporary container and run command.

        Need run inside executor.
        """
        raise NotImplementedError()
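Every public method above hands its blocking `_*` twin to `loop.run_in_executor` and returns the resulting future, so callers simply await them from the event loop. A hedged caller-side sketch; the wiring of `docker_base` is assumed:

# `docker_base` is any DockerBase subclass instance; awaiting the wrapper
# methods runs the blocking docker-py calls in a thread pool.
async def ensure_running(docker_base, tag):
    if not await docker_base.exists():
        await docker_base.install(tag)   # pulls image inside executor thread
    if not await docker_base.is_running():
        return await docker_base.run()   # subclass provides _run()
    return True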
@@ -1,280 +0,0 @@
"""Init file for HassIO addon docker object."""
import logging
from pathlib import Path
import shutil

import docker
import requests

from . import DockerBase
from .util import dockerfile_template, docker_process
from ..const import (
    META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)

_LOGGER = logging.getLogger(__name__)

AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"


class DockerAddon(DockerBase):
    """Docker hassio wrapper for add-ons."""

    def __init__(self, config, loop, dock, addon):
        """Initialize docker add-on wrapper."""
        super().__init__(
            config, loop, dock, image=addon.image, timeout=addon.timeout)
        self.addon = addon

    @property
    def name(self):
        """Return name of docker container."""
        return "addon_{}".format(self.addon.slug)

    @property
    def hostname(self):
        """Return slug/id of addon."""
        return self.addon.slug.replace('_', '-')

    @property
    def environment(self):
        """Return environment for docker add-on."""
        addon_env = self.addon.environment or {}
        if self.addon.with_audio:
            addon_env.update({
                'ALSA_OUTPUT': self.addon.audio_output,
                'ALSA_INPUT': self.addon.audio_input,
            })

        return {
            **addon_env,
            'TZ': self.config.timezone,
        }

    @property
    def devices(self):
        """Return needed devices."""
        devices = self.addon.devices or []

        # use audio devices
        if self.addon.with_audio and AUDIO_DEVICE not in devices:
            devices.append(AUDIO_DEVICE)

        # Return None if no devices are present
        if devices:
            return devices
        return None

    @property
    def tmpfs(self):
        """Return tmpfs for docker add-on."""
        options = self.addon.tmpfs
        if options:
            return {"/tmpfs": "{}".format(options)}
        return None

    @property
    def mapping(self):
        """Return hosts mapping."""
        if not self.addon.use_hassio_api:
            return None

        return {
            'hassio': self.config.api_endpoint,
        }

    @property
    def volumes(self):
        """Generate volumes for mappings."""
        volumes = {
            str(self.addon.path_extern_data): {
                'bind': '/data', 'mode': 'rw'
            }}

        addon_mapping = self.addon.map_volumes

        if MAP_CONFIG in addon_mapping:
            volumes.update({
                str(self.config.path_extern_config): {
                    'bind': '/config', 'mode': addon_mapping[MAP_CONFIG]
                }})

        if MAP_SSL in addon_mapping:
            volumes.update({
                str(self.config.path_extern_ssl): {
                    'bind': '/ssl', 'mode': addon_mapping[MAP_SSL]
                }})

        if MAP_ADDONS in addon_mapping:
            volumes.update({
                str(self.config.path_extern_addons_local): {
                    'bind': '/addons', 'mode': addon_mapping[MAP_ADDONS]
                }})

        if MAP_BACKUP in addon_mapping:
            volumes.update({
                str(self.config.path_extern_backup): {
                    'bind': '/backup', 'mode': addon_mapping[MAP_BACKUP]
                }})

        if MAP_SHARE in addon_mapping:
            volumes.update({
                str(self.config.path_extern_share): {
                    'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
                }})

        return volumes

    def _run(self):
        """Run docker image.

        Need run inside executor.
        """
        if self._is_running():
            return True

        # cleanup
        self._stop()

        # write config
        if not self.addon.write_options():
            return False

        try:
            self.dock.containers.run(
                self.image,
                name=self.name,
                hostname=self.hostname,
                detach=True,
                network_mode=self.addon.network_mode,
                ports=self.addon.ports,
                extra_hosts=self.mapping,
                devices=self.devices,
                cap_add=self.addon.privileged,
                environment=self.environment,
                volumes=self.volumes,
                tmpfs=self.tmpfs
            )

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False

        _LOGGER.info(
            "Start docker addon %s with version %s", self.image, self.version)
        return True

    def _install(self, tag):
        """Pull docker image or build it.

        Need run inside executor.
        """
        if self.addon.need_build:
            return self._build(tag)

        return super()._install(tag)

    def _build(self, tag):
        """Build a docker container.

        Need run inside executor.
        """
        build_dir = Path(self.config.path_tmp, self.addon.slug)
        try:
            # prepare temporary addon build folder
            try:
                source = self.addon.path_location
                shutil.copytree(str(source), str(build_dir))
            except shutil.Error as err:
                _LOGGER.error("Can't copy %s to temporary build folder -> %s",
                              source, err)
                return False

            # prepare Dockerfile
            try:
                dockerfile_template(
                    Path(build_dir, 'Dockerfile'), self.config.arch,
                    tag, META_ADDON)
            except OSError as err:
                _LOGGER.error("Can't prepare dockerfile -> %s", err)
                return False

            # run docker build
            try:
                build_tag = "{}:{}".format(self.image, tag)

                _LOGGER.info("Start build %s on %s", build_tag, build_dir)
                image = self.dock.images.build(
                    path=str(build_dir), tag=build_tag, pull=True)

                image.tag(self.image, tag='latest')
                self.process_metadata(image.attrs, force=True)

            except (docker.errors.DockerException, TypeError) as err:
                _LOGGER.error("Can't build %s -> %s", build_tag, err)
                return False

            _LOGGER.info("Build %s done", build_tag)
            return True

        finally:
            shutil.rmtree(str(build_dir), ignore_errors=True)

    @docker_process
    def export_image(self, path):
        """Export current images into a tar file."""
        return self.loop.run_in_executor(None, self._export_image, path)

    def _export_image(self, tar_file):
        """Export current images into a tar file.

        Need run inside executor.
        """
        try:
            image = self.dock.api.get_image(self.image)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't fetch image %s -> %s", self.image, err)
            return False

        try:
            with tar_file.open("wb") as write_tar:
                for chunk in image.stream():
                    write_tar.write(chunk)
        except (OSError, requests.exceptions.ReadTimeout) as err:
            _LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
            return False

        _LOGGER.info("Export image %s to %s", self.image, tar_file)
        return True

    @docker_process
    def import_image(self, path, tag):
        """Import a tar file as image."""
        return self.loop.run_in_executor(None, self._import_image, path, tag)

    def _import_image(self, tar_file, tag):
        """Import a tar file as image.

        Need run inside executor.
        """
        try:
            with tar_file.open("rb") as read_tar:
                self.dock.api.load_image(read_tar)

            image = self.dock.images.get(self.image)
            image.tag(self.image, tag=tag)
        except (docker.errors.DockerException, OSError) as err:
            _LOGGER.error("Can't import image %s -> %s", self.image, err)
            return False

        _LOGGER.info("Import image %s and tag %s", tar_file, tag)
        self.process_metadata(image.attrs, force=True)
        self._cleanup()
        return True

    def _restart(self):
        """Restart docker container.

        Add-ons prepare some things on start that are normally not
        repeatable, so restart by stopping and running again.
        Need run inside executor.
        """
        self._stop()
        return self._run()
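The `volumes` property builds the nested dict format that docker-py expects for bind mounts. A standalone illustration of that structure; the image name and host paths here are examples only:

import docker

client = docker.from_env()
# Same {host_path: {'bind': container_path, 'mode': mode}} shape as above.
client.containers.run(
    "homeassistant/amd64-base:latest",
    detach=True,
    volumes={
        "/usr/share/hassio/ssl": {"bind": "/ssl", "mode": "ro"},
        "/usr/share/hassio/share": {"bind": "/share", "mode": "rw"},
    },
)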
@@ -1,117 +0,0 @@
"""Init file for HassIO docker object."""
from contextlib import suppress
import logging

import docker

from . import DockerBase

_LOGGER = logging.getLogger(__name__)

HASS_DOCKER_NAME = 'homeassistant'


class DockerHomeAssistant(DockerBase):
    """Docker hassio wrapper for HomeAssistant."""

    def __init__(self, config, loop, dock, data):
        """Initialize docker homeassistant wrapper."""
        super().__init__(config, loop, dock, image=data.image)
        self.data = data

    @property
    def name(self):
        """Return name of docker container."""
        return HASS_DOCKER_NAME

    @property
    def devices(self):
        """Create list of special devices to map into docker."""
        if not self.data.devices:
            return None

        devices = []
        for device in self.data.devices:
            devices.append("/dev/{0}:/dev/{0}:rwm".format(device))

        return devices

    def _run(self):
        """Run docker image.

        Need run inside executor.
        """
        if self._is_running():
            return True

        # cleanup
        self._stop()

        try:
            self.dock.containers.run(
                self.image,
                name=self.name,
                hostname=self.name,
                detach=True,
                privileged=True,
                devices=self.devices,
                network_mode='host',
                environment={
                    'HASSIO': self.config.api_endpoint,
                    'TZ': self.config.timezone,
                },
                volumes={
                    str(self.config.path_extern_config):
                        {'bind': '/config', 'mode': 'rw'},
                    str(self.config.path_extern_ssl):
                        {'bind': '/ssl', 'mode': 'ro'},
                    str(self.config.path_extern_share):
                        {'bind': '/share', 'mode': 'rw'},
                }
            )

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False

        _LOGGER.info(
            "Start homeassistant %s with version %s", self.image, self.version)
        return True

    def _execute_command(self, command):
        """Create a temporary container and run command.

        Need run inside executor.
        """
        _LOGGER.info("Run command '%s' on %s", command, self.image)
        try:
            container = self.dock.containers.run(
                self.image,
                command=command,
                detach=True,
                stdout=True,
                stderr=True,
                environment={
                    'TZ': self.config.timezone,
                },
                volumes={
                    str(self.config.path_extern_config):
                        {'bind': '/config', 'mode': 'ro'},
                    str(self.config.path_extern_ssl):
                        {'bind': '/ssl', 'mode': 'ro'},
                }
            )

            # wait until command is done
            exit_code = container.wait()
            output = container.logs()

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't execute command -> %s", err)
            return (None, b"")

        # cleanup container
        with suppress(docker.errors.DockerException):
            container.remove(force=True)

        return (exit_code, output)
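Note that `container.wait()` returned a bare exit code in docker-py 2.x, which is what this file assumes; docker-py 3.0 and later return a dict instead. A hedged sketch of the same throwaway-container pattern against the modern SDK:

import docker

def run_once(image, command):
    """Run command in a temporary container, return (exit_code, logs)."""
    client = docker.from_env()
    container = client.containers.run(image, command=command, detach=True)
    try:
        result = container.wait()                # dict on docker-py >= 3.0
        output = container.logs(stdout=True, stderr=True)
        return result.get("StatusCode"), output
    finally:
        container.remove(force=True)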
@@ -1,54 +0,0 @@
"""Init file for HassIO docker object."""
import logging
import os

from . import DockerBase
from .util import docker_process
from ..const import RESTART_EXIT_CODE

_LOGGER = logging.getLogger(__name__)


class DockerSupervisor(DockerBase):
    """Docker hassio wrapper for the supervisor."""

    def __init__(self, config, loop, dock, stop_callback, image=None):
        """Initialize docker base wrapper."""
        super().__init__(config, loop, dock, image=image)
        self.stop_callback = stop_callback

    @property
    def name(self):
        """Return name of docker container."""
        return os.environ['SUPERVISOR_NAME']

    @docker_process
    async def update(self, tag):
        """Update a supervisor docker image."""
        _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)

        if await self.loop.run_in_executor(None, self._install, tag):
            self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
            return True

        return False

    async def run(self):
        """Run docker image."""
        raise RuntimeError("Not supported on the supervisor docker container!")

    async def install(self, tag):
        """Pull docker image."""
        raise RuntimeError("Not supported on the supervisor docker container!")

    async def stop(self):
        """Stop/remove docker container."""
        raise RuntimeError("Not supported on the supervisor docker container!")

    async def remove(self):
        """Remove docker image."""
        raise RuntimeError("Not supported on the supervisor docker container!")

    async def restart(self):
        """Restart docker container."""
        raise RuntimeError("Not supported on the supervisor docker container!")
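The update path works by pulling the new image and then asking the host to restart the container: the process exits with a dedicated code, and the host's run wrapper starts a fresh container from the retagged `latest` image. A toy sketch of that contract; the exit code value here is an assumption, the real one lives in hassio.const:

import sys

RESTART_EXIT_CODE = 100  # assumed value, for illustration only

def request_supervisor_restart():
    # The host's run script interprets this exit code as "restart me",
    # so the next start picks up the freshly pulled image.
    sys.exit(RESTART_EXIT_CODE)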
@@ -1,60 +0,0 @@
"""HassIO docker utilities."""
import logging
import re

from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64

_LOGGER = logging.getLogger(__name__)

HASSIO_BASE_IMAGE = {
    ARCH_ARMHF: "homeassistant/armhf-base:latest",
    ARCH_AARCH64: "homeassistant/aarch64-base:latest",
    ARCH_I386: "homeassistant/i386-base:latest",
    ARCH_AMD64: "homeassistant/amd64-base:latest",
}

TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")


def dockerfile_template(dockerfile, arch, version, meta_type):
    """Prepare a Hass.IO dockerfile."""
    buff = []
    hassio_image = HASSIO_BASE_IMAGE[arch]
    custom_image = re.compile(r"^#{}:FROM".format(arch))

    # read docker
    with dockerfile.open('r') as dock_input:
        for line in dock_input:
            line = TMPL_IMAGE.sub(hassio_image, line)
            line = custom_image.sub("FROM", line)
            buff.append(line)

    # add metadata
    buff.append(create_metadata(version, arch, meta_type))

    # write docker
    with dockerfile.open('w') as dock_output:
        dock_output.writelines(buff)


def create_metadata(version, arch, meta_type):
    """Generate docker label layer for hassio."""
    return ('LABEL io.hass.version="{}" '
            'io.hass.arch="{}" '
            'io.hass.type="{}"').format(version, arch, meta_type)


# pylint: disable=protected-access
def docker_process(method):
    """Wrap a method so it only runs when no other task holds the lock."""
    async def wrap_api(api, *args, **kwargs):
        """Return api wrapper."""
        if api._lock.locked():
            _LOGGER.error(
                "Can't execute %s while a task is in progress", method.__name__)
            return False

        async with api._lock:
            return await method(api, *args, **kwargs)

    return wrap_api
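`docker_process` rejects overlapping operations instead of queueing them: a second call that arrives while the lock is held returns False immediately. A toy demonstration with an object that mimics the `_lock` attribute the decorator expects:

import asyncio

class Toy:
    def __init__(self):
        self._lock = asyncio.Lock()

    @docker_process
    async def task(self):
        await asyncio.sleep(0.1)
        return True

async def main():
    toy = Toy()
    # The second call starts while the first holds the lock.
    results = await asyncio.gather(toy.task(), toy.task())
    print(results)  # [True, False] -> the overlapping call was rejected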
122 hassio/docker/__init__.py Normal file
@@ -0,0 +1,122 @@
"""Init file for Hass.io Docker object."""
from contextlib import suppress
import logging

import attr
import docker

from .network import DockerNetwork
from ..const import SOCKET_DOCKER

_LOGGER = logging.getLogger(__name__)


@attr.s(frozen=True)
class CommandReturn:
    """Return object from command run."""
    exit_code = attr.ib()
    output = attr.ib()


class DockerAPI:
    """Docker Hass.io wrapper.

    This class is not AsyncIO safe!
    """

    def __init__(self):
        """Initialize Docker base wrapper."""
        self.docker = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)),
            version='auto', timeout=900)
        self.network = DockerNetwork(self.docker)

    @property
    def images(self):
        """Return API images."""
        return self.docker.images

    @property
    def containers(self):
        """Return API containers."""
        return self.docker.containers

    @property
    def api(self):
        """Return low-level API client."""
        return self.docker.api

    def run(self, image, **kwargs):
        """Create a Docker container and run it.

        Need run inside executor.
        """
        name = kwargs.get('name', image)
        network_mode = kwargs.get('network_mode')
        hostname = kwargs.get('hostname')

        # Setup network
        kwargs['dns_search'] = ["."]
        if network_mode:
            kwargs['dns'] = [str(self.network.supervisor)]
            kwargs['dns_opt'] = ["ndots:0"]
        else:
            kwargs['network'] = None

        # Create container
        try:
            container = self.docker.containers.create(
                image, use_config_proxy=False, **kwargs)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't create container from %s: %s", name, err)
            return False

        # Attach network
        if not network_mode:
            alias = [hostname] if hostname else None
            if self.network.attach_container(container, alias=alias):
                self.network.detach_default_bridge(container)
            else:
                _LOGGER.warning("Can't attach %s to hassio-net!", name)

        # Run container
        try:
            container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s: %s", name, err)
            return False

        return True

    def run_command(self, image, command=None, **kwargs):
        """Create a temporary container and run command.

        Need run inside executor.
        """
        stdout = kwargs.get('stdout', True)
        stderr = kwargs.get('stderr', True)
        container = None

        _LOGGER.info("Run command '%s' on %s", command, image)
        try:
            container = self.docker.containers.run(
                image,
                command=command,
                network=self.network.name,
                use_config_proxy=False,
                **kwargs
            )

            # wait until command is done
            result = container.wait()
            output = container.logs(stdout=stdout, stderr=stderr)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't execute command: %s", err)
            return CommandReturn(None, b"")

        finally:
            # cleanup container; guard against a failed create
            if container is not None:
                with suppress(docker.errors.DockerException):
                    container.remove(force=True)

        return CommandReturn(result.get('StatusCode'), output)
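`CommandReturn` is a frozen attrs class, so results behave as immutable value objects. A quick illustration:

import attr

ret = CommandReturn(exit_code=0, output=b"ok")
print(ret.exit_code, ret.output)   # 0 b'ok'

try:
    ret.exit_code = 1              # frozen instances reject assignment
except attr.exceptions.FrozenInstanceError:
    print("CommandReturn is read-only")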
447 hassio/docker/addon.py Normal file
@@ -0,0 +1,447 @@
"""Init file for Hass.io add-on Docker object."""
import logging
import os

import docker
import requests

from .interface import DockerInterface
from ..addons.build import AddonBuild
from ..const import (MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE,
                     ENV_TOKEN, ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
from ..utils import process_lock

_LOGGER = logging.getLogger(__name__)

AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"


class DockerAddon(DockerInterface):
    """Docker Hass.io wrapper for add-ons."""

    def __init__(self, coresys, slug):
        """Initialize Docker add-on wrapper."""
        super().__init__(coresys)
        self._id = slug

    @property
    def addon(self):
        """Return add-on of Docker image."""
        return self.sys_addons.get(self._id)

    @property
    def image(self):
        """Return name of Docker image."""
        return self.addon.image

    @property
    def timeout(self):
        """Return timeout for Docker actions."""
        return self.addon.timeout

    @property
    def version(self):
        """Return version of Docker image."""
        if self.addon.legacy:
            return self.addon.version_installed
        return super().version

    @property
    def arch(self):
        """Return arch of Docker image."""
        if self.addon.legacy:
            return self.sys_arch.default
        return super().arch

    @property
    def name(self):
        """Return name of Docker container."""
        return "addon_{}".format(self.addon.slug)

    @property
    def ipc(self):
        """Return the IPC namespace."""
        if self.addon.host_ipc:
            return 'host'
        return None

    @property
    def full_access(self):
        """Return True if full access is enabled."""
        return not self.addon.protected and self.addon.with_full_access

    @property
    def hostname(self):
        """Return slug/id of add-on."""
        return self.addon.slug.replace('_', '-')

    @property
    def environment(self):
        """Return environment for Docker add-on."""
        addon_env = self.addon.environment or {}

        # Provide options for legacy add-ons
        if self.addon.legacy:
            for key, value in self.addon.options.items():
                if isinstance(value, (int, str)):
                    addon_env[key] = value
                else:
                    _LOGGER.warning(
                        "Can't set nested option %s as Docker env", key)

        return {
            **addon_env,
            ENV_TIME: self.sys_timezone,
            ENV_TOKEN: self.addon.hassio_token,
        }

    @property
    def devices(self):
        """Return needed devices."""
        devices = self.addon.devices or []

        # Use audio devices
        if self.addon.with_audio and self.sys_hardware.support_audio:
            devices.append(AUDIO_DEVICE)

        # Auto mapping UART devices
        if self.addon.auto_uart:
            for device in self.sys_hardware.serial_devices:
                devices.append(f"{device}:{device}:rwm")

        # Return None if no devices are present
        return devices or None

    @property
    def ports(self):
        """Filter None from add-on ports."""
        if not self.addon.ports:
            return None

        return {
            container_port: host_port
            for container_port, host_port in self.addon.ports.items()
            if host_port
        }

    @property
    def security_opt(self):
        """Control security options."""
        security = []

        # AppArmor
        apparmor = self.sys_host.apparmor.available
        if not apparmor or self.addon.apparmor == SECURITY_DISABLE:
            security.append("apparmor:unconfined")
        elif self.addon.apparmor == SECURITY_PROFILE:
            security.append(f"apparmor={self.addon.slug}")

        # Disable Seccomp. We don't support it officially, and it causes
        # trouble on some kinds of host systems.
        security.append("seccomp=unconfined")

        return security

    @property
    def tmpfs(self):
        """Return tmpfs for Docker add-on."""
        options = self.addon.tmpfs
        if options:
            return {"/tmpfs": f"{options}"}
        return None

    @property
    def network_mapping(self):
        """Return hosts mapping."""
        return {
            'homeassistant': self.sys_docker.network.gateway,
            'hassio': self.sys_docker.network.supervisor,
        }

    @property
    def network_mode(self):
        """Return network mode for add-on."""
        if self.addon.host_network:
            return 'host'
        return None

    @property
    def pid_mode(self):
        """Return PID mode for add-on."""
        if not self.addon.protected and self.addon.host_pid:
            return 'host'
        return None

    @property
    def volumes(self):
        """Generate volumes for mappings."""
        volumes = {
            str(self.addon.path_extern_data): {
                'bind': "/data",
                'mode': 'rw'
            }
        }

        addon_mapping = self.addon.map_volumes

        # Setup config mappings
        if MAP_CONFIG in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_homeassistant): {
                    'bind': "/config",
                    'mode': addon_mapping[MAP_CONFIG]
                }
            })

        if MAP_SSL in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_ssl): {
                    'bind': "/ssl",
                    'mode': addon_mapping[MAP_SSL]
                }
            })

        if MAP_ADDONS in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_addons_local): {
                    'bind': "/addons",
                    'mode': addon_mapping[MAP_ADDONS]
                }
            })

        if MAP_BACKUP in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_backup): {
                    'bind': "/backup",
                    'mode': addon_mapping[MAP_BACKUP]
                }
            })

        if MAP_SHARE in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_share): {
                    'bind': "/share",
                    'mode': addon_mapping[MAP_SHARE]
                }
            })

        # Init other hardware mappings

        # GPIO support
        if self.addon.with_gpio and self.sys_hardware.support_gpio:
            for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
                volumes.update({
                    gpio_path: {
                        'bind': gpio_path,
                        'mode': 'rw'
                    },
                })

        # DeviceTree support
        if self.addon.with_devicetree:
            volumes.update({
                "/sys/firmware/devicetree/base": {
                    'bind': "/device-tree",
                    'mode': 'ro'
                },
            })

        # Kernel Modules support
        if self.addon.with_kernel_modules:
            volumes.update({
                "/lib/modules": {
                    'bind': "/lib/modules",
                    'mode': 'ro'
                },
            })

        # Docker API support
        if not self.addon.protected and self.addon.access_docker_api:
            volumes.update({
                "/var/run/docker.sock": {
                    'bind': "/var/run/docker.sock",
                    'mode': 'ro'
                },
            })

        # Host D-Bus system
        if self.addon.host_dbus:
            volumes.update({
                "/var/run/dbus": {
                    'bind': "/var/run/dbus",
                    'mode': 'rw'
                }
            })

        # ALSA configuration
        if self.addon.with_audio:
            volumes.update({
                str(self.addon.path_extern_asound): {
                    'bind': "/etc/asound.conf",
                    'mode': 'ro'
                }
            })

        return volumes

    def _run(self):
        """Run Docker image.

        Need run inside executor.
        """
        if self._is_running():
            return True

        # Security check
        if not self.addon.protected:
            _LOGGER.warning("%s runs with disabled protected mode!",
                            self.addon.name)

        # cleanup
        self._stop()

        ret = self.sys_docker.run(
            self.image,
            name=self.name,
            hostname=self.hostname,
            detach=True,
            init=True,
            privileged=self.full_access,
            ipc_mode=self.ipc,
            stdin_open=self.addon.with_stdin,
            network_mode=self.network_mode,
            pid_mode=self.pid_mode,
            ports=self.ports,
            extra_hosts=self.network_mapping,
            devices=self.devices,
            cap_add=self.addon.privileged,
            security_opt=self.security_opt,
            environment=self.environment,
            volumes=self.volumes,
            tmpfs=self.tmpfs)

        if ret:
            _LOGGER.info("Start Docker add-on %s with version %s", self.image,
                         self.version)

        return ret

    def _install(self, tag, image=None):
        """Pull Docker image or build it.

        Need run inside executor.
        """
        if self.addon.need_build:
            return self._build(tag)

        return super()._install(tag, image)

    def _build(self, tag):
        """Build a Docker container.

        Need run inside executor.
        """
        build_env = AddonBuild(self.coresys, self._id)

        _LOGGER.info("Start build %s:%s", self.image, tag)
        try:
            image, log = self.sys_docker.images.build(
                use_config_proxy=False, **build_env.get_docker_args(tag))

            _LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
            image.tag(self.image, tag='latest')

            # Update meta data
            self._meta = image.attrs

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't build %s:%s: %s", self.image, tag, err)
            return False

        _LOGGER.info("Build %s:%s done", self.image, tag)
        return True

    @process_lock
    def export_image(self, path):
        """Export current images into a tar file."""
        return self.sys_run_in_executor(self._export_image, path)

    def _export_image(self, tar_file):
        """Export current images into a tar file.

        Need run inside executor.
        """
        try:
            image = self.sys_docker.api.get_image(self.image)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't fetch image %s: %s", self.image, err)
            return False

        _LOGGER.info("Export image %s to %s", self.image, tar_file)
        try:
            with tar_file.open("wb") as write_tar:
                for chunk in image:
                    write_tar.write(chunk)
        except (OSError, requests.exceptions.ReadTimeout) as err:
            _LOGGER.error("Can't write tar file %s: %s", tar_file, err)
            return False

        _LOGGER.info("Export image %s done", self.image)
        return True

    @process_lock
    def import_image(self, path, tag):
        """Import a tar file as image."""
        return self.sys_run_in_executor(self._import_image, path, tag)

    def _import_image(self, tar_file, tag):
        """Import a tar file as image.

        Need run inside executor.
        """
        try:
            with tar_file.open("rb") as read_tar:
                self.sys_docker.api.load_image(read_tar, quiet=True)

            image = self.sys_docker.images.get(self.image)
            image.tag(self.image, tag=tag)
        except (docker.errors.DockerException, OSError) as err:
            _LOGGER.error("Can't import image %s: %s", self.image, err)
            return False

        _LOGGER.info("Import image %s and tag %s", tar_file, tag)
        self._meta = image.attrs
        self._cleanup()
        return True

    @process_lock
    def write_stdin(self, data):
        """Write to add-on stdin."""
        return self.sys_run_in_executor(self._write_stdin, data)

    def _write_stdin(self, data):
        """Write to add-on stdin.

        Need run inside executor.
        """
        if not self._is_running():
            return False

        try:
            # Load needed docker objects
            container = self.sys_docker.containers.get(self.name)
            socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
            return False

        try:
            # Write to stdin
            data += b"\n"
            os.write(socket.fileno(), data)
            socket.close()
        except OSError as err:
            _LOGGER.error("Can't write to %s stdin: %s", self.name, err)
            return False

        return True
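A hedged caller-side sketch for `write_stdin`; `addon_docker` stands in for a DockerAddon instance, and the payload format is whatever the add-on itself expects:

import json

async def send_to_addon(addon_docker, payload):
    # write_stdin appends the trailing newline itself.
    data = json.dumps(payload).encode()
    return await addon_docker.write_stdin(data)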
38 hassio/docker/hassos_cli.py Normal file
@@ -0,0 +1,38 @@
"""HassOS Cli docker object."""
import logging

import docker

from ..coresys import CoreSysAttributes
from .interface import DockerInterface

_LOGGER = logging.getLogger(__name__)


class DockerHassOSCli(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for HassOS Cli."""

    @property
    def image(self):
        """Return name of HassOS CLI image."""
        return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"

    def _stop(self, remove_container=True):
        """Don't need stop."""
        return True

    def _attach(self):
        """Attach to running Docker container.

        Need run inside executor.
        """
        try:
            image = self.sys_docker.images.get(self.image)

        except docker.errors.DockerException:
            _LOGGER.warning("Can't find a HassOS CLI %s", self.image)

        else:
            self._meta = image.attrs
            _LOGGER.info(
                "Found HassOS CLI %s with version %s", self.image, self.version
            )
134 hassio/docker/homeassistant.py Normal file
@@ -0,0 +1,134 @@
"""Init file for Hass.io Docker object."""
import logging

import docker

from .interface import DockerInterface
from ..const import ENV_TOKEN, ENV_TIME, LABEL_MACHINE

_LOGGER = logging.getLogger(__name__)

HASS_DOCKER_NAME = "homeassistant"


class DockerHomeAssistant(DockerInterface):
    """Docker Hass.io wrapper for Home Assistant."""

    @property
    def machine(self):
        """Return machine of Home Assistant Docker image."""
        if self._meta and LABEL_MACHINE in self._meta["Config"]["Labels"]:
            return self._meta["Config"]["Labels"][LABEL_MACHINE]
        return None

    @property
    def image(self):
        """Return name of Docker image."""
        return self.sys_homeassistant.image

    @property
    def name(self):
        """Return name of Docker container."""
        return HASS_DOCKER_NAME

    @property
    def devices(self):
        """Create list of special devices to map into Docker."""
        devices = []
        for device in self.sys_hardware.serial_devices:
            devices.append(f"{device}:{device}:rwm")
        return devices or None

    def _run(self):
        """Run Docker image.

        Need run inside executor.
        """
        if self._is_running():
            return False

        # cleanup
        self._stop()

        ret = self.sys_docker.run(
            self.image,
            name=self.name,
            hostname=self.name,
            detach=True,
            privileged=True,
            init=True,
            devices=self.devices,
            network_mode="host",
            environment={
                "HASSIO": self.sys_docker.network.supervisor,
                ENV_TIME: self.sys_timezone,
                ENV_TOKEN: self.sys_homeassistant.hassio_token,
            },
            volumes={
                str(self.sys_config.path_extern_homeassistant): {
                    "bind": "/config",
                    "mode": "rw",
                },
                str(self.sys_config.path_extern_ssl): {"bind": "/ssl", "mode": "ro"},
                str(self.sys_config.path_extern_share): {
                    "bind": "/share",
                    "mode": "rw",
                },
            },
        )

        if ret:
            _LOGGER.info(
                "Start homeassistant %s with version %s", self.image, self.version
            )

        return ret

    def _execute_command(self, command):
        """Create a temporary container and run command.

        Need run inside executor.
        """
        return self.sys_docker.run_command(
            self.image,
            command,
            privileged=True,
            init=True,
            devices=self.devices,
            detach=True,
            stdout=True,
            stderr=True,
            environment={ENV_TIME: self.sys_timezone},
            volumes={
                str(self.sys_config.path_extern_homeassistant): {
                    "bind": "/config",
                    "mode": "rw",
                },
                str(self.sys_config.path_extern_ssl): {"bind": "/ssl", "mode": "ro"},
                str(self.sys_config.path_extern_share): {
                    "bind": "/share",
                    "mode": "ro",
                },
            },
        )

    def is_initialize(self):
        """Return True if Docker container exists."""
        return self.sys_run_in_executor(self._is_initialize)

    def _is_initialize(self):
        """Return True if Docker container exists.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
            docker_image = self.sys_docker.images.get(self.image)
        except docker.errors.DockerException:
            return False

        # we run on an old image, stop and start it
        if docker_container.image.id != docker_image.id:
            return False

        return True
408 hassio/docker/interface.py Normal file
@@ -0,0 +1,408 @@
"""Interface class for Hass.io Docker object."""
import asyncio
from contextlib import suppress
import logging

import docker

from ..const import LABEL_ARCH, LABEL_VERSION
from ..coresys import CoreSysAttributes
from ..utils import process_lock
from .stats import DockerStats

_LOGGER = logging.getLogger(__name__)


class DockerInterface(CoreSysAttributes):
    """Docker Hass.io interface."""

    def __init__(self, coresys):
        """Initialize Docker base wrapper."""
        self.coresys = coresys
        self._meta = None
        self.lock = asyncio.Lock(loop=coresys.loop)

    @property
    def timeout(self):
        """Return timeout for Docker actions."""
        return 30

    @property
    def name(self):
        """Return name of Docker container."""
        return None

    @property
    def meta_config(self):
        """Return meta data of configuration for container/image."""
        if not self._meta:
            return {}
        return self._meta.get("Config", {})

    @property
    def meta_labels(self):
        """Return meta data of labels for container/image."""
        return self.meta_config.get("Labels") or {}

    @property
    def image(self):
        """Return name of Docker image."""
        return self.meta_config.get("Image")

    @property
    def version(self):
        """Return version of Docker image."""
        return self.meta_labels.get(LABEL_VERSION)

    @property
    def arch(self):
        """Return arch of Docker image."""
        return self.meta_labels.get(LABEL_ARCH)

    @property
    def in_progress(self):
        """Return True if a task is in progress."""
        return self.lock.locked()

    @process_lock
    def install(self, tag, image=None):
        """Pull Docker image."""
        return self.sys_run_in_executor(self._install, tag, image)

    def _install(self, tag, image=None):
        """Pull Docker image.

        Need run inside executor.
        """
        image = image or self.image

        try:
            _LOGGER.info("Pull image %s tag %s.", image, tag)
            docker_image = self.sys_docker.images.pull(f"{image}:{tag}")

            docker_image.tag(image, tag="latest")
            self._meta = docker_image.attrs
        except docker.errors.APIError as err:
            _LOGGER.error("Can't install %s:%s -> %s.", image, tag, err)
            return False

        _LOGGER.info("Tag image %s with version %s as latest", image, tag)
        return True

    def exists(self):
        """Return True if Docker image exists in local repository."""
        return self.sys_run_in_executor(self._exists)

    def _exists(self):
        """Return True if Docker image exists in local repository.

        Need run inside executor.
        """
        try:
            docker_image = self.sys_docker.images.get(self.image)
            assert f"{self.image}:{self.version}" in docker_image.tags
        except (docker.errors.DockerException, AssertionError):
            return False

        return True

    def is_running(self):
        """Return True if Docker is running.

        Return a Future.
        """
        return self.sys_run_in_executor(self._is_running)

    def _is_running(self):
        """Return True if Docker is running.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
            docker_image = self.sys_docker.images.get(self.image)
        except docker.errors.DockerException:
            return False

        # container is not running
        if docker_container.status != "running":
            return False

        # we run on an old image, stop and start it
        if docker_container.image.id != docker_image.id:
            return False

        return True

    @process_lock
    def attach(self):
        """Attach to running Docker container."""
        return self.sys_run_in_executor(self._attach)

    def _attach(self):
        """Attach to running Docker container.

        Need run inside executor.
        """
        try:
            if self.image:
                self._meta = self.sys_docker.images.get(self.image).attrs
            else:
                self._meta = self.sys_docker.containers.get(self.name).attrs
        except docker.errors.DockerException:
            return False

        _LOGGER.info("Attach to image %s with version %s", self.image, self.version)

        return True

    @process_lock
    def run(self):
        """Run Docker image."""
        return self.sys_run_in_executor(self._run)

    def _run(self):
        """Run Docker image.

        Need run inside executor.
        """
        raise NotImplementedError()

    @process_lock
    def stop(self, remove_container=True):
        """Stop/remove Docker container."""
        return self.sys_run_in_executor(self._stop, remove_container)

    def _stop(self, remove_container=True):
        """Stop/remove Docker container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        if docker_container.status == "running":
            _LOGGER.info("Stop %s Docker application", self.image)
            with suppress(docker.errors.DockerException):
                docker_container.stop(timeout=self.timeout)

        if remove_container:
            with suppress(docker.errors.DockerException):
                _LOGGER.info("Clean %s Docker application", self.image)
                docker_container.remove(force=True)

        return True

    @process_lock
    def start(self):
        """Start Docker container."""
        return self.sys_run_in_executor(self._start)

    def _start(self):
        """Start Docker container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        _LOGGER.info("Start %s", self.image)
        try:
            docker_container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s: %s", self.image, err)
            return False

        return True

    @process_lock
    def remove(self):
        """Remove Docker images."""
        return self.sys_run_in_executor(self._remove)

    def _remove(self):
        """Remove Docker images.

        Need run inside executor.
        """
        # Cleanup container
        self._stop()

        _LOGGER.info("Remove Docker %s with latest and %s", self.image, self.version)

        try:
            with suppress(docker.errors.ImageNotFound):
                self.sys_docker.images.remove(image=f"{self.image}:latest", force=True)

            with suppress(docker.errors.ImageNotFound):
                self.sys_docker.images.remove(
                    image=f"{self.image}:{self.version}", force=True
                )

        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't remove image %s: %s", self.image, err)
            return False

        self._meta = None
        return True

    @process_lock
    def update(self, tag, image=None):
        """Update a Docker image."""
        return self.sys_run_in_executor(self._update, tag, image)

    def _update(self, tag, image=None):
        """Update a Docker image.

        Need run inside executor.
        """
        image = image or self.image

        _LOGGER.info(
            "Update Docker %s:%s to %s:%s", self.image, self.version, image, tag
        )

        # Update docker image
        if not self._install(tag, image):
            return False

        # Stop container & cleanup
        self._stop()
        self._cleanup()

        return True

    def logs(self):
        """Return Docker logs of container.

        Return a Future.
        """
        return self.sys_run_in_executor(self._logs)

    def _logs(self):
        """Return Docker logs of container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return b""

        try:
            return docker_container.logs(tail=100, stdout=True, stderr=True)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't grab logs from %s: %s", self.image, err)

    @process_lock
    def cleanup(self):
        """Check if old version exists and cleanup."""
        return self.sys_run_in_executor(self._cleanup)

    def _cleanup(self):
        """Check if old version exists and cleanup.

        Need run inside executor.
        """
        try:
            latest = self.sys_docker.images.get(self.image)
        except docker.errors.DockerException:
            _LOGGER.warning("Can't find %s for cleanup", self.image)
            return False

        for image in self.sys_docker.images.list(name=self.image):
            if latest.id == image.id:
                continue

            with suppress(docker.errors.DockerException):
                _LOGGER.info("Cleanup Docker images: %s", image.tags)
                self.sys_docker.images.remove(image.id, force=True)

        return True

    @process_lock
    def restart(self):
        """Restart Docker container."""
        return self.sys_loop.run_in_executor(None, self._restart)

    def _restart(self):
        """Restart Docker container.

        Need run inside executor.
        """
        try:
            container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        _LOGGER.info("Restart %s", self.image)
        try:
            container.restart(timeout=self.timeout)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't restart %s: %s", self.image, err)
            return False
        return True

    @process_lock
    def execute_command(self, command):
        """Create a temporary container and run command."""
        return self.sys_run_in_executor(self._execute_command, command)

    def _execute_command(self, command):
        """Create a temporary container and run command.

        Need run inside executor.
        """
        raise NotImplementedError()

    def stats(self):
        """Read and return stats from container."""
        return self.sys_run_in_executor(self._stats)

    def _stats(self):
        """Read and return stats from container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return None

        try:
            stats = docker_container.stats(stream=False)
            return DockerStats(stats)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't read stats from %s: %s", self.name, err)
            return None

    def is_fails(self):
        """Return True if Docker is in a failing state.

        Return a Future.
        """
        return self.sys_run_in_executor(self._is_fails)

    def _is_fails(self):
        """Return True if Docker is in a failing state.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        # container is still running or was cleanly removed
        if docker_container.status != "exited":
            return False

        # Check return value
        if int(docker_container.attrs["State"]["ExitCode"]) != 0:
            return True

        return False
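`_stats` relies on `container.stats(stream=False)` returning a single snapshot dict rather than a streaming generator. A hedged sketch of reading one sample directly with docker-py; the container name is an example:

import docker

client = docker.from_env()
container = client.containers.get("homeassistant")  # example name
sample = container.stats(stream=False)              # one dict, no streaming
print(sample["memory_stats"].get("usage"))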
93
hassio/docker/network.py
Normal file
93
hassio/docker/network.py
Normal file
@@ -0,0 +1,93 @@
"""Internal network manager for Hass.io."""
import logging

import docker

from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE

_LOGGER = logging.getLogger(__name__)


class DockerNetwork:
    """Internal Hass.io network.

    This class is not AsyncIO safe!
    """

    def __init__(self, dock):
        """Initialize internal Hass.io network."""
        self.docker = dock
        self.network = self._get_network()

    @property
    def name(self):
        """Return name of the network."""
        return DOCKER_NETWORK

    @property
    def containers(self):
        """Return the containers connected to the network."""
        return self.network.containers

    @property
    def gateway(self):
        """Return the gateway address of the network."""
        return DOCKER_NETWORK_MASK[1]

    @property
    def supervisor(self):
        """Return the supervisor address of the network."""
        return DOCKER_NETWORK_MASK[2]

    def _get_network(self):
        """Get the Hass.io network, creating it if it does not exist."""
        try:
            return self.docker.networks.get(DOCKER_NETWORK)
        except docker.errors.NotFound:
            _LOGGER.info("Can't find Hass.io network, creating a new network")

        ipam_pool = docker.types.IPAMPool(
            subnet=str(DOCKER_NETWORK_MASK),
            gateway=str(self.gateway),
            iprange=str(DOCKER_NETWORK_RANGE)
        )

        ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

        return self.docker.networks.create(
            DOCKER_NETWORK, driver='bridge', ipam=ipam_config,
            enable_ipv6=False, options={
                "com.docker.network.bridge.name": DOCKER_NETWORK,
            })

    def attach_container(self, container, alias=None, ipv4=None):
        """Attach a container to the Hass.io network.

        Needs to run inside executor.
        """
        ipv4 = str(ipv4) if ipv4 else None

        try:
            self.network.connect(container, aliases=alias, ipv4_address=ipv4)
        except docker.errors.APIError as err:
            _LOGGER.error("Can't link container to hassio-net: %s", err)
            return False

        self.network.reload()
        return True

    def detach_default_bridge(self, container):
        """Detach a container from the default Docker bridge.

        Needs to run inside executor.
        """
        try:
            default_network = self.docker.networks.get('bridge')
            default_network.disconnect(container)

        except docker.errors.NotFound:
            return

        except docker.errors.APIError as err:
            _LOGGER.warning(
                "Can't disconnect container from default bridge: %s", err)
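A short usage sketch for DockerNetwork, assuming a plain client from docker.from_env() and a made-up address inside the configured subnet; in the Supervisor itself the client and addresses come from its own wiring, and 'my_addon' stands in for a real container name or object:

import ipaddress

import docker

client = docker.from_env()
network = DockerNetwork(client)

# Hypothetical fixed address inside DOCKER_NETWORK_MASK.
addr = ipaddress.ip_address('172.30.32.5')
if network.attach_container('my_addon', alias=['my-addon'], ipv4=addr):
    print('Attached at', addr)
    # Drop the container from Docker's default bridge afterwards.
    network.detach_default_bridge('my_addon')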
90 hassio/docker/stats.py Normal file
@@ -0,0 +1,90 @@
"""Calculate and represent Docker stats data."""
from contextlib import suppress


class DockerStats:
    """Hold stats data from a container."""

    def __init__(self, stats):
        """Initialize Docker stats."""
        self._cpu = 0.0
        self._network_rx = 0
        self._network_tx = 0
        self._blk_read = 0
        self._blk_write = 0

        try:
            self._memory_usage = stats['memory_stats']['usage']
            self._memory_limit = stats['memory_stats']['limit']
        except KeyError:
            self._memory_usage = 0
            self._memory_limit = 0

        with suppress(KeyError):
            self._calc_cpu_percent(stats)

        with suppress(KeyError):
            self._calc_network(stats['networks'])

        with suppress(KeyError):
            self._calc_block_io(stats['blkio_stats'])

    def _calc_cpu_percent(self, stats):
        """Calculate CPU percent."""
        cpu_delta = stats['cpu_stats']['cpu_usage']['total_usage'] - \
            stats['precpu_stats']['cpu_usage']['total_usage']
        system_delta = stats['cpu_stats']['system_cpu_usage'] - \
            stats['precpu_stats']['system_cpu_usage']

        if system_delta > 0.0 and cpu_delta > 0.0:
            self._cpu = (cpu_delta / system_delta) * \
                len(stats['cpu_stats']['cpu_usage']['percpu_usage']) * 100.0

    def _calc_network(self, networks):
        """Calculate network IO stats."""
        for _, stats in networks.items():
            self._network_rx += stats['rx_bytes']
            self._network_tx += stats['tx_bytes']

    def _calc_block_io(self, blkio):
        """Calculate block IO stats."""
        for stats in blkio['io_service_bytes_recursive']:
            if stats['op'] == 'Read':
                self._blk_read += stats['value']
            elif stats['op'] == 'Write':
                self._blk_write += stats['value']

    @property
    def cpu_percent(self):
        """Return CPU percent."""
        return self._cpu

    @property
    def memory_usage(self):
        """Return memory usage."""
        return self._memory_usage

    @property
    def memory_limit(self):
        """Return memory limit."""
        return self._memory_limit

    @property
    def network_rx(self):
        """Return network rx stats."""
        return self._network_rx

    @property
    def network_tx(self):
        """Return network tx stats."""
        return self._network_tx

    @property
    def blk_read(self):
        """Return block IO read stats."""
        return self._blk_read

    @property
    def blk_write(self):
        """Return block IO write stats."""
        return self._blk_write
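Because DockerStats is pure dictionary arithmetic, the CPU formula is easy to check by hand: cpu = (cpu_delta / system_delta) * n_cpus * 100. A sketch with a hand-built payload (all numbers invented for illustration):

sample = {
    'memory_stats': {'usage': 50 * 1024**2, 'limit': 512 * 1024**2},
    'cpu_stats': {
        'cpu_usage': {'total_usage': 400, 'percpu_usage': [0, 0]},
        'system_cpu_usage': 2000,
    },
    'precpu_stats': {
        'cpu_usage': {'total_usage': 200},
        'system_cpu_usage': 1000,
    },
    'networks': {'eth0': {'rx_bytes': 1024, 'tx_bytes': 2048}},
    'blkio_stats': {'io_service_bytes_recursive': [
        {'op': 'Read', 'value': 4096},
        {'op': 'Write', 'value': 8192},
    ]},
}

stats = DockerStats(sample)
# cpu_delta = 200, system_delta = 1000, 2 CPUs -> (200 / 1000) * 2 * 100 = 40.0
assert stats.cpu_percent == 40.0
assert (stats.network_rx, stats.network_tx) == (1024, 2048)
assert (stats.blk_read, stats.blk_write) == (4096, 8192)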
Some files were not shown because too many files have changed in this diff.