mirror of
https://github.com/home-assistant/supervisor.git
synced 2025-08-17 13:09:22 +00:00
Compare commits
1277 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
c51496ad2f | ||
![]() |
fbe409337b | ||
![]() |
443a43cc5b | ||
![]() |
0e25fad1c0 | ||
![]() |
1ebbf2b693 | ||
![]() |
62d198111c | ||
![]() |
1fc0ab71aa | ||
![]() |
f4402a1633 | ||
![]() |
13a17bcb34 | ||
![]() |
e1b49d90c2 | ||
![]() |
85ab25ea16 | ||
![]() |
80131ddfa8 | ||
![]() |
e9c123459f | ||
![]() |
d3e4bb7219 | ||
![]() |
fd98d38125 | ||
![]() |
3237611034 | ||
![]() |
ce2bffda15 | ||
![]() |
977e7b7adc | ||
![]() |
5082078527 | ||
![]() |
3615091c93 | ||
![]() |
fb1eb44d82 | ||
![]() |
13910d44bf | ||
![]() |
cda1d15070 | ||
![]() |
d0a1de23a6 | ||
![]() |
44fd75220f | ||
![]() |
ed594d653f | ||
![]() |
40bb3a7581 | ||
![]() |
df7f0345e8 | ||
![]() |
f7ab76bb9a | ||
![]() |
45e24bfa65 | ||
![]() |
8cd149783c | ||
![]() |
8e8e6e48a9 | ||
![]() |
816e0d503a | ||
![]() |
c43acd50f4 | ||
![]() |
16ce4296a2 | ||
![]() |
65386b753f | ||
![]() |
2be1529cb8 | ||
![]() |
98f8e032e3 | ||
![]() |
900b785789 | ||
![]() |
9194088947 | ||
![]() |
58c40cbef6 | ||
![]() |
e6c57dfc80 | ||
![]() |
82f76f60bd | ||
![]() |
b9af4aec6b | ||
![]() |
f71ce27248 | ||
![]() |
5b2b1765bc | ||
![]() |
2a892544c2 | ||
![]() |
bedb37ca6b | ||
![]() |
a456cd645f | ||
![]() |
9c68094cf6 | ||
![]() |
379cef9e35 | ||
![]() |
cb3e2dab71 | ||
![]() |
3e89f83e0b | ||
![]() |
af0bdd890a | ||
![]() |
f93f5d0e71 | ||
![]() |
667672a20b | ||
![]() |
9e1f899274 | ||
![]() |
75e0741665 | ||
![]() |
392d0e929b | ||
![]() |
b342073ba9 | ||
![]() |
ff4e550ba3 | ||
![]() |
17aa544be5 | ||
![]() |
390676dbc4 | ||
![]() |
d423252bc7 | ||
![]() |
790e887b70 | ||
![]() |
47e377683e | ||
![]() |
b1232c0d8d | ||
![]() |
059233c111 | ||
![]() |
55382d000b | ||
![]() |
75ab6eec43 | ||
![]() |
e30171746b | ||
![]() |
73849b7468 | ||
![]() |
a52713611c | ||
![]() |
85a66c663c | ||
![]() |
e478e68b70 | ||
![]() |
16095c319a | ||
![]() |
f4a6100fba | ||
![]() |
82060dd242 | ||
![]() |
a58cfb797c | ||
![]() |
c8256a50f4 | ||
![]() |
3ae974e9e2 | ||
![]() |
ac5e74a375 | ||
![]() |
05e3d3b779 | ||
![]() |
681a1ecff5 | ||
![]() |
2b411b0bf9 | ||
![]() |
fee16847d3 | ||
![]() |
501a52a3c6 | ||
![]() |
2bb014fda5 | ||
![]() |
09203f67b2 | ||
![]() |
169c7ec004 | ||
![]() |
202e94615e | ||
![]() |
5fe2a815ad | ||
![]() |
a13a0b4770 | ||
![]() |
455bbc457b | ||
![]() |
d50fd3b580 | ||
![]() |
455e80b07c | ||
![]() |
291becbdf9 | ||
![]() |
33385b46a7 | ||
![]() |
df17668369 | ||
![]() |
43449c85bb | ||
![]() |
9e86eda05a | ||
![]() |
b288554d9c | ||
![]() |
bee55d08fb | ||
![]() |
7a542aeb38 | ||
![]() |
8d42513ba8 | ||
![]() |
89b7247aa2 | ||
![]() |
29132e7f4c | ||
![]() |
3fd9baf78e | ||
![]() |
f3aa3757ce | ||
![]() |
3760967f59 | ||
![]() |
f7ab8e0f7f | ||
![]() |
0e46ea12b2 | ||
![]() |
be226b2b01 | ||
![]() |
9e1239e192 | ||
![]() |
2eba3d85b0 | ||
![]() |
9b569268ab | ||
![]() |
31f5033dca | ||
![]() |
78d9c60be5 | ||
![]() |
baa86f09e5 | ||
![]() |
a4c4b39ba8 | ||
![]() |
752068bb56 | ||
![]() |
739cfbb273 | ||
![]() |
115af4cadf | ||
![]() |
ae3274e559 | ||
![]() |
c61f096dbd | ||
![]() |
ee7b5c42fd | ||
![]() |
85d527bfbc | ||
![]() |
dd561da819 | ||
![]() |
cb5932cb8b | ||
![]() |
8630adc54a | ||
![]() |
90d8832cd2 | ||
![]() |
3802b97bb6 | ||
![]() |
2de175e181 | ||
![]() |
6b7d437b00 | ||
![]() |
e2faf906de | ||
![]() |
bb44ce5cd2 | ||
![]() |
15544ae589 | ||
![]() |
e421284471 | ||
![]() |
785dc64787 | ||
![]() |
7e7e3a7876 | ||
![]() |
2b45c059e0 | ||
![]() |
14ec61f9bd | ||
![]() |
5cc72756f8 | ||
![]() |
44785ef3e2 | ||
![]() |
e60d858feb | ||
![]() |
b31ecfefcd | ||
![]() |
c342231052 | ||
![]() |
673666837e | ||
![]() |
c8f74d6c0d | ||
![]() |
7ed9de8014 | ||
![]() |
8650947f04 | ||
![]() |
a0ac8ced31 | ||
![]() |
2145bbea81 | ||
![]() |
480000ee7f | ||
![]() |
9ec2ad022e | ||
![]() |
43e40816dc | ||
![]() |
941ea3ee68 | ||
![]() |
a6e4b5159e | ||
![]() |
6f542d58d5 | ||
![]() |
b2b5fcee7d | ||
![]() |
59a82345a9 | ||
![]() |
b61a747876 | ||
![]() |
72e5d800d5 | ||
![]() |
c7aa6d4804 | ||
![]() |
b31063449d | ||
![]() |
477672459d | ||
![]() |
9c33897296 | ||
![]() |
100cfb57c5 | ||
![]() |
40b34071e7 | ||
![]() |
341833fd8f | ||
![]() |
f647fd6fea | ||
![]() |
53642f2389 | ||
![]() |
b9bdd655ab | ||
![]() |
e9e1b5b54f | ||
![]() |
be2163d635 | ||
![]() |
7f6dde3a5f | ||
![]() |
334aafee23 | ||
![]() |
1a20c18b19 | ||
![]() |
6e655b165c | ||
![]() |
d768b2fa1e | ||
![]() |
85bce1cfba | ||
![]() |
a798a2466f | ||
![]() |
2a5d8a5c82 | ||
![]() |
ea62171d98 | ||
![]() |
196389d5ee | ||
![]() |
1776021620 | ||
![]() |
c42a9124d3 | ||
![]() |
a44647b4cd | ||
![]() |
e0c3fd87c5 | ||
![]() |
ed8f2a85b7 | ||
![]() |
48f8553c75 | ||
![]() |
af4517fd1e | ||
![]() |
78e6a46318 | ||
![]() |
49ca923e51 | ||
![]() |
7ad22e0399 | ||
![]() |
bb8acc6065 | ||
![]() |
c0fa4a19e9 | ||
![]() |
3f1741dd18 | ||
![]() |
9ef02e4110 | ||
![]() |
15a6f38ebb | ||
![]() |
227f2e5a21 | ||
![]() |
517d6ee981 | ||
![]() |
184eeb7f49 | ||
![]() |
a3555c74e8 | ||
![]() |
657bafd458 | ||
![]() |
2ad5df420c | ||
![]() |
b24d489ec5 | ||
![]() |
6bb0210f1f | ||
![]() |
c1de50266a | ||
![]() |
562e02bc64 | ||
![]() |
f71ec7913a | ||
![]() |
d98baaf660 | ||
![]() |
72db591576 | ||
![]() |
509a37fc04 | ||
![]() |
17f62b6e86 | ||
![]() |
b09aee7644 | ||
![]() |
babcc0de0c | ||
![]() |
5cc47c9222 | ||
![]() |
833559a3b3 | ||
![]() |
b8a976b344 | ||
![]() |
10b14132b9 | ||
![]() |
18953f0b7c | ||
![]() |
19f5fba3aa | ||
![]() |
636bc3e61a | ||
![]() |
521037e1a6 | ||
![]() |
e024c3e38d | ||
![]() |
6a0206c1e7 | ||
![]() |
69a8a83528 | ||
![]() |
0307d700fa | ||
![]() |
919c383b41 | ||
![]() |
d331af4d5a | ||
![]() |
b5467d3c23 | ||
![]() |
3ef0040d66 | ||
![]() |
ec4dfd2172 | ||
![]() |
8f54d7c8e9 | ||
![]() |
b59e709dc0 | ||
![]() |
b236e6c886 | ||
![]() |
8acbb7d6f0 | ||
![]() |
49e4bc9381 | ||
![]() |
36106cc08d | ||
![]() |
8f1763abe2 | ||
![]() |
480eebc6cb | ||
![]() |
dccfffd979 | ||
![]() |
1a978f4762 | ||
![]() |
1fbc8f4060 | ||
![]() |
db3fc1421c | ||
![]() |
9ecd03db0e | ||
![]() |
f111ccb1b6 | ||
![]() |
a32341cc5d | ||
![]() |
f73e277230 | ||
![]() |
8d8587ca29 | ||
![]() |
88eb9511bf | ||
![]() |
e1068997ea | ||
![]() |
560e04c64a | ||
![]() |
621ec03971 | ||
![]() |
d14a47d3f7 | ||
![]() |
3e9de0c210 | ||
![]() |
01a6e074a5 | ||
![]() |
1434077f4e | ||
![]() |
3922175af1 | ||
![]() |
ec6852a8d7 | ||
![]() |
b0b908b4ae | ||
![]() |
5a00336ef1 | ||
![]() |
d53f5e21f4 | ||
![]() |
bd173fa333 | ||
![]() |
6b32fa31b6 | ||
![]() |
d1dba89e39 | ||
![]() |
b2f2806465 | ||
![]() |
3b70cd58a3 | ||
![]() |
6769bfd824 | ||
![]() |
bff4af2534 | ||
![]() |
aabf575ac5 | ||
![]() |
4aacaf6bd6 | ||
![]() |
268070e89c | ||
![]() |
4fa2134cc6 | ||
![]() |
97c35de49a | ||
![]() |
32fb550969 | ||
![]() |
3457441929 | ||
![]() |
32f0fc7a46 | ||
![]() |
c891a8f164 | ||
![]() |
991764af94 | ||
![]() |
1df447272e | ||
![]() |
f032ae757b | ||
![]() |
e529b2859e | ||
![]() |
d1cb2368fa | ||
![]() |
7b812184d1 | ||
![]() |
4f7ce1c6ee | ||
![]() |
44a5ac63db | ||
![]() |
0989ee88cc | ||
![]() |
ba8b72c2c8 | ||
![]() |
2dbb4583f4 | ||
![]() |
81ad42e029 | ||
![]() |
5d3695f8ba | ||
![]() |
c771694aa0 | ||
![]() |
1ac0fd4c10 | ||
![]() |
631ff8caef | ||
![]() |
7382182132 | ||
![]() |
488a2327fb | ||
![]() |
b99ed631c5 | ||
![]() |
726dd3a8f9 | ||
![]() |
4ff9da68ef | ||
![]() |
dee2998ee3 | ||
![]() |
1d43236211 | ||
![]() |
792bc610a3 | ||
![]() |
7a51c828c2 | ||
![]() |
fab6fcd5ac | ||
![]() |
c8e00ba160 | ||
![]() |
6245b6d823 | ||
![]() |
0c55bf20fc | ||
![]() |
f8fd7b5933 | ||
![]() |
6462eea2ef | ||
![]() |
3d79891249 | ||
![]() |
80763c4bbf | ||
![]() |
59102afd45 | ||
![]() |
0b085354db | ||
![]() |
e2a473baa3 | ||
![]() |
06e10fdd3c | ||
![]() |
fb4386a7ad | ||
![]() |
2d294f6841 | ||
![]() |
e09a839148 | ||
![]() |
d9c4dae739 | ||
![]() |
49853e92a4 | ||
![]() |
19620d6808 | ||
![]() |
f6bf44de1c | ||
![]() |
06fae59fc8 | ||
![]() |
5c25fcd84c | ||
![]() |
aa5297026f | ||
![]() |
b94810d044 | ||
![]() |
841520b75e | ||
![]() |
d9e20307de | ||
![]() |
fda1b523ba | ||
![]() |
7cccbc682c | ||
![]() |
621eb4c4c0 | ||
![]() |
056926242f | ||
![]() |
84294f286f | ||
![]() |
b6bc6b8498 | ||
![]() |
845c935b39 | ||
![]() |
19d8de89df | ||
![]() |
cfae20a3ec | ||
![]() |
6db6ab96e6 | ||
![]() |
48695c6805 | ||
![]() |
d74908e3b5 | ||
![]() |
7e94537e36 | ||
![]() |
1427e0ae96 | ||
![]() |
01e27dfa2f | ||
![]() |
f48249c9d1 | ||
![]() |
e607d4feeb | ||
![]() |
5367ac257e | ||
![]() |
46dc6dc63b | ||
![]() |
a59ea72c66 | ||
![]() |
2daf46c444 | ||
![]() |
1bf38bdc99 | ||
![]() |
131909973c | ||
![]() |
ecdf4e53b8 | ||
![]() |
7aa039d162 | ||
![]() |
3dd3340e35 | ||
![]() |
2f9fc39b72 | ||
![]() |
80f4309799 | ||
![]() |
550fca4bcd | ||
![]() |
4b500ef873 | ||
![]() |
476f021fbf | ||
![]() |
8393ca5b23 | ||
![]() |
4eb7a60b88 | ||
![]() |
2040102e21 | ||
![]() |
7ee5737f75 | ||
![]() |
8d499753a0 | ||
![]() |
028ec277eb | ||
![]() |
5552b1da49 | ||
![]() |
06ab7e904f | ||
![]() |
e4bf820038 | ||
![]() |
c209d2fa8d | ||
![]() |
784c5d3b7c | ||
![]() |
a18b706f99 | ||
![]() |
cd34a40dd8 | ||
![]() |
ced72e1273 | ||
![]() |
5416eda1d6 | ||
![]() |
c76a4ff422 | ||
![]() |
d558ad2d76 | ||
![]() |
5f2d183b1d | ||
![]() |
46e92036ec | ||
![]() |
280d423bfe | ||
![]() |
c4847ad10d | ||
![]() |
0a7c75830b | ||
![]() |
7aceb21123 | ||
![]() |
9264d437b1 | ||
![]() |
edc8d8960f | ||
![]() |
0f4f196dc9 | ||
![]() |
a027f4b5fc | ||
![]() |
f025d1df05 | ||
![]() |
4c560d7c54 | ||
![]() |
a976ef6e67 | ||
![]() |
e1b9d754af | ||
![]() |
ee49935b7d | ||
![]() |
36694c9ef0 | ||
![]() |
bd786811a3 | ||
![]() |
ffaeb2b96d | ||
![]() |
517e6cb437 | ||
![]() |
9479672b88 | ||
![]() |
934e59596a | ||
![]() |
8f4ac10361 | ||
![]() |
50e0fd159f | ||
![]() |
28344ff5f3 | ||
![]() |
608ae14246 | ||
![]() |
c0c0d44c2d | ||
![]() |
5e947348ae | ||
![]() |
1f4032f56f | ||
![]() |
336ab0d2b1 | ||
![]() |
0f9d80dde4 | ||
![]() |
0fcab4d92b | ||
![]() |
abd35b62c8 | ||
![]() |
7b721ad8c6 | ||
![]() |
37eaaf356d | ||
![]() |
8cbb4b510b | ||
![]() |
f71549e3df | ||
![]() |
fe15bb6a30 | ||
![]() |
50d36b857a | ||
![]() |
db260dfbde | ||
![]() |
a0c99615aa | ||
![]() |
c66c806e6e | ||
![]() |
a432d28ee3 | ||
![]() |
742bc43500 | ||
![]() |
223e2f1df5 | ||
![]() |
ed9aea6219 | ||
![]() |
aa7b68d4d5 | ||
![]() |
8e57cd2751 | ||
![]() |
64229a188e | ||
![]() |
10cbbcc2de | ||
![]() |
5318e4fbcd | ||
![]() |
007251a04c | ||
![]() |
fd4b3ee539 | ||
![]() |
976ae96633 | ||
![]() |
042bdcdf37 | ||
![]() |
c423e9cf8e | ||
![]() |
1f13d6aa91 | ||
![]() |
78c09a0fa6 | ||
![]() |
e6d6f2ee8c | ||
![]() |
2918ef6225 | ||
![]() |
35b626a1c5 | ||
![]() |
6f26536d97 | ||
![]() |
01064564b4 | ||
![]() |
0c6c6a6620 | ||
![]() |
be166d533f | ||
![]() |
d3e5535221 | ||
![]() |
7206213cd8 | ||
![]() |
605782d707 | ||
![]() |
2c387349c9 | ||
![]() |
e9c9f98168 | ||
![]() |
a98c7819b0 | ||
![]() |
7c42c5758d | ||
![]() |
c555146094 | ||
![]() |
f48c9b5774 | ||
![]() |
8d1732e5eb | ||
![]() |
f2843db421 | ||
![]() |
b513512551 | ||
![]() |
1a59839b1b | ||
![]() |
45861617b9 | ||
![]() |
353544085e | ||
![]() |
144d3921f7 | ||
![]() |
e44d22880e | ||
![]() |
c7692b43e8 | ||
![]() |
9c53caae80 | ||
![]() |
7a1d85ca2b | ||
![]() |
e684223f32 | ||
![]() |
9744f3354b | ||
![]() |
c6e3787681 | ||
![]() |
eab76a6d1d | ||
![]() |
6549a10935 | ||
![]() |
530d40dbbd | ||
![]() |
50f2d8e7d8 | ||
![]() |
7a9aac491e | ||
![]() |
7dcb609fd5 | ||
![]() |
d119e99001 | ||
![]() |
fe0e41adec | ||
![]() |
034393bd42 | ||
![]() |
02e72726a5 | ||
![]() |
d599c3ad76 | ||
![]() |
b00f7c44df | ||
![]() |
028b170cff | ||
![]() |
8da686fc34 | ||
![]() |
3f6453aa89 | ||
![]() |
7967254673 | ||
![]() |
0f60fdd20b | ||
![]() |
ccb8e5fe06 | ||
![]() |
ba576d8748 | ||
![]() |
edcd9ca6e6 | ||
![]() |
ac4277cd7b | ||
![]() |
4c525de5e2 | ||
![]() |
cb751e0397 | ||
![]() |
ac457c1c28 | ||
![]() |
caa77b9337 | ||
![]() |
96d8785349 | ||
![]() |
e4f57d2269 | ||
![]() |
f946de1e46 | ||
![]() |
d588987b8b | ||
![]() |
4925b5fa97 | ||
![]() |
aa3f6390d3 | ||
![]() |
6ba413f452 | ||
![]() |
10b6706e4a | ||
![]() |
17559bfc8e | ||
![]() |
9dc2f43ffb | ||
![]() |
38db375fea | ||
![]() |
f35b6d0b00 | ||
![]() |
8d75583a07 | ||
![]() |
361fc51477 | ||
![]() |
f6019b4e68 | ||
![]() |
998dd5387b | ||
![]() |
3b7776ca01 | ||
![]() |
5788d1dd32 | ||
![]() |
ff04d339f4 | ||
![]() |
e9d03c5c8e | ||
![]() |
ab4b98470e | ||
![]() |
c4f0702595 | ||
![]() |
736c9cb2bd | ||
![]() |
f24e8535d3 | ||
![]() |
8e4f3e0526 | ||
![]() |
a9abd933b5 | ||
![]() |
f1121fe66f | ||
![]() |
c26a2e399c | ||
![]() |
1af90721cc | ||
![]() |
9274a0fa17 | ||
![]() |
9443032c2a | ||
![]() |
8deb1cf2e6 | ||
![]() |
13c7ce6a0a | ||
![]() |
0f54824cdb | ||
![]() |
10cd722806 | ||
![]() |
4aca056c5b | ||
![]() |
15a8f40f6f | ||
![]() |
2ca2701f7a | ||
![]() |
78f63380f2 | ||
![]() |
7633d26806 | ||
![]() |
15cae6562f | ||
![]() |
c3546eb566 | ||
![]() |
2a77a29f48 | ||
![]() |
77be115eec | ||
![]() |
64a6c2e07c | ||
![]() |
045a3ba416 | ||
![]() |
4da2715d14 | ||
![]() |
2f0e99d420 | ||
![]() |
8694eaaf1a | ||
![]() |
0761885ebb | ||
![]() |
ca60a69b22 | ||
![]() |
c9db42583b | ||
![]() |
052a691a4d | ||
![]() |
2bcb0e5195 | ||
![]() |
745845db19 | ||
![]() |
de064d1d9c | ||
![]() |
3b2351af0b | ||
![]() |
5cf833e3d6 | ||
![]() |
409b53109b | ||
![]() |
da83edf231 | ||
![]() |
7be508214a | ||
![]() |
8b4a137252 | ||
![]() |
4565b01eeb | ||
![]() |
0675f66ee6 | ||
![]() |
b60d57c3a0 | ||
![]() |
0e3d95cac0 | ||
![]() |
548737a559 | ||
![]() |
598108d294 | ||
![]() |
a0261dbbcc | ||
![]() |
2418122b46 | ||
![]() |
f104e60afa | ||
![]() |
ed45f27f3e | ||
![]() |
40aa5c9caf | ||
![]() |
14b1ea4eb0 | ||
![]() |
5052a339e3 | ||
![]() |
2321890dde | ||
![]() |
4cb5770ee0 | ||
![]() |
3a35561d1d | ||
![]() |
6fbec53f8a | ||
![]() |
c707934018 | ||
![]() |
efd8efa248 | ||
![]() |
979861b764 | ||
![]() |
cdc53a159c | ||
![]() |
a203ed9cc5 | ||
![]() |
5cab5f0c08 | ||
![]() |
25ea80e169 | ||
![]() |
f43b4e9e24 | ||
![]() |
160fbb2589 | ||
![]() |
c85aa664e1 | ||
![]() |
51dcbf5db7 | ||
![]() |
fa114a4a03 | ||
![]() |
d7fd58bdb9 | ||
![]() |
38b0aea8e2 | ||
![]() |
41eade9325 | ||
![]() |
e64cf41aec | ||
![]() |
02872b5e75 | ||
![]() |
e4d49bb459 | ||
![]() |
d38b7d5a82 | ||
![]() |
537c5d3197 | ||
![]() |
575df2fcf6 | ||
![]() |
c08c3c6b37 | ||
![]() |
2acf28609e | ||
![]() |
bb59d0431e | ||
![]() |
1c7b1f1462 | ||
![]() |
f32d17d924 | ||
![]() |
928a4d8dce | ||
![]() |
dd3ba93308 | ||
![]() |
7e1b179cdd | ||
![]() |
a9a2c35f06 | ||
![]() |
58b88a6919 | ||
![]() |
f937876a1b | ||
![]() |
8193f43634 | ||
![]() |
1d3f880f82 | ||
![]() |
ef2fa8d2e2 | ||
![]() |
51997b3e7c | ||
![]() |
98785b00e2 | ||
![]() |
8d3694884d | ||
![]() |
a2821a98ad | ||
![]() |
8d552ae15c | ||
![]() |
6db4c60f47 | ||
![]() |
805c0385a0 | ||
![]() |
cea6e7a9f2 | ||
![]() |
127073c01b | ||
![]() |
30fe36ae05 | ||
![]() |
58bd677832 | ||
![]() |
1a3b369dd7 | ||
![]() |
6e38216abd | ||
![]() |
efcfc1f841 | ||
![]() |
8dea50ce83 | ||
![]() |
7a5a01bdcc | ||
![]() |
bd1450a682 | ||
![]() |
c538c1ce7f | ||
![]() |
b6d59c4f64 | ||
![]() |
a758ccaf5c | ||
![]() |
e8b04cc20a | ||
![]() |
9bcb15dbc0 | ||
![]() |
1e953167b6 | ||
![]() |
979586cdb2 | ||
![]() |
cd31fad56d | ||
![]() |
ff57d88e2a | ||
![]() |
06cb5e171e | ||
![]() |
a8b70a2e13 | ||
![]() |
948019ccee | ||
![]() |
89ed109505 | ||
![]() |
fae246c503 | ||
![]() |
2411b4287d | ||
![]() |
b3308ecbe0 | ||
![]() |
3541cbff5e | ||
![]() |
838ba7ff36 | ||
![]() |
e9802f92c9 | ||
![]() |
016fd24859 | ||
![]() |
d315e81ab2 | ||
![]() |
97c38b8534 | ||
![]() |
011e2b3df5 | ||
![]() |
e3ee9a299f | ||
![]() |
d73c10f874 | ||
![]() |
9e448b46ba | ||
![]() |
9f09c46789 | ||
![]() |
fe6634551a | ||
![]() |
22a7931a7c | ||
![]() |
94f112512f | ||
![]() |
b6509dca1f | ||
![]() |
620234e708 | ||
![]() |
d50e866cec | ||
![]() |
76ad6dca02 | ||
![]() |
cdb1520a63 | ||
![]() |
bbef706a33 | ||
![]() |
835509901f | ||
![]() |
b51f9586c4 | ||
![]() |
fc83cb9559 | ||
![]() |
f5f5f829ac | ||
![]() |
930eed4500 | ||
![]() |
01a8b58054 | ||
![]() |
eba1d01fc2 | ||
![]() |
84755836c9 | ||
![]() |
c9585033cb | ||
![]() |
2d312c276f | ||
![]() |
3b0d0e9928 | ||
![]() |
8307b153e3 | ||
![]() |
dfaffe3ec5 | ||
![]() |
8d7b15cbeb | ||
![]() |
00969a67ac | ||
![]() |
a374d4e817 | ||
![]() |
f5dda39f63 | ||
![]() |
fb5d54d5fe | ||
![]() |
d392b35fdd | ||
![]() |
3ceec006ac | ||
![]() |
62a574c6bd | ||
![]() |
821c10b2bd | ||
![]() |
fa3269a098 | ||
![]() |
a9bdab4b49 | ||
![]() |
0df5b7d87b | ||
![]() |
4861fc70ce | ||
![]() |
47c443bb92 | ||
![]() |
9cb4b49597 | ||
![]() |
865523fd37 | ||
![]() |
1df35a6fe1 | ||
![]() |
e70c9d8a30 | ||
![]() |
7d6b00ea4a | ||
![]() |
e5fc985915 | ||
![]() |
71ccaa2bd0 | ||
![]() |
e127f23a08 | ||
![]() |
495f9f2373 | ||
![]() |
27274286db | ||
![]() |
85ba886029 | ||
![]() |
2f3a868e42 | ||
![]() |
a51b80f456 | ||
![]() |
f27a426879 | ||
![]() |
19ca485c28 | ||
![]() |
7deed55c2d | ||
![]() |
4c5c6f072c | ||
![]() |
f174e08ad6 | ||
![]() |
2658f95347 | ||
![]() |
311c981d1a | ||
![]() |
d6d3bf0583 | ||
![]() |
a1a601a4d3 | ||
![]() |
14776eae76 | ||
![]() |
bef4034ab8 | ||
![]() |
ad988f2a24 | ||
![]() |
6599ae0ee0 | ||
![]() |
4f1ed690cd | ||
![]() |
4ffaee6013 | ||
![]() |
e1ce19547e | ||
![]() |
039040b972 | ||
![]() |
7a1af3d346 | ||
![]() |
1e98774b62 | ||
![]() |
4b4d6c6866 | ||
![]() |
65ff83d359 | ||
![]() |
e509c804ae | ||
![]() |
992827e225 | ||
![]() |
083e97add8 | ||
![]() |
05378d18c0 | ||
![]() |
3dd465acc9 | ||
![]() |
8f6e36f781 | ||
![]() |
85fe56db57 | ||
![]() |
8e07429e47 | ||
![]() |
ced6d702b9 | ||
![]() |
25d7de4dfa | ||
![]() |
82754c0dfe | ||
![]() |
e604b022ee | ||
![]() |
6b29022822 | ||
![]() |
2e671cc5ee | ||
![]() |
f25692b98c | ||
![]() |
c4a011b261 | ||
![]() |
a935bac20b | ||
![]() |
0a3a98cb42 | ||
![]() |
adb39ca93f | ||
![]() |
5fdc340e58 | ||
![]() |
bb64dca6e6 | ||
![]() |
685788bcdf | ||
![]() |
e949aa35f3 | ||
![]() |
fc80bf0df4 | ||
![]() |
bd9740e866 | ||
![]() |
3a260a8fd9 | ||
![]() |
c87e6a5a42 | ||
![]() |
8bc3319523 | ||
![]() |
bdfcf1a2df | ||
![]() |
7f4284f2af | ||
![]() |
fd69120aa6 | ||
![]() |
5df60b17e8 | ||
![]() |
cb835b5ae6 | ||
![]() |
9eab92513a | ||
![]() |
29e8f50ab8 | ||
![]() |
aa0496b236 | ||
![]() |
06e9cec21a | ||
![]() |
0fe27088df | ||
![]() |
54d226116d | ||
![]() |
4b37e30680 | ||
![]() |
7c5f710deb | ||
![]() |
5a3ebaf683 | ||
![]() |
233da0e48f | ||
![]() |
96380d8d28 | ||
![]() |
c84a0edf20 | ||
![]() |
a3cf445c93 | ||
![]() |
3f31979f66 | ||
![]() |
44416edfd2 | ||
![]() |
351c45da75 | ||
![]() |
e27c5dad15 | ||
![]() |
dc510f22ac | ||
![]() |
1b78011f8b | ||
![]() |
a908828bf4 | ||
![]() |
55b7eb62f6 | ||
![]() |
10e8fcf3b9 | ||
![]() |
f1b0c05447 | ||
![]() |
de22bd688e | ||
![]() |
9fe35b4fb5 | ||
![]() |
f13d08d37a | ||
![]() |
a0ecb46584 | ||
![]() |
0c57df0c8e | ||
![]() |
9c902c5c69 | ||
![]() |
af412c3105 | ||
![]() |
ec43448163 | ||
![]() |
9f7e0ecd55 | ||
![]() |
e50515a17c | ||
![]() |
7c345db6fe | ||
![]() |
51c2268c1e | ||
![]() |
51feca05a5 | ||
![]() |
3889504292 | ||
![]() |
7bd6ff374a | ||
![]() |
44fa34203a | ||
![]() |
ff351c7f6d | ||
![]() |
960b00d85a | ||
![]() |
18e3eacd7f | ||
![]() |
f4a1da33c4 | ||
![]() |
49de5be44e | ||
![]() |
383657e8ce | ||
![]() |
3af970ead6 | ||
![]() |
6caec79958 | ||
![]() |
33bbd92d9b | ||
![]() |
9dba78fbcd | ||
![]() |
630d85ec78 | ||
![]() |
f0d46e8671 | ||
![]() |
db0593f0b2 | ||
![]() |
1d83c0c77a | ||
![]() |
5e5fd3a79b | ||
![]() |
c61995aab8 | ||
![]() |
37c393f857 | ||
![]() |
8e043a01c1 | ||
![]() |
c7b6b2ddb3 | ||
![]() |
522f68bf68 | ||
![]() |
7d4866234f | ||
![]() |
7aa5bcfc7c | ||
![]() |
04b59f0896 | ||
![]() |
796f9a203e | ||
![]() |
22c8cda0d7 | ||
![]() |
1cf534ccc5 | ||
![]() |
6d8c821148 | ||
![]() |
264e9665b0 | ||
![]() |
53fa8e48c0 | ||
![]() |
e406aa4144 | ||
![]() |
4953ba5077 | ||
![]() |
0a97ac0578 | ||
![]() |
56af4752f4 | ||
![]() |
81413d08ed | ||
![]() |
2bc2a476d9 | ||
![]() |
4d070a65c6 | ||
![]() |
6185fbaf26 | ||
![]() |
698a126b93 | ||
![]() |
acf921f55d | ||
![]() |
f5a78c88f8 | ||
![]() |
206ece1575 | ||
![]() |
a8028dbe10 | ||
![]() |
c605af6ccc | ||
![]() |
b7b8e6c40e | ||
![]() |
3fcb1de419 | ||
![]() |
12034fe5fc | ||
![]() |
56959d781a | ||
![]() |
9a2f025646 | ||
![]() |
12cc163058 | ||
![]() |
74971d9753 | ||
![]() |
a9157e3a9f | ||
![]() |
b96697b708 | ||
![]() |
81e6896391 | ||
![]() |
2dcaa3608d | ||
![]() |
e21671ec5e | ||
![]() |
7841f14163 | ||
![]() |
cc9f594ab4 | ||
![]() |
ebfaaeaa6b | ||
![]() |
ffa91e150d | ||
![]() |
06fa9f9a9e | ||
![]() |
9f203c42ec | ||
![]() |
5d0d34a4af | ||
![]() |
c2cfc0d3d4 | ||
![]() |
0f4810d41f | ||
![]() |
175848f2a8 | ||
![]() |
472bd66f4d | ||
![]() |
168ea32d2c | ||
![]() |
e82d6b1ea4 | ||
![]() |
6c60ca088c | ||
![]() |
83e8f935fd | ||
![]() |
71867302a4 | ||
![]() |
8bcc402c5f | ||
![]() |
72b7d2a123 | ||
![]() |
20c1183450 | ||
![]() |
0bbfbd2544 | ||
![]() |
350bd9c32f | ||
![]() |
dcca8b0a9a | ||
![]() |
f77b479e45 | ||
![]() |
216565affb | ||
![]() |
6f235c2a11 | ||
![]() |
27a770bd1d | ||
![]() |
ef15b67571 | ||
![]() |
6aad966c52 | ||
![]() |
9811f11859 | ||
![]() |
13148ec7fb | ||
![]() |
b2d7464790 | ||
![]() |
ce84e185ad | ||
![]() |
c3f5ee43b6 | ||
![]() |
e2dc1a4471 | ||
![]() |
e787e59b49 | ||
![]() |
f0ed2eba2b | ||
![]() |
2364e1e652 | ||
![]() |
cc56944d75 | ||
![]() |
69cea9fc96 | ||
![]() |
fcebc9d1ed | ||
![]() |
9350e4f961 | ||
![]() |
387e0ad03e | ||
![]() |
61fec8b290 | ||
![]() |
1228baebf4 | ||
![]() |
a30063e85c | ||
![]() |
524cebac4d | ||
![]() |
c94114a566 | ||
![]() |
b6ec7a9e64 | ||
![]() |
69be7a6d22 | ||
![]() |
58155c35f9 | ||
![]() |
7b2377291f | ||
![]() |
657ee84e39 | ||
![]() |
2e4b545265 | ||
![]() |
2de1d35dd1 | ||
![]() |
2b082b362d | ||
![]() |
dfdd0d6b4b | ||
![]() |
a00e81c03f | ||
![]() |
776e6bb418 | ||
![]() |
b31fca656e | ||
![]() |
fa783a0d2c | ||
![]() |
96c0fbaf10 | ||
![]() |
24f7801ddc | ||
![]() |
8e83e007e9 | ||
![]() |
d0db466e67 | ||
![]() |
3010bd4eb6 | ||
![]() |
069bed8815 | ||
![]() |
d2088ae5f8 | ||
![]() |
0ca5a241bb | ||
![]() |
dff32a8e84 | ||
![]() |
4a20344652 | ||
![]() |
98b969ef06 | ||
![]() |
c8cb8aecf7 | ||
![]() |
73e8875018 | ||
![]() |
02aed9c084 | ||
![]() |
89148f8fff | ||
![]() |
6bde527f5c | ||
![]() |
d62aabc01b | ||
![]() |
82299a3799 | ||
![]() |
c02f30dd7e | ||
![]() |
e91983adb4 | ||
![]() |
ff88359429 | ||
![]() |
5a60d5cbe8 | ||
![]() |
2b41ffe019 | ||
![]() |
1c23e26f93 | ||
![]() |
3d555f951d | ||
![]() |
6d39b4d7cd | ||
![]() |
4fe5d09f01 | ||
![]() |
e52af3bfb4 | ||
![]() |
0467b33cd5 | ||
![]() |
14167f6e13 | ||
![]() |
7a1aba6f81 | ||
![]() |
920f7f2ece | ||
![]() |
06fadbd70f | ||
![]() |
d4f486864f | ||
![]() |
d3a21303d9 | ||
![]() |
e1cbfdd84b | ||
![]() |
87170a4497 | ||
![]() |
ae6f8bd345 | ||
![]() |
b9496e0972 | ||
![]() |
c36a6dcd65 | ||
![]() |
19ca836b78 | ||
![]() |
8a6ea7ab50 | ||
![]() |
6721b8f265 | ||
![]() |
9393521f98 | ||
![]() |
398b24e0ab | ||
![]() |
374bcf8073 | ||
![]() |
7e3859e2f5 | ||
![]() |
490ec0d462 | ||
![]() |
15bf1ee50e | ||
![]() |
6376d92a0d | ||
![]() |
10230b0b4c | ||
![]() |
2495cda5ec | ||
![]() |
ae8ddca040 | ||
![]() |
0212d027fb | ||
![]() |
a3096153ab | ||
![]() |
7434ca9e99 | ||
![]() |
4ac7f7dcf0 | ||
![]() |
e9f5b13aa5 | ||
![]() |
1fbb6d46ea | ||
![]() |
8dbfea75b1 | ||
![]() |
3b3840c087 | ||
![]() |
a21353909d | ||
![]() |
5497ed885a | ||
![]() |
39baea759a | ||
![]() |
80ddb1d262 | ||
![]() |
e24987a610 | ||
![]() |
9e5c276e3b | ||
![]() |
c33d31996d | ||
![]() |
aa1f08fe8a | ||
![]() |
d78689554a | ||
![]() |
5bee1d851c | ||
![]() |
ddb8eef4d1 | ||
![]() |
da513e7347 | ||
![]() |
4279d7fd16 | ||
![]() |
934eab2e8c | ||
![]() |
2a31edc768 | ||
![]() |
fcdd66dc6e | ||
![]() |
a65d3222b9 | ||
![]() |
36179596a0 | ||
![]() |
c083c850c1 | ||
![]() |
ff903d7b5a | ||
![]() |
dd603e1ec2 | ||
![]() |
a2f06b1553 | ||
![]() |
8115d2b3d3 | ||
![]() |
4f97bb9e0b | ||
![]() |
84d24a2c4d | ||
![]() |
b709061656 | ||
![]() |
cd9034b3f1 | ||
![]() |
25d324c73a | ||
![]() |
3a834d1a73 | ||
![]() |
e9fecb817d | ||
![]() |
56e70d7ec4 | ||
![]() |
2e73a85aa9 | ||
![]() |
1e119e9c03 | ||
![]() |
6f6e5c97df | ||
![]() |
6ef99974cf | ||
![]() |
8984b9aef6 | ||
![]() |
63e08b15bc | ||
![]() |
319b2b5d4c | ||
![]() |
bae7bb8ce4 | ||
![]() |
0b44df366c | ||
![]() |
f253c797af | ||
![]() |
0a8b1c2797 | ||
![]() |
3b45fb417b | ||
![]() |
2a2d92e3c5 | ||
![]() |
a320e42ed5 | ||
![]() |
fdef712e01 | ||
![]() |
5717ac19d7 | ||
![]() |
33d7d76fee | ||
![]() |
73bdaa623c | ||
![]() |
8ca8f59a0b | ||
![]() |
745af3c039 | ||
![]() |
5d17e1011a | ||
![]() |
826464c41b | ||
![]() |
a643df8cac | ||
![]() |
24ded99286 | ||
![]() |
6646eee504 | ||
![]() |
f55c10914e | ||
![]() |
b1e768f69e | ||
![]() |
4702f8bd5e | ||
![]() |
69959b2c97 | ||
![]() |
9d6f4f5392 | ||
![]() |
36b9a609bf | ||
![]() |
36ae0c82b6 | ||
![]() |
e11011ee51 | ||
![]() |
9125211a57 | ||
![]() |
3a4ef6ceb3 | ||
![]() |
ca82993278 | ||
![]() |
0925af91e3 | ||
![]() |
80bc32243c | ||
![]() |
f0d232880d | ||
![]() |
7c790dbbd9 | ||
![]() |
899b17e992 | ||
![]() |
d1b4521290 | ||
![]() |
9bb4feef29 | ||
![]() |
4bcdc98a31 | ||
![]() |
26f8c1df92 | ||
![]() |
a481ad73f3 | ||
![]() |
e4ac17fea6 | ||
![]() |
bcd940e95b | ||
![]() |
5365aa4466 | ||
![]() |
a0d106529c | ||
![]() |
bf1a9ec42d | ||
![]() |
fc5d97562f | ||
![]() |
f5c171e44f | ||
![]() |
a3c3f15806 | ||
![]() |
ef58a219ec | ||
![]() |
6708fe36e3 | ||
![]() |
e02fa2824c | ||
![]() |
a20f927082 | ||
![]() |
6d71e3fe81 | ||
![]() |
4056fcd75d | ||
![]() |
1e723cf0e3 | ||
![]() |
ce3f670597 | ||
![]() |
ce3d3d58ec | ||
![]() |
a92cab48e0 | ||
![]() |
ee76317392 | ||
![]() |
380ca13be1 | ||
![]() |
93f4c5e207 | ||
![]() |
e438858da0 | ||
![]() |
428a4dd849 | ||
![]() |
39cc8aaa13 | ||
![]() |
39a62864de | ||
![]() |
71a162a871 | ||
![]() |
05d7eff09a | ||
![]() |
7b8ad0782d | ||
![]() |
df3e9e3a5e | ||
![]() |
8cdc769ec8 | ||
![]() |
76e1304241 | ||
![]() |
eb9b1ff03d | ||
![]() |
b3b12d35fd | ||
![]() |
74485262e7 | ||
![]() |
615e68b29b | ||
![]() |
927b4695c9 | ||
![]() |
11811701d0 | ||
![]() |
05c8022db3 | ||
![]() |
a9ebb147c5 | ||
![]() |
ba8ca4d9ee | ||
![]() |
3574df1385 | ||
![]() |
b4497d231b | ||
![]() |
5aa9b0245a | ||
![]() |
4c72c3aafc | ||
![]() |
bf4f40f991 | ||
![]() |
603334f4f3 | ||
![]() |
46548af165 | ||
![]() |
8ef32b40c8 | ||
![]() |
fb25377087 | ||
![]() |
a75fd2d07e | ||
![]() |
e30f39e97e | ||
![]() |
4818ad7465 | ||
![]() |
5e4e9740c7 | ||
![]() |
d4e41dbf80 | ||
![]() |
cea1a1a15f | ||
![]() |
c2700b14dc | ||
![]() |
07d27170db | ||
![]() |
8eb8c07df6 | ||
![]() |
7bee6f884c | ||
![]() |
78dd20e314 | ||
![]() |
2a011b6448 | ||
![]() |
5c90370ec8 | ||
![]() |
120465b88d | ||
![]() |
c77292439a | ||
![]() |
0a0209f81a | ||
![]() |
69a7ed8a5c | ||
![]() |
8df35ab488 | ||
![]() |
a12567d0a8 | ||
![]() |
64fe190119 | ||
![]() |
e3ede66943 | ||
![]() |
2672b800d4 | ||
![]() |
c60d4bda92 | ||
![]() |
db9d0f2639 | ||
![]() |
02d4045ec3 | ||
![]() |
a308ea6927 | ||
![]() |
edc5e5e812 | ||
![]() |
23b65cb479 | ||
![]() |
e5eabd2143 | ||
![]() |
b0dd043975 | ||
![]() |
435a1096ed | ||
![]() |
21a9084ca0 | ||
![]() |
10d9135d86 | ||
![]() |
272d8b29f3 | ||
![]() |
3d665b9eec | ||
![]() |
c563f484c9 | ||
![]() |
38268ea4ea | ||
![]() |
c1ad64cddf | ||
![]() |
b898cd2a3a | ||
![]() |
937b31d845 | ||
![]() |
e4e655493b | ||
![]() |
387d2dcc2e | ||
![]() |
8abe33d48a | ||
![]() |
860442d5c4 | ||
![]() |
ce5183ce16 | ||
![]() |
3e69b04b86 | ||
![]() |
8b9cd4f122 | ||
![]() |
c0e3ccdb83 | ||
![]() |
e8cc85c487 | ||
![]() |
b3eff41692 | ||
![]() |
1ea63f185c | ||
![]() |
a513d5c09a | ||
![]() |
fb8216c102 | ||
![]() |
4f381d01df | ||
![]() |
de3382226e | ||
![]() |
77be830b72 | ||
![]() |
09c0e1320f | ||
![]() |
cc4ee59542 | ||
![]() |
1f448744f3 | ||
![]() |
ee2c257057 | ||
![]() |
be8439d4ac | ||
![]() |
981f2b193c | ||
![]() |
39087e09ce | ||
![]() |
59960efb9c | ||
![]() |
5a53bb5981 | ||
![]() |
a67fe69cbb | ||
![]() |
9ce2b0765f | ||
![]() |
2e53a48504 | ||
![]() |
8e4db0c3ec | ||
![]() |
4072b06faf | ||
![]() |
a2cf7ece70 | ||
![]() |
734fe3afde | ||
![]() |
7f3bc91c1d | ||
![]() |
9c2c95757d | ||
![]() |
b5ed6c586a | ||
![]() |
35033d1f76 | ||
![]() |
9e41d0c5b0 | ||
![]() |
62e92fada9 | ||
![]() |
ae0a1a657f | ||
![]() |
81e511ba8e | ||
![]() |
d89cb91c8c | ||
![]() |
dc31b6e6fe | ||
![]() |
930a32de1a | ||
![]() |
e40f2ed8e3 | ||
![]() |
abbd3d1078 | ||
![]() |
63c9948456 | ||
![]() |
b6c81d779a | ||
![]() |
2480c83169 | ||
![]() |
334cc66cf6 | ||
![]() |
3cf189ad94 | ||
![]() |
6ffb94a0f5 | ||
![]() |
3593826441 | ||
![]() |
0a0a62f238 | ||
![]() |
41ce9913d2 | ||
![]() |
b77c42384d | ||
![]() |
138bb12f98 | ||
![]() |
4fe2859f4e | ||
![]() |
0768b2b4bc | ||
![]() |
e6f1772a93 | ||
![]() |
5374b2b3b9 | ||
![]() |
1196788856 | ||
![]() |
9f3f47eb80 | ||
![]() |
1a90a478f2 | ||
![]() |
ee773f3b63 | ||
![]() |
5ffc27f60c | ||
![]() |
4c13dfb43c | ||
![]() |
bc099f0d81 | ||
![]() |
b26dd0af19 | ||
![]() |
0dee5bd763 | ||
![]() |
0765387ad8 | ||
![]() |
a07517bd3c | ||
![]() |
e5f0d80d96 | ||
![]() |
2fc5e3b7d9 | ||
![]() |
778bc46848 | ||
![]() |
882586b246 | ||
![]() |
b7c07a2555 | ||
![]() |
814b504fa9 | ||
![]() |
7ae430e7a8 | ||
![]() |
0e7e95ba20 | ||
![]() |
e577d8acb2 | ||
![]() |
0a76ab5054 | ||
![]() |
03c5596e04 | ||
![]() |
3af4e14e83 | ||
![]() |
7c8cf57820 | ||
![]() |
8d84a8a62e | ||
![]() |
08c45060bd | ||
![]() |
7ca8d2811b | ||
![]() |
bb6898b032 | ||
![]() |
cd86c6814e | ||
![]() |
b67e116650 | ||
![]() |
57ce411fb6 | ||
![]() |
85ed4d9e8d | ||
![]() |
ccb39da569 | ||
![]() |
dd7ba64d32 | ||
![]() |
de3edb1654 | ||
![]() |
d262151727 | ||
![]() |
a37c90af96 | ||
![]() |
0a3a752b4c | ||
![]() |
0a34f427f8 | ||
![]() |
157740e374 | ||
![]() |
b0e994f3f5 | ||
![]() |
f374852801 | ||
![]() |
709f034f2e | ||
![]() |
6d6deb8c66 | ||
![]() |
5771b417bc | ||
![]() |
51efcefdab | ||
![]() |
d31ab5139d | ||
![]() |
ce18183daa | ||
![]() |
b8b73cf880 | ||
![]() |
5291e6c1f3 | ||
![]() |
626a9f06c4 | ||
![]() |
72338eb5b8 | ||
![]() |
7bd77c6e99 | ||
![]() |
69151b962a | ||
![]() |
86305d4fe4 | ||
![]() |
d5c3850a3f | ||
![]() |
3e645b6175 | ||
![]() |
89dc78bc05 | ||
![]() |
164c403d05 | ||
![]() |
5e8007453f | ||
![]() |
0a0d97b084 | ||
![]() |
eb604ed92d | ||
![]() |
c47828dbaa | ||
![]() |
ea437dc745 | ||
![]() |
c16a208b39 | ||
![]() |
55d803b2a0 | ||
![]() |
611f6f2829 | ||
![]() |
b94df76731 | ||
![]() |
218619e7f0 | ||
![]() |
273eed901a | ||
![]() |
8ea712a937 | ||
![]() |
658449a7a0 | ||
![]() |
968c471591 | ||
![]() |
b4665f3907 | ||
![]() |
496cee1ec4 | ||
![]() |
0f8c80f3ba | ||
![]() |
6c28f82239 | ||
![]() |
def32abb57 | ||
![]() |
f57a241b9e | ||
![]() |
11a7e8b15d | ||
![]() |
fa4f7697b7 | ||
![]() |
6098b7de8e | ||
![]() |
0a382ce54d | ||
![]() |
dd53aaa30c | ||
![]() |
31e175a15a |
60
.devcontainer/Dockerfile
Normal file
60
.devcontainer/Dockerfile
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8
|
||||||
|
|
||||||
|
ENV DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
SHELL ["/bin/bash", "-c"]
|
||||||
|
|
||||||
|
WORKDIR /workspaces
|
||||||
|
|
||||||
|
# Set Docker daemon config
|
||||||
|
RUN \
|
||||||
|
mkdir -p /etc/docker \
|
||||||
|
&& echo '{"storage-driver": "vfs"}' > /etc/docker/daemon.json
|
||||||
|
|
||||||
|
# Install Node/Yarn for Frontent
|
||||||
|
RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
|
||||||
|
&& apt-get update \
|
||||||
|
&& apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
curl \
|
||||||
|
git \
|
||||||
|
apt-utils \
|
||||||
|
apt-transport-https \
|
||||||
|
&& echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
|
||||||
|
&& apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
nodejs \
|
||||||
|
yarn \
|
||||||
|
&& curl -o - https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
ENV NVM_DIR /root/.nvm
|
||||||
|
|
||||||
|
# Install docker
|
||||||
|
# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
apt-transport-https \
|
||||||
|
ca-certificates \
|
||||||
|
curl \
|
||||||
|
software-properties-common \
|
||||||
|
gpg-agent \
|
||||||
|
&& curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
|
||||||
|
&& add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \
|
||||||
|
&& apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
docker-ce \
|
||||||
|
docker-ce-cli \
|
||||||
|
containerd.io \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Install tools
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
jq \
|
||||||
|
dbus \
|
||||||
|
network-manager \
|
||||||
|
libpulse0 \
|
||||||
|
&& bash <(curl https://getvcn.codenotary.com -L) \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Install Python dependencies from requirements.txt if it exists
|
||||||
|
COPY requirements.txt requirements_tests.txt ./
|
||||||
|
RUN pip3 install -U setuptools pip \
|
||||||
|
&& pip3 install -r requirements.txt -r requirements_tests.txt \
|
||||||
|
&& pip3 install tox \
|
||||||
|
&& rm -f requirements.txt requirements_tests.txt
|
33
.devcontainer/devcontainer.json
Normal file
33
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
{
|
||||||
|
"name": "Supervisor dev",
|
||||||
|
"context": "..",
|
||||||
|
"dockerFile": "Dockerfile",
|
||||||
|
"appPort": "9123:8123",
|
||||||
|
"postCreateCommand": "pre-commit install",
|
||||||
|
"runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
|
||||||
|
"containerEnv": {"NVM_DIR":"/usr/local/share/nvm"},
|
||||||
|
"extensions": [
|
||||||
|
"ms-python.python",
|
||||||
|
"ms-python.vscode-pylance",
|
||||||
|
"visualstudioexptteam.vscodeintellicode",
|
||||||
|
"esbenp.prettier-vscode"
|
||||||
|
],
|
||||||
|
"settings": {
|
||||||
|
"terminal.integrated.shell.linux": "/bin/bash",
|
||||||
|
"editor.formatOnPaste": false,
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.formatOnType": true,
|
||||||
|
"files.trimTrailingWhitespace": true,
|
||||||
|
"python.pythonPath": "/usr/local/bin/python3",
|
||||||
|
"python.linting.pylintEnabled": true,
|
||||||
|
"python.linting.enabled": true,
|
||||||
|
"python.formatting.provider": "black",
|
||||||
|
"python.formatting.blackArgs": ["--target-version", "py38"],
|
||||||
|
"python.formatting.blackPath": "/usr/local/bin/black",
|
||||||
|
"python.linting.banditPath": "/usr/local/bin/bandit",
|
||||||
|
"python.linting.flake8Path": "/usr/local/bin/flake8",
|
||||||
|
"python.linting.mypyPath": "/usr/local/bin/mypy",
|
||||||
|
"python.linting.pylintPath": "/usr/local/bin/pylint",
|
||||||
|
"python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
|
||||||
|
}
|
||||||
|
}
|
@@ -1,13 +1,23 @@
|
|||||||
# General files
|
# General files
|
||||||
.git
|
.git
|
||||||
.github
|
.github
|
||||||
|
.devcontainer
|
||||||
|
.vscode
|
||||||
|
|
||||||
# Test related files
|
# Test related files
|
||||||
.tox
|
.tox
|
||||||
|
|
||||||
# Temporary files
|
# Temporary files
|
||||||
**/__pycache__
|
**/__pycache__
|
||||||
|
.pytest_cache
|
||||||
|
|
||||||
# virtualenv
|
# virtualenv
|
||||||
venv/
|
venv/
|
||||||
ENV/
|
|
||||||
|
# Data
|
||||||
|
home-assistant-polymer/
|
||||||
|
script/
|
||||||
|
tests/
|
||||||
|
|
||||||
|
# Test ENV
|
||||||
|
data/
|
||||||
|
64
.github/ISSUE_TEMPLATE.md
vendored
64
.github/ISSUE_TEMPLATE.md
vendored
@@ -1,29 +1,69 @@
|
|||||||
|
---
|
||||||
|
name: Report a bug with the Supervisor on a supported System
|
||||||
|
about: Report an issue related to the Home Assistant Supervisor.
|
||||||
|
labels: bug
|
||||||
|
---
|
||||||
|
|
||||||
<!-- READ THIS FIRST:
|
<!-- READ THIS FIRST:
|
||||||
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
|
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
|
||||||
- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
|
|
||||||
- Do not report issues for components here, plaese refer to https://github.com/home-assistant/home-assistant/issues
|
|
||||||
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
|
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
|
||||||
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
|
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
|
||||||
- If you have a problem with a Add-on, make a issue on there repository.
|
- If you have a problem with an add-on, make an issue in it's repository.
|
||||||
-->
|
-->
|
||||||
|
|
||||||
**Home Assistant release with the issue:**
|
|
||||||
<!--
|
<!--
|
||||||
- Frontend -> Developer tools -> Info
|
Important: You can only fill a bug repport for an supported system! If you run an unsupported installation. This report would be closed without comment.
|
||||||
- Or use this command: hass --version
|
|
||||||
-->
|
-->
|
||||||
|
|
||||||
**Operating environment (HassOS/Generic):**
|
### Describe the issue
|
||||||
|
|
||||||
|
<!-- Provide as many details as possible. -->
|
||||||
|
|
||||||
|
### Steps to reproduce
|
||||||
|
|
||||||
|
<!-- What do you do to encounter the issue. -->
|
||||||
|
|
||||||
|
1. ...
|
||||||
|
2. ...
|
||||||
|
3. ...
|
||||||
|
|
||||||
|
### Enviroment details
|
||||||
|
|
||||||
|
<!-- You can find these details in the system tab of the supervisor panel, or by using the `ha` CLI. -->
|
||||||
|
|
||||||
|
- **Operating System:**: xxx
|
||||||
|
- **Supervisor version:**: xxx
|
||||||
|
- **Home Assistant version**: xxx
|
||||||
|
|
||||||
|
### Supervisor logs
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>Supervisor logs</summary>
|
||||||
<!--
|
<!--
|
||||||
Please provide details about your environment.
|
- Frontend -> Supervisor -> System
|
||||||
|
- Or use this command: ha supervisor logs
|
||||||
|
- Logs are more than just errors, even if you don't think it's important, it is.
|
||||||
-->
|
-->
|
||||||
|
|
||||||
**Supervisor logs:**
|
```
|
||||||
|
Paste supervisor logs here
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
### System Information
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>System Information</summary>
|
||||||
<!--
|
<!--
|
||||||
- Frontend -> Hass.io -> System
|
- Use this command: ha info
|
||||||
- Or use this command: hassio su logs
|
|
||||||
-->
|
-->
|
||||||
|
|
||||||
|
```
|
||||||
|
Paste system info here
|
||||||
|
|
||||||
**Description of problem:**
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
106
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
106
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
name: Bug Report Form
|
||||||
|
about: Report an issue related to the Home Assistant Supervisor.
|
||||||
|
labels: bug
|
||||||
|
title: ""
|
||||||
|
issue_body: true
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
This issue form is for reporting bugs with **supported** setups only!
|
||||||
|
|
||||||
|
If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].
|
||||||
|
|
||||||
|
[fr]: https://community.home-assistant.io/c/feature-requests
|
||||||
|
- type: textarea
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
attributes:
|
||||||
|
label: Describe the issue you are experiencing
|
||||||
|
description: Provide a clear and concise description of what the bug is.
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
## Environment
|
||||||
|
- type: input
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
attributes:
|
||||||
|
label: What is the used version of the Supervisor?
|
||||||
|
placeholder: supervisor-
|
||||||
|
description: >
|
||||||
|
Can be found in the Supervisor panel -> System tab. Starts with
|
||||||
|
`supervisor-....`.
|
||||||
|
- type: dropdown
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
attributes:
|
||||||
|
label: What type of installation are you running?
|
||||||
|
description: >
|
||||||
|
If you don't know, you can find it in: Configuration panel -> Info.
|
||||||
|
options:
|
||||||
|
- Home Assistant OS
|
||||||
|
- Home Assistant Supervised
|
||||||
|
- type: dropdown
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
attributes:
|
||||||
|
label: Which operating system are you running on?
|
||||||
|
options:
|
||||||
|
- Home Assistant Operating System
|
||||||
|
- Debian
|
||||||
|
- Other (e.g., Raspbian/Raspberry Pi OS/Fedora)
|
||||||
|
- type: input
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
attributes:
|
||||||
|
label: What is the version of your installed operating system?
|
||||||
|
placeholder: "5.11"
|
||||||
|
description: Can be found in the Supervisor panel -> System tab.
|
||||||
|
- type: input
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
attributes:
|
||||||
|
label: What version of Home Assistant Core is installed?
|
||||||
|
placeholder: core-
|
||||||
|
description: >
|
||||||
|
Can be found in the Supervisor panel -> System tab. Starts with
|
||||||
|
`core-....`.
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
# Details
|
||||||
|
- type: textarea
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
attributes:
|
||||||
|
label: Steps to reproduce the issue
|
||||||
|
description: |
|
||||||
|
Please tell us exactly how to reproduce your issue.
|
||||||
|
Provide clear and concise step by step instructions and add code snippets if needed.
|
||||||
|
value: |
|
||||||
|
1.
|
||||||
|
2.
|
||||||
|
3.
|
||||||
|
...
|
||||||
|
- type: textarea
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
attributes:
|
||||||
|
label: Anything in the Supervisor logs that might be useful for us?
|
||||||
|
description: >
|
||||||
|
The Supervisor logs can be found in the Supervisor panel -> System tab.
|
||||||
|
value: |
|
||||||
|
```txt
|
||||||
|
# Put your logs below this line
|
||||||
|
|
||||||
|
```
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
## Additional information
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
If you have any additional information for us, use the field below.
|
||||||
|
Please note, you can attach screenshots or screen recordings here.
|
25
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
25
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
blank_issues_enabled: false
|
||||||
|
contact_links:
|
||||||
|
- name: Report a bug/issues with an unsupported Supervisor
|
||||||
|
url: https://community.home-assistant.io
|
||||||
|
about: The Community guide can help or was updated to solve your issue
|
||||||
|
|
||||||
|
- name: Report a bug for the Supervisor panel
|
||||||
|
url: https://github.com/home-assistant/frontend/issues
|
||||||
|
about: The Supervisor panel is a part of the Home Assistant frontend
|
||||||
|
|
||||||
|
- name: Report incorrect or missing information on our developer documentation
|
||||||
|
url: https://github.com/home-assistant/developers.home-assistant.io/issues
|
||||||
|
about: Our documentation has its own issue tracker. Please report issues with the website there.
|
||||||
|
|
||||||
|
- name: Request a feature for the Supervisor
|
||||||
|
url: https://community.home-assistant.io/c/feature-requests
|
||||||
|
about: Request an new feature for the Supervisor.
|
||||||
|
|
||||||
|
- name: I have a question or need support
|
||||||
|
url: https://www.home-assistant.io/help
|
||||||
|
about: We use GitHub for tracking bugs, check our website for resources on getting help.
|
||||||
|
|
||||||
|
- name: I'm unsure where to go?
|
||||||
|
url: https://www.home-assistant.io/join-chat
|
||||||
|
about: If you are unsure where to go, then joining our chat is recommended; Just ask!
|
69
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
69
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
<!--
|
||||||
|
You are amazing! Thanks for contributing to our project!
|
||||||
|
Please, DO NOT DELETE ANY TEXT from this template! (unless instructed).
|
||||||
|
-->
|
||||||
|
|
||||||
|
## Proposed change
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Describe the big picture of your changes here to communicate to the
|
||||||
|
maintainers why we should accept this pull request. If it fixes a bug
|
||||||
|
or resolves a feature request, be sure to link to that issue in the
|
||||||
|
additional information section.
|
||||||
|
-->
|
||||||
|
|
||||||
|
## Type of change
|
||||||
|
|
||||||
|
<!--
|
||||||
|
What type of change does your PR introduce to Home Assistant?
|
||||||
|
NOTE: Please, check only 1! box!
|
||||||
|
If your PR requires multiple boxes to be checked, you'll most likely need to
|
||||||
|
split it into multiple PRs. This makes things easier and faster to code review.
|
||||||
|
-->
|
||||||
|
|
||||||
|
- [ ] Dependency upgrade
|
||||||
|
- [ ] Bugfix (non-breaking change which fixes an issue)
|
||||||
|
- [ ] New feature (which adds functionality to the supervisor)
|
||||||
|
- [ ] Breaking change (fix/feature causing existing functionality to break)
|
||||||
|
- [ ] Code quality improvements to existing code or addition of tests
|
||||||
|
|
||||||
|
## Additional information
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Details are important, and help maintainers processing your PR.
|
||||||
|
Please be sure to fill out additional details, if applicable.
|
||||||
|
-->
|
||||||
|
|
||||||
|
- This PR fixes or closes issue: fixes #
|
||||||
|
- This PR is related to issue:
|
||||||
|
- Link to documentation pull request:
|
||||||
|
- Link to cli pull request:
|
||||||
|
|
||||||
|
## Checklist
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Put an `x` in the boxes that apply. You can also fill these out after
|
||||||
|
creating the PR. If you're unsure about any of them, don't hesitate to ask.
|
||||||
|
We're here to help! This is simply a reminder of what we are going to look
|
||||||
|
for before merging your code.
|
||||||
|
-->
|
||||||
|
|
||||||
|
- [ ] The code change is tested and works locally.
|
||||||
|
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
|
||||||
|
- [ ] There is no commented out code in this PR.
|
||||||
|
- [ ] I have followed the [development checklist][dev-checklist]
|
||||||
|
- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
|
||||||
|
- [ ] Tests have been added to verify that the new code works.
|
||||||
|
|
||||||
|
If API endpoints of add-on configuration are added/changed:
|
||||||
|
|
||||||
|
- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Thank you for contributing <3
|
||||||
|
|
||||||
|
Below, some useful links you could explore:
|
||||||
|
-->
|
||||||
|
|
||||||
|
[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
|
||||||
|
[docs-repository]: https://github.com/home-assistant/developers.home-assistant
|
14
.github/dependabot.yml
vendored
Normal file
14
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: pip
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: daily
|
||||||
|
time: "06:00"
|
||||||
|
open-pull-requests-limit: 10
|
||||||
|
- package-ecosystem: "github-actions"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: daily
|
||||||
|
time: "06:00"
|
||||||
|
open-pull-requests-limit: 10
|
47
.github/release-drafter.yml
vendored
47
.github/release-drafter.yml
vendored
@@ -1,4 +1,49 @@
|
|||||||
|
change-template: "- #$NUMBER $TITLE @$AUTHOR"
|
||||||
|
sort-direction: ascending
|
||||||
|
|
||||||
|
categories:
|
||||||
|
- title: ":boom: Breaking Changes"
|
||||||
|
label: "breaking-change"
|
||||||
|
|
||||||
|
- title: ":wrench: Build"
|
||||||
|
label: "build"
|
||||||
|
|
||||||
|
- title: ":boar: Chore"
|
||||||
|
label: "chore"
|
||||||
|
|
||||||
|
- title: ":sparkles: New Features"
|
||||||
|
label: "new-feature"
|
||||||
|
|
||||||
|
- title: ":zap: Performance"
|
||||||
|
label: "performance"
|
||||||
|
|
||||||
|
- title: ":recycle: Refactor"
|
||||||
|
label: "refactor"
|
||||||
|
|
||||||
|
- title: ":green_heart: CI"
|
||||||
|
label: "ci"
|
||||||
|
|
||||||
|
- title: ":bug: Bug Fixes"
|
||||||
|
label: "bugfix"
|
||||||
|
|
||||||
|
- title: ":white_check_mark: Test"
|
||||||
|
label: "test"
|
||||||
|
|
||||||
|
- title: ":arrow_up: Dependency Updates"
|
||||||
|
label: "dependencies"
|
||||||
|
|
||||||
|
include-labels:
|
||||||
|
- "breaking-change"
|
||||||
|
- "build"
|
||||||
|
- "chore"
|
||||||
|
- "performance"
|
||||||
|
- "refactor"
|
||||||
|
- "new-feature"
|
||||||
|
- "bugfix"
|
||||||
|
- "dependencies"
|
||||||
|
- "test"
|
||||||
|
- "ci"
|
||||||
|
|
||||||
template: |
|
template: |
|
||||||
## What's Changed
|
|
||||||
|
|
||||||
$CHANGES
|
$CHANGES
|
||||||
|
17
.github/stale.yml
vendored
17
.github/stale.yml
vendored
@@ -1,17 +0,0 @@
|
|||||||
# Number of days of inactivity before an issue becomes stale
|
|
||||||
daysUntilStale: 60
|
|
||||||
# Number of days of inactivity before a stale issue is closed
|
|
||||||
daysUntilClose: 7
|
|
||||||
# Issues with these labels will never be considered stale
|
|
||||||
exemptLabels:
|
|
||||||
- pinned
|
|
||||||
- security
|
|
||||||
# Label to use when marking an issue as stale
|
|
||||||
staleLabel: wontfix
|
|
||||||
# Comment to post when marking an issue as stale. Set to `false` to disable
|
|
||||||
markComment: >
|
|
||||||
This issue has been automatically marked as stale because it has not had
|
|
||||||
recent activity. It will be closed if no further activity occurs. Thank you
|
|
||||||
for your contributions.
|
|
||||||
# Comment to post when closing a stale issue. Set to `false` to disable
|
|
||||||
closeComment: false
|
|
252
.github/workflows/builder.yml
vendored
Normal file
252
.github/workflows/builder.yml
vendored
Normal file
@@ -0,0 +1,252 @@
|
|||||||
|
name: Build supervisor
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
channel:
|
||||||
|
description: "Channel"
|
||||||
|
required: true
|
||||||
|
default: "dev"
|
||||||
|
version:
|
||||||
|
description: "Version"
|
||||||
|
required: true
|
||||||
|
publish:
|
||||||
|
description: "Publish"
|
||||||
|
required: true
|
||||||
|
default: "false"
|
||||||
|
stable:
|
||||||
|
description: "Stable"
|
||||||
|
required: true
|
||||||
|
default: "false"
|
||||||
|
pull_request:
|
||||||
|
branches: ["main"]
|
||||||
|
release:
|
||||||
|
types: ["published"]
|
||||||
|
push:
|
||||||
|
branches: ["main"]
|
||||||
|
paths:
|
||||||
|
- "rootfs/**"
|
||||||
|
- "supervisor/**"
|
||||||
|
- build.json
|
||||||
|
- Dockerfile
|
||||||
|
- requirements.txt
|
||||||
|
- setup.py
|
||||||
|
|
||||||
|
env:
|
||||||
|
BUILD_NAME: supervisor
|
||||||
|
BUILD_TYPE: supervisor
|
||||||
|
WHEELS_TAG: 3.8-alpine3.13
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
init:
|
||||||
|
name: Initialize build
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
architectures: ${{ steps.info.outputs.architectures }}
|
||||||
|
version: ${{ steps.version.outputs.version }}
|
||||||
|
channel: ${{ steps.version.outputs.channel }}
|
||||||
|
publish: ${{ steps.version.outputs.publish }}
|
||||||
|
requirements: ${{ steps.requirements.outputs.changed }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout the repository
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Get information
|
||||||
|
id: info
|
||||||
|
uses: home-assistant/actions/helpers/info@master
|
||||||
|
|
||||||
|
- name: Get version
|
||||||
|
id: version
|
||||||
|
uses: home-assistant/actions/helpers/version@master
|
||||||
|
with:
|
||||||
|
type: ${{ env.BUILD_TYPE }}
|
||||||
|
|
||||||
|
- name: Get changed files
|
||||||
|
id: changed_files
|
||||||
|
if: steps.version.outputs.publish == 'false'
|
||||||
|
uses: jitterbit/get-changed-files@v1
|
||||||
|
|
||||||
|
- name: Check if requirements files changed
|
||||||
|
id: requirements
|
||||||
|
run: |
|
||||||
|
if [[ "${{ steps.changed_files.outputs.all }}" =~ requirements.txt ]]; then
|
||||||
|
echo "::set-output name=changed::true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
build:
|
||||||
|
name: Build ${{ matrix.arch }} supervisor
|
||||||
|
needs: init
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout the repository
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Build wheels
|
||||||
|
if: needs.init.outputs.requirements == 'true'
|
||||||
|
uses: home-assistant/wheels@master
|
||||||
|
with:
|
||||||
|
tag: ${{ env.WHEELS_TAG }}
|
||||||
|
arch: ${{ matrix.arch }}
|
||||||
|
wheels-host: ${{ secrets.WHEELS_HOST }}
|
||||||
|
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||||
|
wheels-user: wheels
|
||||||
|
apk: "build-base;libffi-dev;openssl-dev;cargo"
|
||||||
|
skip-binary: aiohttp
|
||||||
|
requirements: "requirements.txt"
|
||||||
|
|
||||||
|
- name: Set version
|
||||||
|
if: needs.init.outputs.publish == 'true'
|
||||||
|
uses: home-assistant/actions/helpers/version@master
|
||||||
|
with:
|
||||||
|
type: ${{ env.BUILD_TYPE }}
|
||||||
|
|
||||||
|
- name: Login to DockerHub
|
||||||
|
if: needs.init.outputs.publish == 'true'
|
||||||
|
uses: docker/login-action@v1
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Login to GitHub Container Registry
|
||||||
|
if: needs.init.outputs.publish == 'true'
|
||||||
|
uses: docker/login-action@v1
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ secrets.GIT_USER }}
|
||||||
|
password: ${{ secrets.GIT_TOKEN }}
|
||||||
|
|
||||||
|
- name: Set build arguments
|
||||||
|
if: needs.init.outputs.publish == 'false'
|
||||||
|
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Build supervisor
|
||||||
|
uses: home-assistant/builder@2021.04.0
|
||||||
|
with:
|
||||||
|
args: |
|
||||||
|
$BUILD_ARGS \
|
||||||
|
--${{ matrix.arch }} \
|
||||||
|
--target /data \
|
||||||
|
--with-codenotary "${{ secrets.VCN_USER }}" "${{ secrets.VCN_PASSWORD }}" "${{ secrets.VCN_ORG }}" \
|
||||||
|
--validate-from "${{ secrets.VCN_ORG }}" \
|
||||||
|
--validate-cache "${{ secrets.VCN_ORG }}" \
|
||||||
|
--generic ${{ needs.init.outputs.version }}
|
||||||
|
|
||||||
|
codenotary:
|
||||||
|
name: CodeNotary signature
|
||||||
|
needs: init
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout the repository
|
||||||
|
if: needs.init.outputs.publish == 'true'
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set version
|
||||||
|
if: needs.init.outputs.publish == 'true'
|
||||||
|
uses: home-assistant/actions/helpers/version@master
|
||||||
|
with:
|
||||||
|
type: ${{ env.BUILD_TYPE }}
|
||||||
|
|
||||||
|
- name: Signing image
|
||||||
|
if: needs.init.outputs.publish == 'true'
|
||||||
|
uses: home-assistant/actions/helpers/codenotary@master
|
||||||
|
with:
|
||||||
|
source: dir://${{ github.workspace }}
|
||||||
|
user: ${{ secrets.VCN_USER }}
|
||||||
|
password: ${{ secrets.VCN_PASSWORD }}
|
||||||
|
organisation: ${{ secrets.VCN_ORG }}
|
||||||
|
|
||||||
|
version:
|
||||||
|
name: Update version
|
||||||
|
needs: ["init", "run_supervisor"]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout the repository
|
||||||
|
if: needs.init.outputs.publish == 'true'
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Initialize git
|
||||||
|
if: needs.init.outputs.publish == 'true'
|
||||||
|
uses: home-assistant/actions/helpers/git-init@master
|
||||||
|
with:
|
||||||
|
name: ${{ secrets.GIT_NAME }}
|
||||||
|
email: ${{ secrets.GIT_EMAIL }}
|
||||||
|
token: ${{ secrets.GIT_TOKEN }}
|
||||||
|
|
||||||
|
- name: Update version file
|
||||||
|
if: needs.init.outputs.publish == 'true'
|
||||||
|
uses: home-assistant/actions/helpers/version-push@master
|
||||||
|
with:
|
||||||
|
key: ${{ env.BUILD_NAME }}
|
||||||
|
version: ${{ needs.init.outputs.version }}
|
||||||
|
channel: ${{ needs.init.outputs.channel }}
|
||||||
|
|
||||||
|
run_supervisor:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: Run the Supervisor
|
||||||
|
needs: ["build", "codenotary"]
|
||||||
|
steps:
|
||||||
|
- name: Checkout the repository
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- name: Build the Supervisor
|
||||||
|
uses: home-assistant/builder@2021.04.0
|
||||||
|
with:
|
||||||
|
args: |
|
||||||
|
--test \
|
||||||
|
--amd64 \
|
||||||
|
--target /data \
|
||||||
|
--generic runner
|
||||||
|
|
||||||
|
- name: Create the Supervisor
|
||||||
|
run: |
|
||||||
|
mkdir -p /tmp/supervisor/data
|
||||||
|
docker create --name hassio_supervisor \
|
||||||
|
--privileged \
|
||||||
|
--security-opt seccomp=unconfined \
|
||||||
|
--security-opt apparmor:unconfined \
|
||||||
|
-v /run/docker.sock:/run/docker.sock \
|
||||||
|
-v /run/dbus:/run/dbus \
|
||||||
|
-v /tmp/supervisor/data:/data \
|
||||||
|
-v /etc/machine-id:/etc/machine-id:ro \
|
||||||
|
-e SUPERVISOR_SHARE="/tmp/supervisor/data" \
|
||||||
|
-e SUPERVISOR_NAME=hassio_supervisor \
|
||||||
|
-e SUPERVISOR_DEV=1 \
|
||||||
|
-e SUPERVISOR_MACHINE="qemux86-64" \
|
||||||
|
homeassistant/amd64-hassio-supervisor:runner
|
||||||
|
|
||||||
|
- name: Start the Supervisor
|
||||||
|
run: docker start hassio_supervisor
|
||||||
|
|
||||||
|
- name: Wait for Supervisor to come up
|
||||||
|
run: |
|
||||||
|
SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
|
||||||
|
ping="error"
|
||||||
|
while [ "$ping" != "ok" ]; do
|
||||||
|
ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r .result)
|
||||||
|
sleep 5
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Check the Supervisor
|
||||||
|
run: |
|
||||||
|
echo "Checking supervisor info"
|
||||||
|
test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r .result)
|
||||||
|
if [ "$test" != "ok" ];then
|
||||||
|
docker logs hassio_supervisor
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Checking supervisor network info"
|
||||||
|
test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r .result)
|
||||||
|
if [ "$test" != "ok" ];then
|
||||||
|
docker logs hassio_supervisor
|
||||||
|
exit 1
|
||||||
|
fi
|
19
.github/workflows/check_pr_labels.yml
vendored
Normal file
19
.github/workflows/check_pr_labels.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
name: Check PR
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches: ["main"]
|
||||||
|
types: [labeled, unlabeled, synchronize]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
init:
|
||||||
|
name: Check labels
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Check labels
|
||||||
|
run: |
|
||||||
|
labels=$(jq -r '.pull_request.labels[] | .name' ${{github.event_path }})
|
||||||
|
echo "$labels"
|
||||||
|
if [ "$labels" == "cla-signed" ]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
435
.github/workflows/ci.yaml
vendored
Normal file
435
.github/workflows/ci.yaml
vendored
Normal file
@@ -0,0 +1,435 @@
|
|||||||
|
name: CI
|
||||||
|
|
||||||
|
# yamllint disable-line rule:truthy
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request: ~
|
||||||
|
|
||||||
|
env:
|
||||||
|
DEFAULT_PYTHON: 3.8
|
||||||
|
PRE_COMMIT_HOME: ~/.cache/pre-commit
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Separate job to pre-populate the base dependency cache
|
||||||
|
# This prevent upcoming jobs to do the same individually
|
||||||
|
prepare:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: [3.8]
|
||||||
|
name: Prepare Python ${{ matrix.python-version }} dependencies
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
id: python
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-
|
||||||
|
- name: Create Python virtual environment
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
python -m venv venv
|
||||||
|
. venv/bin/activate
|
||||||
|
pip install -U pip setuptools
|
||||||
|
pip install -r requirements.txt -r requirements_tests.txt
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-pre-commit-
|
||||||
|
- name: Install pre-commit dependencies
|
||||||
|
if: steps.cache-precommit.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit install-hooks
|
||||||
|
|
||||||
|
lint-black:
|
||||||
|
name: Check black
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run black
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
black --target-version py38 --check supervisor tests setup.py
|
||||||
|
|
||||||
|
lint-dockerfile:
|
||||||
|
name: Check Dockerfile
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Register hadolint problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
|
||||||
|
- name: Check Dockerfile
|
||||||
|
uses: docker://hadolint/hadolint:v1.18.0
|
||||||
|
with:
|
||||||
|
args: hadolint Dockerfile
|
||||||
|
|
||||||
|
lint-executable-shebangs:
|
||||||
|
name: Check executables
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register check executables problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||||
|
- name: Run executables check
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
|
||||||
|
|
||||||
|
lint-flake8:
|
||||||
|
name: Check flake8
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register flake8 problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/flake8.json"
|
||||||
|
- name: Run flake8
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
flake8 supervisor tests
|
||||||
|
|
||||||
|
lint-isort:
|
||||||
|
name: Check isort
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run isort
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
lint-json:
|
||||||
|
name: Check JSON
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register check-json problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/check-json.json"
|
||||||
|
- name: Run check-json
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual check-json --all-files
|
||||||
|
|
||||||
|
lint-pylint:
|
||||||
|
name: Check pylint
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register pylint problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/pylint.json"
|
||||||
|
- name: Run pylint
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pylint supervisor tests
|
||||||
|
|
||||||
|
lint-pyupgrade:
|
||||||
|
name: Check pyupgrade
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run pyupgrade
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
pytest:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: [3.8]
|
||||||
|
name: Run tests Python ${{ matrix.python-version }}
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
- name: Install CodeNotary
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
bash <(curl https://getvcn.codenotary.com -L)
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Install additional system dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y --no-install-recommends libpulse0 libudev1
|
||||||
|
- name: Register Python problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||||
|
- name: Install Pytest Annotation plugin
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
# Ideally this should be part of our dependencies
|
||||||
|
# However this plugin is fairly new and doesn't run correctly
|
||||||
|
# on a non-GitHub environment.
|
||||||
|
pip install pytest-github-actions-annotate-failures
|
||||||
|
- name: Run pytest
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pytest \
|
||||||
|
-qq \
|
||||||
|
--timeout=10 \
|
||||||
|
--durations=10 \
|
||||||
|
--cov supervisor \
|
||||||
|
-o console_output_style=count \
|
||||||
|
tests
|
||||||
|
- name: Upload coverage artifact
|
||||||
|
uses: actions/upload-artifact@v2.2.3
|
||||||
|
with:
|
||||||
|
name: coverage-${{ matrix.python-version }}
|
||||||
|
path: .coverage
|
||||||
|
|
||||||
|
coverage:
|
||||||
|
name: Process test coverage
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: pytest
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.2.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2.1.5
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Download all coverage artifacts
|
||||||
|
uses: actions/download-artifact@v2
|
||||||
|
- name: Combine coverage results
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
coverage combine coverage*/.coverage*
|
||||||
|
coverage report
|
||||||
|
coverage xml
|
||||||
|
- name: Upload coverage to Codecov
|
||||||
|
uses: codecov/codecov-action@v1.3.2
|
20
.github/workflows/lock.yml
vendored
Normal file
20
.github/workflows/lock.yml
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
name: Lock
|
||||||
|
|
||||||
|
# yamllint disable-line rule:truthy
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 0 * * *"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lock:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: dessant/lock-threads@v2.0.3
|
||||||
|
with:
|
||||||
|
github-token: ${{ github.token }}
|
||||||
|
issue-lock-inactive-days: "30"
|
||||||
|
issue-exclude-created-before: "2020-10-01T00:00:00Z"
|
||||||
|
issue-lock-reason: ""
|
||||||
|
pr-lock-inactive-days: "1"
|
||||||
|
pr-exclude-created-before: "2020-11-01T00:00:00Z"
|
||||||
|
pr-lock-reason: ""
|
14
.github/workflows/matchers/check-executables-have-shebangs.json
vendored
Normal file
14
.github/workflows/matchers/check-executables-have-shebangs.json
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "check-executables-have-shebangs",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):\\s(.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"message": 2
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
16
.github/workflows/matchers/check-json.json
vendored
Normal file
16
.github/workflows/matchers/check-json.json
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "check-json",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
|
||||||
|
"file": 1,
|
||||||
|
"message": 2,
|
||||||
|
"line": 3,
|
||||||
|
"column": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
30
.github/workflows/matchers/flake8.json
vendored
Normal file
30
.github/workflows/matchers/flake8.json
vendored
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "flake8-error",
|
||||||
|
"severity": "error",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"owner": "flake8-warning",
|
||||||
|
"severity": "warning",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
16
.github/workflows/matchers/hadolint.json
vendored
Normal file
16
.github/workflows/matchers/hadolint.json
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "hadolint",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"message": 3,
|
||||||
|
"code": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
32
.github/workflows/matchers/pylint.json
vendored
Normal file
32
.github/workflows/matchers/pylint.json
vendored
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "pylint-error",
|
||||||
|
"severity": "error",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4,
|
||||||
|
"code": 5
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"owner": "pylint-warning",
|
||||||
|
"severity": "warning",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4,
|
||||||
|
"code": 5
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
18
.github/workflows/matchers/python.json
vendored
Normal file
18
.github/workflows/matchers/python.json
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "python",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
|
||||||
|
"message": 2
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
44
.github/workflows/release-drafter.yml
vendored
Normal file
44
.github/workflows/release-drafter.yml
vendored
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
name: Release Drafter
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
update_release_draft:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: Release Drafter
|
||||||
|
steps:
|
||||||
|
- name: Checkout the repository
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Find Next Version
|
||||||
|
id: version
|
||||||
|
run: |
|
||||||
|
declare -i newpost
|
||||||
|
latest=$(git describe --tags $(git rev-list --tags --max-count=1))
|
||||||
|
latestpre=$(echo "$latest" | awk '{split($0,a,"."); print a[1] "." a[2]}')
|
||||||
|
datepre=$(date --utc '+%Y.%m')
|
||||||
|
|
||||||
|
|
||||||
|
if [[ "$latestpre" == "$datepre" ]]; then
|
||||||
|
latestpost=$(echo "$latest" | awk '{split($0,a,"."); print a[3]}')
|
||||||
|
newpost=$latestpost+1
|
||||||
|
else
|
||||||
|
newpost=0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo Current version: $latest
|
||||||
|
echo New target version: $datepre.$newpost
|
||||||
|
echo "::set-output name=version::$datepre.$newpost"
|
||||||
|
|
||||||
|
- name: Run Release Drafter
|
||||||
|
uses: release-drafter/release-drafter@v5
|
||||||
|
with:
|
||||||
|
tag: ${{ steps.version.outputs.version }}
|
||||||
|
name: ${{ steps.version.outputs.version }}
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
21
.github/workflows/sentry.yaml
vendored
Normal file
21
.github/workflows/sentry.yaml
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
name: Sentry Release
|
||||||
|
|
||||||
|
# yamllint disable-line rule:truthy
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [published, prereleased]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
createSentryRelease:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Sentry Release
|
||||||
|
uses: getsentry/action-release@v1.1
|
||||||
|
env:
|
||||||
|
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||||
|
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||||
|
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||||
|
with:
|
||||||
|
environment: production
|
39
.github/workflows/stale.yml
vendored
Normal file
39
.github/workflows/stale.yml
vendored
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
name: Stale
|
||||||
|
|
||||||
|
# yamllint disable-line rule:truthy
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 * * * *"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
stale:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/stale@v3.0.18
|
||||||
|
with:
|
||||||
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
days-before-stale: 60
|
||||||
|
days-before-close: 7
|
||||||
|
stale-issue-label: "stale"
|
||||||
|
exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"
|
||||||
|
stale-issue-message: >
|
||||||
|
There hasn't been any activity on this issue recently. Due to the
|
||||||
|
high number of incoming GitHub notifications, we have to clean some
|
||||||
|
of the old issues, as many of them have already been resolved with
|
||||||
|
the latest updates.
|
||||||
|
|
||||||
|
Please make sure to update to the latest version and check if that
|
||||||
|
solves the issue. Let us know if that works for you by
|
||||||
|
adding a comment 👍
|
||||||
|
|
||||||
|
This issue has now been marked as stale and will be closed if no
|
||||||
|
further activity occurs. Thank you for your contributions.
|
||||||
|
|
||||||
|
stale-pr-label: "stale"
|
||||||
|
exempt-pr-labels: "no-stale,pinned,rfc,security"
|
||||||
|
stale-pr-message: >
|
||||||
|
There hasn't been any activity on this pull request recently. This
|
||||||
|
pull request has been automatically marked as stale because of that
|
||||||
|
and will be closed if no further activity occurs within 7 days.
|
||||||
|
|
||||||
|
Thank you for your contributions.
|
9
.gitignore
vendored
9
.gitignore
vendored
@@ -92,4 +92,11 @@ ENV/
|
|||||||
.pylint.d/
|
.pylint.d/
|
||||||
|
|
||||||
# VS Code
|
# VS Code
|
||||||
.vscode/
|
.vscode/*
|
||||||
|
!.vscode/cSpell.json
|
||||||
|
!.vscode/tasks.json
|
||||||
|
!.vscode/launch.json
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
/.mypy_cache/*
|
||||||
|
/.dmypy.json
|
||||||
|
@@ -1,5 +1,6 @@
|
|||||||
ignored:
|
ignored:
|
||||||
- DL3018
|
- DL3003
|
||||||
- DL3006
|
- DL3006
|
||||||
- DL3013
|
- DL3013
|
||||||
|
- DL3018
|
||||||
- SC2155
|
- SC2155
|
||||||
|
34
.pre-commit-config.yaml
Normal file
34
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
repos:
|
||||||
|
- repo: https://github.com/psf/black
|
||||||
|
rev: 20.8b1
|
||||||
|
hooks:
|
||||||
|
- id: black
|
||||||
|
args:
|
||||||
|
- --safe
|
||||||
|
- --quiet
|
||||||
|
- --target-version
|
||||||
|
- py38
|
||||||
|
files: ^((supervisor|tests)/.+)?[^/]+\.py$
|
||||||
|
- repo: https://gitlab.com/pycqa/flake8
|
||||||
|
rev: 3.8.3
|
||||||
|
hooks:
|
||||||
|
- id: flake8
|
||||||
|
additional_dependencies:
|
||||||
|
- flake8-docstrings==1.5.0
|
||||||
|
- pydocstyle==5.0.2
|
||||||
|
files: ^(supervisor|script|tests)/.+\.py$
|
||||||
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
|
rev: v3.1.0
|
||||||
|
hooks:
|
||||||
|
- id: check-executables-have-shebangs
|
||||||
|
stages: [manual]
|
||||||
|
- id: check-json
|
||||||
|
- repo: https://github.com/pre-commit/mirrors-isort
|
||||||
|
rev: v4.3.21
|
||||||
|
hooks:
|
||||||
|
- id: isort
|
||||||
|
- repo: https://github.com/asottile/pyupgrade
|
||||||
|
rev: v2.6.2
|
||||||
|
hooks:
|
||||||
|
- id: pyupgrade
|
||||||
|
args: [--py37-plus]
|
21
.vcnignore
Normal file
21
.vcnignore
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
*.egg-info/
|
||||||
|
|
||||||
|
# General files
|
||||||
|
.git
|
||||||
|
.github
|
||||||
|
.devcontainer
|
||||||
|
.vscode
|
||||||
|
.tox
|
||||||
|
|
||||||
|
# Data
|
||||||
|
home-assistant-polymer/
|
||||||
|
script/
|
||||||
|
tests/
|
||||||
|
data/
|
||||||
|
venv/
|
18
.vscode/launch.json
vendored
Normal file
18
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"name": "Supervisor remote debug",
|
||||||
|
"type": "python",
|
||||||
|
"request": "attach",
|
||||||
|
"port": 33333,
|
||||||
|
"host": "172.30.32.2",
|
||||||
|
"pathMappings": [
|
||||||
|
{
|
||||||
|
"localRoot": "${workspaceFolder}",
|
||||||
|
"remoteRoot": "/usr/src/supervisor"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
104
.vscode/tasks.json
vendored
Normal file
104
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
{
|
||||||
|
"version": "2.0.0",
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"label": "Run Supervisor",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "./scripts/run-supervisor.sh",
|
||||||
|
"group": {
|
||||||
|
"kind": "test",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Build Supervisor",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "./scripts/build-supervisor.sh",
|
||||||
|
"group": {
|
||||||
|
"kind": "build",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Run Supervisor CLI",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
|
||||||
|
"group": {
|
||||||
|
"kind": "test",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Update Supervisor Panel",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "./scripts/update-frontend.sh",
|
||||||
|
"group": {
|
||||||
|
"kind": "build",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Pytest",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "pytest --timeout=10 tests",
|
||||||
|
"group": {
|
||||||
|
"kind": "test",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Flake8",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "flake8 supervisor tests",
|
||||||
|
"group": {
|
||||||
|
"kind": "test",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Pylint",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "pylint supervisor",
|
||||||
|
"dependsOn": ["Install all Requirements"],
|
||||||
|
"group": {
|
||||||
|
"kind": "test",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "always",
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
747
API.md
747
API.md
@@ -1,747 +0,0 @@
|
|||||||
# Hass.io
|
|
||||||
|
|
||||||
## Hass.io RESTful API
|
|
||||||
|
|
||||||
Interface for Home Assistant to control things from supervisor.
|
|
||||||
|
|
||||||
On error / Code 400:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"result": "error",
|
|
||||||
"message": ""
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
On success / Code 200:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"result": "ok",
|
|
||||||
"data": { }
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
For access to API you need set the `X-HASSIO-KEY` they will be available for Add-ons/HomeAssistant with environment `HASSIO_TOKEN`.
|
|
||||||
|
|
||||||
### Hass.io
|
|
||||||
|
|
||||||
- GET `/supervisor/ping`
|
|
||||||
|
|
||||||
This API call don't need a token.
|
|
||||||
|
|
||||||
- GET `/supervisor/info`
|
|
||||||
|
|
||||||
The addons from `addons` are only installed one.
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"version": "INSTALL_VERSION",
|
|
||||||
"last_version": "LAST_VERSION",
|
|
||||||
"arch": "armhf|aarch64|i386|amd64",
|
|
||||||
"channel": "stable|beta|dev",
|
|
||||||
"timezone": "TIMEZONE",
|
|
||||||
"logging": "debug|info|warning|error|critical",
|
|
||||||
"ip_address": "ip address",
|
|
||||||
"wait_boot": "int",
|
|
||||||
"debug": "bool",
|
|
||||||
"debug_block": "bool",
|
|
||||||
"addons": [
|
|
||||||
{
|
|
||||||
"name": "xy bla",
|
|
||||||
"slug": "xy",
|
|
||||||
"description": "description",
|
|
||||||
"repository": "12345678|null",
|
|
||||||
"version": "LAST_VERSION",
|
|
||||||
"installed": "INSTALL_VERSION",
|
|
||||||
"icon": "bool",
|
|
||||||
"logo": "bool",
|
|
||||||
"state": "started|stopped",
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"addons_repositories": [
|
|
||||||
"REPO_URL"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/supervisor/update`
|
|
||||||
|
|
||||||
Optional:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"version": "VERSION"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/supervisor/options`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"channel": "stable|beta|dev",
|
|
||||||
"timezone": "TIMEZONE",
|
|
||||||
"wait_boot": "int",
|
|
||||||
"debug": "bool",
|
|
||||||
"debug_block": "bool",
|
|
||||||
"logging": "debug|info|warning|error|critical",
|
|
||||||
"addons_repositories": [
|
|
||||||
"REPO_URL"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/supervisor/reload`
|
|
||||||
|
|
||||||
Reload addons/version.
|
|
||||||
|
|
||||||
- GET `/supervisor/logs`
|
|
||||||
|
|
||||||
Output is the raw docker log.
|
|
||||||
|
|
||||||
- GET `/supervisor/stats`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"cpu_percent": 0.0,
|
|
||||||
"memory_usage": 283123,
|
|
||||||
"memory_limit": 329392,
|
|
||||||
"network_tx": 0,
|
|
||||||
"network_rx": 0,
|
|
||||||
"blk_read": 0,
|
|
||||||
"blk_write": 0
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Snapshot
|
|
||||||
|
|
||||||
- GET `/snapshots`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"snapshots": [
|
|
||||||
{
|
|
||||||
"slug": "SLUG",
|
|
||||||
"date": "ISO",
|
|
||||||
"name": "Custom name",
|
|
||||||
"type": "full|partial",
|
|
||||||
"protected": "bool"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/snapshots/reload`
|
|
||||||
|
|
||||||
- POST `/snapshots/new/upload`
|
|
||||||
|
|
||||||
return:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"slug": ""
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/snapshots/new/full`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"name": "Optional",
|
|
||||||
"password": "Optional"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
return:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"slug": ""
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/snapshots/new/partial`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"name": "Optional",
|
|
||||||
"addons": ["ADDON_SLUG"],
|
|
||||||
"folders": ["FOLDER_NAME"],
|
|
||||||
"password": "Optional"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
return:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"slug": ""
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/snapshots/reload`
|
|
||||||
|
|
||||||
- GET `/snapshots/{slug}/info`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"slug": "SNAPSHOT ID",
|
|
||||||
"type": "full|partial",
|
|
||||||
"name": "custom snapshot name / description",
|
|
||||||
"date": "ISO",
|
|
||||||
"size": "SIZE_IN_MB",
|
|
||||||
"protected": "bool",
|
|
||||||
"homeassistant": "version",
|
|
||||||
"addons": [
|
|
||||||
{
|
|
||||||
"slug": "ADDON_SLUG",
|
|
||||||
"name": "NAME",
|
|
||||||
"version": "INSTALLED_VERSION",
|
|
||||||
"size": "SIZE_IN_MB"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"repositories": ["URL"],
|
|
||||||
"folders": ["NAME"]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/remove`
|
|
||||||
|
|
||||||
- GET `/snapshots/{slug}/download`
|
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/restore/full`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"password": "Optional"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/restore/partial`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"homeassistant": "bool",
|
|
||||||
"addons": ["ADDON_SLUG"],
|
|
||||||
"folders": ["FOLDER_NAME"],
|
|
||||||
"password": "Optional"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Host
|
|
||||||
|
|
||||||
- POST `/host/reload`
|
|
||||||
|
|
||||||
- POST `/host/shutdown`
|
|
||||||
|
|
||||||
- POST `/host/reboot`
|
|
||||||
|
|
||||||
- GET `/host/info`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"hostname": "hostname|null",
|
|
||||||
"features": ["shutdown", "reboot", "hostname", "services", "hassos"],
|
|
||||||
"operating_system": "HassOS XY|Ubuntu 16.4|null",
|
|
||||||
"kernel": "4.15.7|null",
|
|
||||||
"chassis": "specific|null",
|
|
||||||
"deployment": "stable|beta|dev|null",
|
|
||||||
"cpe": "xy|null",
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/host/options`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"hostname": "",
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/host/reload`
|
|
||||||
|
|
||||||
#### Services
|
|
||||||
|
|
||||||
- GET `/host/services`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"services": [
|
|
||||||
{
|
|
||||||
"name": "xy.service",
|
|
||||||
"description": "XY ...",
|
|
||||||
"state": "active|"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/host/service/{unit}/stop`
|
|
||||||
|
|
||||||
- POST `/host/service/{unit}/start`
|
|
||||||
|
|
||||||
- POST `/host/service/{unit}/reload`
|
|
||||||
|
|
||||||
### HassOS
|
|
||||||
|
|
||||||
- GET `/hassos/info`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"version": "2.3",
|
|
||||||
"version_cli": "7",
|
|
||||||
"version_latest": "2.4",
|
|
||||||
"version_cli_latest": "8",
|
|
||||||
"board": "ova|rpi"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/hassos/update`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"version": "optional"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/hassos/update/cli`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"version": "optional"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/hassos/config/sync`
|
|
||||||
|
|
||||||
Load host configs from a USB stick.
|
|
||||||
|
|
||||||
### Hardware
|
|
||||||
|
|
||||||
- GET `/hardware/info`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"serial": ["/dev/xy"],
|
|
||||||
"input": ["Input device name"],
|
|
||||||
"disk": ["/dev/sdax"],
|
|
||||||
"gpio": ["gpiochip0", "gpiochip100"],
|
|
||||||
"audio": {
|
|
||||||
"CARD_ID": {
|
|
||||||
"name": "xy",
|
|
||||||
"type": "microphone",
|
|
||||||
"devices": [
|
|
||||||
"chan_id": "channel ID",
|
|
||||||
"chan_type": "type of device"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- GET `/hardware/audio`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"audio": {
|
|
||||||
"input": {
|
|
||||||
"0,0": "Mic"
|
|
||||||
},
|
|
||||||
"output": {
|
|
||||||
"1,0": "Jack",
|
|
||||||
"1,1": "HDMI"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Home Assistant
|
|
||||||
|
|
||||||
- GET `/homeassistant/info`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"version": "INSTALL_VERSION",
|
|
||||||
"last_version": "LAST_VERSION",
|
|
||||||
"arch": "arch",
|
|
||||||
"machine": "Image machine type",
|
|
||||||
"ip_address": "ip address",
|
|
||||||
"image": "str",
|
|
||||||
"custom": "bool -> if custom image",
|
|
||||||
"boot": "bool",
|
|
||||||
"port": 8123,
|
|
||||||
"ssl": "bool",
|
|
||||||
"watchdog": "bool",
|
|
||||||
"wait_boot": 600
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/homeassistant/update`
|
|
||||||
|
|
||||||
Optional:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"version": "VERSION"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- GET `/homeassistant/logs`
|
|
||||||
|
|
||||||
Output is the raw Docker log.
|
|
||||||
|
|
||||||
- POST `/homeassistant/restart`
|
|
||||||
- POST `/homeassistant/check`
|
|
||||||
- POST `/homeassistant/start`
|
|
||||||
- POST `/homeassistant/stop`
|
|
||||||
- POST `/homeassistant/rebuild`
|
|
||||||
|
|
||||||
- POST `/homeassistant/options`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"image": "Optional|null",
|
|
||||||
"last_version": "Optional for custom image|null",
|
|
||||||
"port": "port for access hass",
|
|
||||||
"ssl": "bool",
|
|
||||||
"password": "",
|
|
||||||
"refresh_token": "",
|
|
||||||
"watchdog": "bool",
|
|
||||||
"wait_boot": 600
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Image with `null` and last_version with `null` reset this options.
|
|
||||||
|
|
||||||
- POST/GET `/homeassistant/api`
|
|
||||||
|
|
||||||
Proxy to real home-assistant instance.
|
|
||||||
|
|
||||||
- GET `/homeassistant/websocket`
|
|
||||||
|
|
||||||
Proxy to real websocket instance.
|
|
||||||
|
|
||||||
- GET `/homeassistant/stats`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"cpu_percent": 0.0,
|
|
||||||
"memory_usage": 283123,
|
|
||||||
"memory_limit": 329392,
|
|
||||||
"network_tx": 0,
|
|
||||||
"network_rx": 0,
|
|
||||||
"blk_read": 0,
|
|
||||||
"blk_write": 0
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### RESTful for API addons
|
|
||||||
|
|
||||||
If an add-on will call itself, you can use `/addons/self/...`.
|
|
||||||
|
|
||||||
- GET `/addons`
|
|
||||||
|
|
||||||
Get all available addons.
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"addons": [
|
|
||||||
{
|
|
||||||
"name": "xy bla",
|
|
||||||
"slug": "xy",
|
|
||||||
"description": "description",
|
|
||||||
"repository": "core|local|REP_ID",
|
|
||||||
"version": "LAST_VERSION",
|
|
||||||
"installed": "none|INSTALL_VERSION",
|
|
||||||
"detached": "bool",
|
|
||||||
"available": "bool",
|
|
||||||
"build": "bool",
|
|
||||||
"url": "null|url",
|
|
||||||
"icon": "bool",
|
|
||||||
"logo": "bool"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"repositories": [
|
|
||||||
{
|
|
||||||
"slug": "12345678",
|
|
||||||
"name": "Repitory Name|unknown",
|
|
||||||
"source": "URL_OF_REPOSITORY",
|
|
||||||
"url": "WEBSITE|REPOSITORY",
|
|
||||||
"maintainer": "BLA BLU <fla@dld.ch>|unknown"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/addons/reload`
|
|
||||||
- GET `/addons/{addon}/info`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"name": "xy bla",
|
|
||||||
"slug": "xdssd_xybla",
|
|
||||||
"description": "description",
|
|
||||||
"long_description": "null|markdown",
|
|
||||||
"auto_update": "bool",
|
|
||||||
"url": "null|url of addon",
|
|
||||||
"detached": "bool",
|
|
||||||
"available": "bool",
|
|
||||||
"arch": ["armhf", "aarch64", "i386", "amd64"],
|
|
||||||
"machine": "[raspberrypi2, tinker]",
|
|
||||||
"homeassistant": "null|min Home Assistant version",
|
|
||||||
"repository": "12345678|null",
|
|
||||||
"version": "null|VERSION_INSTALLED",
|
|
||||||
"last_version": "LAST_VERSION",
|
|
||||||
"state": "none|started|stopped",
|
|
||||||
"boot": "auto|manual",
|
|
||||||
"build": "bool",
|
|
||||||
"options": "{}",
|
|
||||||
"network": "{}|null",
|
|
||||||
"network_description": "{}|null",
|
|
||||||
"host_network": "bool",
|
|
||||||
"host_pid": "bool",
|
|
||||||
"host_ipc": "bool",
|
|
||||||
"host_dbus": "bool",
|
|
||||||
"privileged": ["NET_ADMIN", "SYS_ADMIN"],
|
|
||||||
"apparmor": "disable|default|profile",
|
|
||||||
"devices": ["/dev/xy"],
|
|
||||||
"auto_uart": "bool",
|
|
||||||
"icon": "bool",
|
|
||||||
"logo": "bool",
|
|
||||||
"changelog": "bool",
|
|
||||||
"hassio_api": "bool",
|
|
||||||
"hassio_role": "default|homeassistant|manager|admin",
|
|
||||||
"homeassistant_api": "bool",
|
|
||||||
"auth_api": "bool",
|
|
||||||
"full_access": "bool",
|
|
||||||
"protected": "bool",
|
|
||||||
"rating": "1-6",
|
|
||||||
"stdin": "bool",
|
|
||||||
"webui": "null|http(s)://[HOST]:port/xy/zx",
|
|
||||||
"gpio": "bool",
|
|
||||||
"kernel_modules": "bool",
|
|
||||||
"devicetree": "bool",
|
|
||||||
"docker_api": "bool",
|
|
||||||
"audio": "bool",
|
|
||||||
"audio_input": "null|0,0",
|
|
||||||
"audio_output": "null|0,0",
|
|
||||||
"services_role": "['service:access']",
|
|
||||||
"discovery": "['service']",
|
|
||||||
"ip_address": "ip address",
|
|
||||||
"ingress": "bool",
|
|
||||||
"ingress_entry": "null|/api/hassio_ingress/slug",
|
|
||||||
"ingress_url": "null|/api/hassio_ingress/slug/entry.html",
|
|
||||||
"ingress_port": "null|int",
|
|
||||||
"ingress_panel": "null|bool"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- GET `/addons/{addon}/icon`
|
|
||||||
|
|
||||||
- GET `/addons/{addon}/logo`
|
|
||||||
|
|
||||||
- GET `/addons/{addon}/changelog`
|
|
||||||
|
|
||||||
- POST `/addons/{addon}/options`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"boot": "auto|manual",
|
|
||||||
"auto_update": "bool",
|
|
||||||
"network": {
|
|
||||||
"CONTAINER": "port|[ip, port]"
|
|
||||||
},
|
|
||||||
"options": {},
|
|
||||||
"audio_output": "null|0,0",
|
|
||||||
"audio_input": "null|0,0",
|
|
||||||
"ingress_panel": "bool"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Reset custom network/audio/options, set it `null`.
|
|
||||||
|
|
||||||
- POST `/addons/{addon}/security`
|
|
||||||
|
|
||||||
This function is not callable by itself.
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"protected": "bool",
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/addons/{addon}/start`
|
|
||||||
|
|
||||||
- POST `/addons/{addon}/stop`
|
|
||||||
|
|
||||||
- POST `/addons/{addon}/install`
|
|
||||||
|
|
||||||
- POST `/addons/{addon}/uninstall`
|
|
||||||
|
|
||||||
- POST `/addons/{addon}/update`
|
|
||||||
|
|
||||||
- GET `/addons/{addon}/logs`
|
|
||||||
|
|
||||||
Output is the raw Docker log.
|
|
||||||
|
|
||||||
- POST `/addons/{addon}/restart`
|
|
||||||
|
|
||||||
- POST `/addons/{addon}/rebuild`
|
|
||||||
|
|
||||||
Only supported for local build addons
|
|
||||||
|
|
||||||
- POST `/addons/{addon}/stdin`
|
|
||||||
|
|
||||||
Write data to add-on stdin
|
|
||||||
|
|
||||||
- GET `/addons/{addon}/stats`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"cpu_percent": 0.0,
|
|
||||||
"memory_usage": 283123,
|
|
||||||
"memory_limit": 329392,
|
|
||||||
"network_tx": 0,
|
|
||||||
"network_rx": 0,
|
|
||||||
"blk_read": 0,
|
|
||||||
"blk_write": 0
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### ingress
|
|
||||||
|
|
||||||
- POST `/ingress/session`
|
|
||||||
|
|
||||||
Create a new Session for access to ingress service.
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"session": "token"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- GET `/ingress/panels`
|
|
||||||
|
|
||||||
Return a list of enabled panels.
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"panels": {
|
|
||||||
"addon_slug": {
|
|
||||||
"enable": "boolean",
|
|
||||||
"icon": "mdi:...",
|
|
||||||
"title": "title",
|
|
||||||
"admin": "boolean"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- VIEW `/ingress/{token}`
|
|
||||||
|
|
||||||
Ingress WebUI for this Add-on. The addon need support HASS Auth!
|
|
||||||
Need ingress session as cookie.
|
|
||||||
|
|
||||||
### discovery
|
|
||||||
|
|
||||||
- GET `/discovery`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"discovery": [
|
|
||||||
{
|
|
||||||
"addon": "slug",
|
|
||||||
"service": "name",
|
|
||||||
"uuid": "uuid",
|
|
||||||
"config": {}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- GET `/discovery/{UUID}`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"addon": "slug",
|
|
||||||
"service": "name",
|
|
||||||
"uuid": "uuid",
|
|
||||||
"config": {}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/discovery`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"service": "name",
|
|
||||||
"config": {}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
return:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"uuid": "uuid"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- DEL `/discovery/{UUID}`
|
|
||||||
|
|
||||||
### Services
|
|
||||||
|
|
||||||
- GET `/services`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"services": [
|
|
||||||
{
|
|
||||||
"slug": "name",
|
|
||||||
"available": "bool",
|
|
||||||
"providers": "list"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### MQTT
|
|
||||||
|
|
||||||
- GET `/services/mqtt`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"addon": "name",
|
|
||||||
"host": "xy",
|
|
||||||
"port": "8883",
|
|
||||||
"ssl": "bool",
|
|
||||||
"username": "optional",
|
|
||||||
"password": "optional",
|
|
||||||
"protocol": "3.1.1"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/services/mqtt`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"host": "xy",
|
|
||||||
"port": "8883",
|
|
||||||
"ssl": "bool|optional",
|
|
||||||
"username": "optional",
|
|
||||||
"password": "optional",
|
|
||||||
"protocol": "3.1.1"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- DEL `/services/mqtt`
|
|
||||||
|
|
||||||
### Misc
|
|
||||||
|
|
||||||
- GET `/info`
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"supervisor": "version",
|
|
||||||
"homeassistant": "version",
|
|
||||||
"hassos": "null|version",
|
|
||||||
"hostname": "name",
|
|
||||||
"machine": "type",
|
|
||||||
"arch": "arch",
|
|
||||||
"supported_arch": ["arch1", "arch2"],
|
|
||||||
"channel": "stable|beta|dev",
|
|
||||||
"logging": "debug|info|warning|error|critical"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Auth / SSO API
|
|
||||||
|
|
||||||
You can use the user system on homeassistant. We handle this auth system on
|
|
||||||
supervisor.
|
|
||||||
|
|
||||||
You can call post `/auth`
|
|
||||||
|
|
||||||
We support:
|
|
||||||
- Json `{ "user|name": "...", "password": "..." }`
|
|
||||||
- application/x-www-form-urlencoded `user|name=...&password=...`
|
|
||||||
- BasicAuth
|
|
82
Dockerfile
82
Dockerfile
@@ -1,28 +1,72 @@
|
|||||||
ARG BUILD_FROM
|
ARG BUILD_FROM
|
||||||
FROM $BUILD_FROM
|
FROM ${BUILD_FROM}
|
||||||
|
|
||||||
|
ENV \
|
||||||
|
S6_SERVICES_GRACETIME=10000 \
|
||||||
|
SUPERVISOR_API=http://localhost
|
||||||
|
|
||||||
ARG BUILD_ARCH
|
ARG BUILD_ARCH
|
||||||
|
ARG VCN_VERSION
|
||||||
|
WORKDIR /usr/src
|
||||||
|
|
||||||
# Install base
|
# Install base
|
||||||
RUN apk add --no-cache \
|
RUN \
|
||||||
openssl \
|
set -x \
|
||||||
libffi \
|
&& apk add --no-cache \
|
||||||
musl \
|
eudev \
|
||||||
git \
|
eudev-libs \
|
||||||
socat \
|
git \
|
||||||
glib \
|
glib \
|
||||||
libstdc++ \
|
libffi \
|
||||||
eudev-libs
|
libpulse \
|
||||||
|
musl \
|
||||||
|
openssl \
|
||||||
|
&& apk add --no-cache --virtual .build-dependencies \
|
||||||
|
build-base \
|
||||||
|
go \
|
||||||
|
\
|
||||||
|
&& git clone -b v${VCN_VERSION} --depth 1 \
|
||||||
|
https://github.com/codenotary/vcn \
|
||||||
|
&& cd vcn \
|
||||||
|
\
|
||||||
|
# Fix: https://github.com/codenotary/vcn/issues/131
|
||||||
|
&& go get github.com/codenotary/immudb@4cf9e2ae06ac2e6ec98a60364c3de3eab5524757 \
|
||||||
|
\
|
||||||
|
&& if [ "${BUILD_ARCH}" = "armhf" ]; then \
|
||||||
|
GOARM=6 GOARCH=arm go build -o vcn -ldflags="-s -w" ./cmd/vcn; \
|
||||||
|
elif [ "${BUILD_ARCH}" = "armv7" ]; then \
|
||||||
|
GOARM=7 GOARCH=arm go build -o vcn -ldflags="-s -w" ./cmd/vcn; \
|
||||||
|
elif [ "${BUILD_ARCH}" = "aarch64" ]; then \
|
||||||
|
GOARCH=arm64 go build -o vcn -ldflags="-s -w" ./cmd/vcn; \
|
||||||
|
elif [ "${BUILD_ARCH}" = "i386" ]; then \
|
||||||
|
GOARCH=386 go build -o vcn -ldflags="-s -w" ./cmd/vcn; \
|
||||||
|
elif [ "${BUILD_ARCH}" = "amd64" ]; then \
|
||||||
|
GOARCH=amd64 go build -o vcn -ldflags="-s -w" ./cmd/vcn; \
|
||||||
|
else \
|
||||||
|
exit 1; \
|
||||||
|
fi \
|
||||||
|
\
|
||||||
|
&& rm -rf /root/go /root/.cache \
|
||||||
|
&& mv vcn /usr/bin/vcn \
|
||||||
|
\
|
||||||
|
&& apk del .build-dependencies \
|
||||||
|
&& rm -rf /usr/src/vcn
|
||||||
|
|
||||||
# Install requirements
|
# Install requirements
|
||||||
COPY requirements.txt /usr/src/
|
COPY requirements.txt .
|
||||||
RUN export MAKEFLAGS="-j$(nproc)" \
|
RUN \
|
||||||
&& pip3 install --no-cache-dir --find-links https://wheels.hass.io/alpine-3.9/${BUILD_ARCH}/ \
|
export MAKEFLAGS="-j$(nproc)" \
|
||||||
-r /usr/src/requirements.txt \
|
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
|
||||||
&& rm -f /usr/src/requirements.txt
|
"https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
|
||||||
|
-r ./requirements.txt \
|
||||||
|
&& rm -f requirements.txt
|
||||||
|
|
||||||
# Install HassIO
|
# Install Home Assistant Supervisor
|
||||||
COPY . /usr/src/hassio
|
COPY . supervisor
|
||||||
RUN pip3 install --no-cache-dir -e /usr/src/hassio
|
RUN \
|
||||||
|
pip3 install --no-cache-dir -e ./supervisor \
|
||||||
|
&& python3 -m compileall ./supervisor/supervisor
|
||||||
|
|
||||||
CMD [ "python3", "-m", "hassio" ]
|
|
||||||
|
WORKDIR /
|
||||||
|
COPY rootfs /
|
||||||
|
4
LICENSE
4
LICENSE
@@ -178,7 +178,7 @@
|
|||||||
APPENDIX: How to apply the Apache License to your work.
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
To apply the Apache License to your work, attach the following
|
To apply the Apache License to your work, attach the following
|
||||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
replaced with your own identifying information. (Don't include
|
replaced with your own identifying information. (Don't include
|
||||||
the brackets!) The text should be enclosed in the appropriate
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
comment syntax for the file format. We also recommend that a
|
comment syntax for the file format. We also recommend that a
|
||||||
@@ -186,7 +186,7 @@
|
|||||||
same "printed page" as the copyright notice for easier
|
same "printed page" as the copyright notice for easier
|
||||||
identification within third-party archives.
|
identification within third-party archives.
|
||||||
|
|
||||||
Copyright 2017 Pascal Vizeli
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
you may not use this file except in compliance with the License.
|
you may not use this file except in compliance with the License.
|
||||||
|
@@ -1,3 +1,3 @@
|
|||||||
include LICENSE.md
|
include LICENSE.md
|
||||||
graft hassio
|
graft supervisor
|
||||||
recursive-exclude * *.py[co]
|
recursive-exclude * *.py[co]
|
||||||
|
40
README.md
40
README.md
@@ -1,30 +1,32 @@
|
|||||||
[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)
|
# Home Assistant Supervisor
|
||||||
|
|
||||||
# Hass.io
|
|
||||||
|
|
||||||
## First private cloud solution for home automation
|
## First private cloud solution for home automation
|
||||||
|
|
||||||
Hass.io is a Docker-based system for managing your Home Assistant installation
|
Home Assistant (former Hass.io) is a container-based system for managing your
|
||||||
and related applications. The system is controlled via Home Assistant which
|
Home Assistant Core installation and related applications. The system is
|
||||||
communicates with the Supervisor. The Supervisor provides an API to manage the
|
controlled via Home Assistant which communicates with the Supervisor. The
|
||||||
installation. This includes changing network settings or installing
|
Supervisor provides an API to manage the installation. This includes changing
|
||||||
and updating software.
|
network settings or installing and updating software.
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
Installation instructions can be found at <https://home-assistant.io/hassio>.
|
Installation instructions can be found at https://home-assistant.io/getting-started.
|
||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
The development of the supervisor is a bit tricky. Not difficult but tricky.
|
For small changes and bugfixes you can just follow this, but for significant changes open a RFC first.
|
||||||
|
Development instructions can be found [here][development].
|
||||||
|
|
||||||
- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-builder
|
## Release
|
||||||
- Go into a HassOS device or VM and pull your supervisor.
|
|
||||||
- Set the developer modus with cli `hassio supervisor options --channel=dev`
|
|
||||||
- Tag it as `homeassistant/xy-hassio-supervisor:latest`
|
|
||||||
- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
|
|
||||||
- Test your changes
|
|
||||||
|
|
||||||
Small Bugfix or improvements, make a PR. Significant change makes first an RFC.
|
Releases are done in 3 stages (channels) with this structure:
|
||||||
|
|
||||||
|
1. Pull requests are merged to the `main` branch.
|
||||||
|
2. A new build is pushed to the `dev` stage.
|
||||||
|
3. Releases are published.
|
||||||
|
4. A new build is pushed to the `beta` stage.
|
||||||
|
5. The [`stable.json`][stable] file is updated.
|
||||||
|
6. The build that was pushed to `beta` will now be pushed to `stable`.
|
||||||
|
|
||||||
|
[development]: https://developers.home-assistant.io/docs/supervisor/development
|
||||||
|
[stable]: https://github.com/home-assistant/version/blob/master/stable.json
|
||||||
|
@@ -1,156 +0,0 @@
|
|||||||
# https://dev.azure.com/home-assistant
|
|
||||||
|
|
||||||
trigger:
|
|
||||||
branches:
|
|
||||||
include:
|
|
||||||
- master
|
|
||||||
- dev
|
|
||||||
tags:
|
|
||||||
include:
|
|
||||||
- '*'
|
|
||||||
exclude:
|
|
||||||
- untagged*
|
|
||||||
pr:
|
|
||||||
- dev
|
|
||||||
|
|
||||||
variables:
|
|
||||||
- name: versionHadolint
|
|
||||||
value: 'v1.16.3'
|
|
||||||
- name: versionBuilder
|
|
||||||
value: '3.2'
|
|
||||||
- name: versionWheels
|
|
||||||
value: '0.3'
|
|
||||||
- group: docker
|
|
||||||
- group: wheels
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
- job: 'Tox'
|
|
||||||
pool:
|
|
||||||
vmImage: 'ubuntu-16.04'
|
|
||||||
steps:
|
|
||||||
- task: UsePythonVersion@0
|
|
||||||
displayName: 'Use Python $(python.version)'
|
|
||||||
inputs:
|
|
||||||
versionSpec: '3.7'
|
|
||||||
- script: pip install tox
|
|
||||||
displayName: 'Install Tox'
|
|
||||||
- script: tox
|
|
||||||
displayName: 'Run Tox'
|
|
||||||
|
|
||||||
|
|
||||||
- job: 'JQ'
|
|
||||||
pool:
|
|
||||||
vmImage: 'ubuntu-16.04'
|
|
||||||
steps:
|
|
||||||
- script: sudo apt-get install -y jq
|
|
||||||
displayName: 'Install JQ'
|
|
||||||
- bash: |
|
|
||||||
shopt -s globstar
|
|
||||||
cat **/*.json | jq '.'
|
|
||||||
displayName: 'Run JQ'
|
|
||||||
|
|
||||||
|
|
||||||
- job: 'Hadolint'
|
|
||||||
pool:
|
|
||||||
vmImage: 'ubuntu-16.04'
|
|
||||||
steps:
|
|
||||||
- script: sudo docker pull hadolint/hadolint:$(versionHadolint)
|
|
||||||
displayName: 'Install Hadolint'
|
|
||||||
- script: |
|
|
||||||
sudo docker run --rm -i \
|
|
||||||
-v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
|
|
||||||
hadolint/hadolint:$(versionHadolint) < Dockerfile
|
|
||||||
displayName: 'Run Hadolint'
|
|
||||||
|
|
||||||
|
|
||||||
- job: 'Wheels'
|
|
||||||
condition: eq(variables['Build.SourceBranchName'], 'dev')
|
|
||||||
timeoutInMinutes: 360
|
|
||||||
pool:
|
|
||||||
vmImage: 'ubuntu-16.04'
|
|
||||||
strategy:
|
|
||||||
maxParallel: 3
|
|
||||||
matrix:
|
|
||||||
amd64:
|
|
||||||
buildArch: 'amd64'
|
|
||||||
i386:
|
|
||||||
buildArch: 'i386'
|
|
||||||
armhf:
|
|
||||||
buildArch: 'armhf'
|
|
||||||
armv7:
|
|
||||||
buildArch: 'armv7'
|
|
||||||
aarch64:
|
|
||||||
buildArch: 'aarch64'
|
|
||||||
steps:
|
|
||||||
- script: |
|
|
||||||
sudo apt-get install -y --no-install-recommends \
|
|
||||||
qemu-user-static \
|
|
||||||
binfmt-support
|
|
||||||
|
|
||||||
sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
|
|
||||||
sudo update-binfmts --enable qemu-arm
|
|
||||||
sudo update-binfmts --enable qemu-aarch64
|
|
||||||
displayName: 'Initial cross build'
|
|
||||||
- script: |
|
|
||||||
mkdir -p .ssh
|
|
||||||
echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
|
|
||||||
ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
|
|
||||||
chmod 600 .ssh/*
|
|
||||||
displayName: 'Install ssh key'
|
|
||||||
- script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
|
|
||||||
displayName: 'Install wheels builder'
|
|
||||||
- script: |
|
|
||||||
sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
|
|
||||||
homeassistant/$(buildArch)-wheels:$(versionWheels) \
|
|
||||||
--apk "build-base;libffi-dev;openssl-dev" \
|
|
||||||
--index https://wheels.hass.io \
|
|
||||||
--requirement requirements.txt \
|
|
||||||
--upload rsync \
|
|
||||||
--remote wheels@$(wheelsHost):/opt/wheels
|
|
||||||
displayName: 'Run wheels build'
|
|
||||||
|
|
||||||
|
|
||||||
- job: 'ReleaseDEV'
|
|
||||||
condition: and(eq(variables['Build.SourceBranchName'], 'dev'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'), succeeded('Wheels'))
|
|
||||||
dependsOn:
|
|
||||||
- 'JQ'
|
|
||||||
- 'Tox'
|
|
||||||
- 'Hadolint'
|
|
||||||
- 'Wheels'
|
|
||||||
pool:
|
|
||||||
vmImage: 'ubuntu-16.04'
|
|
||||||
steps:
|
|
||||||
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
|
|
||||||
displayName: 'Docker hub login'
|
|
||||||
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
|
|
||||||
displayName: 'Install Builder'
|
|
||||||
- script: |
|
|
||||||
sudo docker run --rm --privileged \
|
|
||||||
-v ~/.docker:/root/.docker \
|
|
||||||
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
|
|
||||||
homeassistant/amd64-builder:$(versionBuilder) \
|
|
||||||
--supervisor --all -t /data --version dev --docker-hub homeassistant
|
|
||||||
displayName: 'Build DEV'
|
|
||||||
|
|
||||||
|
|
||||||
- job: 'Release'
|
|
||||||
condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'))
|
|
||||||
dependsOn:
|
|
||||||
- 'JQ'
|
|
||||||
- 'Tox'
|
|
||||||
- 'Hadolint'
|
|
||||||
pool:
|
|
||||||
vmImage: 'ubuntu-16.04'
|
|
||||||
steps:
|
|
||||||
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
|
|
||||||
displayName: 'Docker hub login'
|
|
||||||
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
|
|
||||||
displayName: 'Install Builder'
|
|
||||||
- script: |
|
|
||||||
sudo docker run --rm --privileged \
|
|
||||||
-v ~/.docker:/root/.docker \
|
|
||||||
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
|
|
||||||
homeassistant/amd64-builder:$(versionBuilder) \
|
|
||||||
--supervisor --all -t /data --docker-hub homeassistant
|
|
||||||
displayName: 'Build Release'
|
|
18
build.json
Normal file
18
build.json
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"image": "homeassistant/{arch}-hassio-supervisor",
|
||||||
|
"shadow_repository": "ghcr.io/home-assistant",
|
||||||
|
"build_from": {
|
||||||
|
"aarch64": "ghcr.io/home-assistant/aarch64-base-python:3.8-alpine3.13",
|
||||||
|
"armhf": "ghcr.io/home-assistant/armhf-base-python:3.8-alpine3.13",
|
||||||
|
"armv7": "ghcr.io/home-assistant/armv7-base-python:3.8-alpine3.13",
|
||||||
|
"amd64": "ghcr.io/home-assistant/amd64-base-python:3.8-alpine3.13",
|
||||||
|
"i386": "ghcr.io/home-assistant/i386-base-python:3.8-alpine3.13"
|
||||||
|
},
|
||||||
|
"args": {
|
||||||
|
"VCN_VERSION": "0.9.4"
|
||||||
|
},
|
||||||
|
"labels": {
|
||||||
|
"io.hass.type": "supervisor",
|
||||||
|
"org.opencontainers.image.source": "https://github.com/home-assistant/supervisor"
|
||||||
|
}
|
||||||
|
}
|
11
codecov.yaml
Normal file
11
codecov.yaml
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
codecov:
|
||||||
|
branch: dev
|
||||||
|
coverage:
|
||||||
|
status:
|
||||||
|
project:
|
||||||
|
default:
|
||||||
|
target: 40
|
||||||
|
threshold: 0.09
|
||||||
|
comment: false
|
||||||
|
github_checks:
|
||||||
|
annotations: false
|
@@ -1 +0,0 @@
|
|||||||
"""Init file for Hass.io."""
|
|
@@ -1,61 +0,0 @@
|
|||||||
"""Main file for Hass.io."""
|
|
||||||
import asyncio
|
|
||||||
from concurrent.futures import ThreadPoolExecutor
|
|
||||||
import logging
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from hassio import bootstrap
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def initialize_event_loop():
|
|
||||||
"""Attempt to use uvloop."""
|
|
||||||
try:
|
|
||||||
import uvloop
|
|
||||||
|
|
||||||
uvloop.install()
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
return asyncio.get_event_loop()
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=invalid-name
|
|
||||||
if __name__ == "__main__":
|
|
||||||
bootstrap.initialize_logging()
|
|
||||||
|
|
||||||
# Init async event loop
|
|
||||||
loop = initialize_event_loop()
|
|
||||||
|
|
||||||
# Check if all information are available to setup Hass.io
|
|
||||||
if not bootstrap.check_environment():
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
# init executor pool
|
|
||||||
executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
|
|
||||||
loop.set_default_executor(executor)
|
|
||||||
|
|
||||||
_LOGGER.info("Initialize Hass.io setup")
|
|
||||||
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
|
|
||||||
|
|
||||||
bootstrap.migrate_system_env(coresys)
|
|
||||||
bootstrap.supervisor_debugger(coresys)
|
|
||||||
|
|
||||||
_LOGGER.info("Setup HassIO")
|
|
||||||
loop.run_until_complete(coresys.core.setup())
|
|
||||||
|
|
||||||
loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
|
|
||||||
loop.call_soon_threadsafe(bootstrap.reg_signal, loop)
|
|
||||||
|
|
||||||
try:
|
|
||||||
_LOGGER.info("Run Hass.io")
|
|
||||||
loop.run_forever()
|
|
||||||
finally:
|
|
||||||
_LOGGER.info("Stopping Hass.io")
|
|
||||||
loop.run_until_complete(coresys.core.stop())
|
|
||||||
executor.shutdown(wait=False)
|
|
||||||
loop.close()
|
|
||||||
|
|
||||||
_LOGGER.info("Close Hass.io")
|
|
||||||
sys.exit(0)
|
|
@@ -1,251 +0,0 @@
|
|||||||
"""Init file for Hass.io add-ons."""
|
|
||||||
import asyncio
|
|
||||||
from contextlib import suppress
|
|
||||||
import logging
|
|
||||||
import tarfile
|
|
||||||
from typing import Dict, List, Optional, Union
|
|
||||||
|
|
||||||
from ..const import BOOT_AUTO, STATE_STARTED
|
|
||||||
from ..coresys import CoreSys, CoreSysAttributes
|
|
||||||
from ..exceptions import (
|
|
||||||
AddonsError,
|
|
||||||
AddonsNotSupportedError,
|
|
||||||
DockerAPIError,
|
|
||||||
HostAppArmorError,
|
|
||||||
)
|
|
||||||
from ..store.addon import AddonStore
|
|
||||||
from .addon import Addon
|
|
||||||
from .data import AddonsData
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
AnyAddon = Union[Addon, AddonStore]
|
|
||||||
|
|
||||||
|
|
||||||
class AddonManager(CoreSysAttributes):
|
|
||||||
"""Manage add-ons inside Hass.io."""
|
|
||||||
|
|
||||||
def __init__(self, coresys: CoreSys):
|
|
||||||
"""Initialize Docker base wrapper."""
|
|
||||||
self.coresys: CoreSys = coresys
|
|
||||||
self.data: AddonsData = AddonsData(coresys)
|
|
||||||
self.local: Dict[str, Addon] = {}
|
|
||||||
self.store: Dict[str, AddonStore] = {}
|
|
||||||
|
|
||||||
@property
|
|
||||||
def all(self) -> List[AnyAddon]:
|
|
||||||
"""Return a list of all add-ons."""
|
|
||||||
addons = {**self.store, **self.local}
|
|
||||||
return list(addons.values())
|
|
||||||
|
|
||||||
@property
|
|
||||||
def installed(self) -> List[Addon]:
|
|
||||||
"""Return a list of all installed add-ons."""
|
|
||||||
return list(self.local.values())
|
|
||||||
|
|
||||||
def get(self, addon_slug: str) -> Optional[AnyAddon]:
|
|
||||||
"""Return an add-on from slug.
|
|
||||||
|
|
||||||
Prio:
|
|
||||||
1 - Local
|
|
||||||
2 - Store
|
|
||||||
"""
|
|
||||||
if addon_slug in self.local:
|
|
||||||
return self.local[addon_slug]
|
|
||||||
return self.store.get(addon_slug)
|
|
||||||
|
|
||||||
def from_token(self, token: str) -> Optional[Addon]:
|
|
||||||
"""Return an add-on from Hass.io token."""
|
|
||||||
for addon in self.installed:
|
|
||||||
if token == addon.hassio_token:
|
|
||||||
return addon
|
|
||||||
return None
|
|
||||||
|
|
||||||
async def load(self) -> None:
|
|
||||||
"""Start up add-on management."""
|
|
||||||
tasks = []
|
|
||||||
for slug in self.data.system:
|
|
||||||
addon = self.local[slug] = Addon(self.coresys, slug)
|
|
||||||
tasks.append(addon.load())
|
|
||||||
|
|
||||||
# Run initial tasks
|
|
||||||
_LOGGER.info("Found %d installed add-ons", len(tasks))
|
|
||||||
if tasks:
|
|
||||||
await asyncio.wait(tasks)
|
|
||||||
|
|
||||||
async def boot(self, stage: str) -> None:
|
|
||||||
"""Boot add-ons with mode auto."""
|
|
||||||
tasks = []
|
|
||||||
for addon in self.installed:
|
|
||||||
if addon.boot != BOOT_AUTO or addon.startup != stage:
|
|
||||||
continue
|
|
||||||
tasks.append(addon.start())
|
|
||||||
|
|
||||||
_LOGGER.info("Phase '%s' start %d add-ons", stage, len(tasks))
|
|
||||||
if tasks:
|
|
||||||
await asyncio.wait(tasks)
|
|
||||||
await asyncio.sleep(self.sys_config.wait_boot)
|
|
||||||
|
|
||||||
async def shutdown(self, stage: str) -> None:
|
|
||||||
"""Shutdown addons."""
|
|
||||||
tasks = []
|
|
||||||
for addon in self.installed:
|
|
||||||
if await addon.state() != STATE_STARTED or addon.startup != stage:
|
|
||||||
continue
|
|
||||||
tasks.append(addon.stop())
|
|
||||||
|
|
||||||
_LOGGER.info("Phase '%s' stop %d add-ons", stage, len(tasks))
|
|
||||||
if tasks:
|
|
||||||
await asyncio.wait(tasks)
|
|
||||||
|
|
||||||
async def install(self, slug: str) -> None:
|
|
||||||
"""Install an add-on."""
|
|
||||||
if slug in self.local:
|
|
||||||
_LOGGER.warning("Add-on %s is already installed", slug)
|
|
||||||
return
|
|
||||||
store = self.store.get(slug)
|
|
||||||
|
|
||||||
if not store:
|
|
||||||
_LOGGER.error("Add-on %s not exists", slug)
|
|
||||||
raise AddonsError()
|
|
||||||
|
|
||||||
if not store.available:
|
|
||||||
_LOGGER.error(
|
|
||||||
"Add-on %s not supported on that platform", slug)
|
|
||||||
raise AddonsNotSupportedError()
|
|
||||||
|
|
||||||
self.data.install(store)
|
|
||||||
addon = Addon(self.coresys, slug)
|
|
||||||
|
|
||||||
if not addon.path_data.is_dir():
|
|
||||||
_LOGGER.info(
|
|
||||||
"Create Home Assistant add-on data folder %s", addon.path_data)
|
|
||||||
addon.path_data.mkdir()
|
|
||||||
|
|
||||||
# Setup/Fix AppArmor profile
|
|
||||||
await addon.install_apparmor()
|
|
||||||
|
|
||||||
try:
|
|
||||||
await addon.instance.install(store.version, store.image)
|
|
||||||
except DockerAPIError:
|
|
||||||
self.data.uninstall(addon)
|
|
||||||
raise AddonsError() from None
|
|
||||||
else:
|
|
||||||
self.local[slug] = addon
|
|
||||||
|
|
||||||
async def uninstall(self, slug: str) -> None:
|
|
||||||
"""Remove an add-on."""
|
|
||||||
if slug not in self.local:
|
|
||||||
_LOGGER.warning("Add-on %s is not installed", slug)
|
|
||||||
return
|
|
||||||
addon = self.local.get(slug)
|
|
||||||
|
|
||||||
try:
|
|
||||||
await addon.instance.remove()
|
|
||||||
except DockerAPIError:
|
|
||||||
raise AddonsError() from None
|
|
||||||
|
|
||||||
await addon.remove_data()
|
|
||||||
|
|
||||||
# Cleanup audio settings
|
|
||||||
if addon.path_asound.exists():
|
|
||||||
with suppress(OSError):
|
|
||||||
addon.path_asound.unlink()
|
|
||||||
|
|
||||||
# Cleanup AppArmor profile
|
|
||||||
with suppress(HostAppArmorError):
|
|
||||||
await addon.uninstall_apparmor()
|
|
||||||
|
|
||||||
# Cleanup internal data
|
|
||||||
addon.remove_discovery()
|
|
||||||
self.data.uninstall(addon)
|
|
||||||
self.local.pop(slug)
|
|
||||||
|
|
||||||
async def update(self, slug: str) -> None:
|
|
||||||
"""Update add-on."""
|
|
||||||
if slug not in self.local:
|
|
||||||
_LOGGER.error("Add-on %s is not installed", slug)
|
|
||||||
raise AddonsError()
|
|
||||||
addon = self.local.get(slug)
|
|
||||||
|
|
||||||
if addon.is_detached:
|
|
||||||
_LOGGER.error("Add-on %s is not available inside store", slug)
|
|
||||||
raise AddonsError()
|
|
||||||
store = self.store.get(slug)
|
|
||||||
|
|
||||||
if addon.version == store.version:
|
|
||||||
_LOGGER.warning("No update available for add-on %s", slug)
|
|
||||||
return
|
|
||||||
|
|
||||||
# Check if available, Maybe something have changed
|
|
||||||
if not store.available:
|
|
||||||
_LOGGER.error(
|
|
||||||
"Add-on %s not supported on that platform", slug)
|
|
||||||
raise AddonsNotSupportedError()
|
|
||||||
|
|
||||||
# Update instance
|
|
||||||
last_state = await addon.state()
|
|
||||||
try:
|
|
||||||
await addon.instance.update(store.version, store.image)
|
|
||||||
except DockerAPIError:
|
|
||||||
raise AddonsError() from None
|
|
||||||
self.data.update(store)
|
|
||||||
|
|
||||||
# Setup/Fix AppArmor profile
|
|
||||||
await addon.install_apparmor()
|
|
||||||
|
|
||||||
# restore state
|
|
||||||
if last_state == STATE_STARTED:
|
|
||||||
await addon.start()
|
|
||||||
|
|
||||||
async def rebuild(self, slug: str) -> None:
|
|
||||||
"""Perform a rebuild of local build add-on."""
|
|
||||||
if slug not in self.local:
|
|
||||||
_LOGGER.error("Add-on %s is not installed", slug)
|
|
||||||
raise AddonsError()
|
|
||||||
addon = self.local.get(slug)
|
|
||||||
|
|
||||||
if addon.is_detached:
|
|
||||||
_LOGGER.error("Add-on %s is not available inside store", slug)
|
|
||||||
raise AddonsError()
|
|
||||||
store = self.store.get(slug)
|
|
||||||
|
|
||||||
# Check if a rebuild is possible now
|
|
||||||
if addon.version != store.version:
|
|
||||||
_LOGGER.error("Version changed, use Update instead Rebuild")
|
|
||||||
raise AddonsError()
|
|
||||||
if not addon.need_build:
|
|
||||||
_LOGGER.error("Can't rebuild a image based add-on")
|
|
||||||
raise AddonsNotSupportedError()
|
|
||||||
|
|
||||||
# remove docker container but not addon config
|
|
||||||
last_state = await addon.state()
|
|
||||||
try:
|
|
||||||
await addon.instance.remove()
|
|
||||||
await addon.instance.install(addon.version)
|
|
||||||
except DockerAPIError:
|
|
||||||
raise AddonsError() from None
|
|
||||||
else:
|
|
||||||
self.data.update(store)
|
|
||||||
|
|
||||||
# restore state
|
|
||||||
if last_state == STATE_STARTED:
|
|
||||||
await addon.start()
|
|
||||||
|
|
||||||
async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
|
|
||||||
"""Restore state of an add-on."""
|
|
||||||
if slug not in self.local:
|
|
||||||
_LOGGER.debug("Add-on %s is not local available for restore")
|
|
||||||
addon = Addon(self.coresys, slug)
|
|
||||||
else:
|
|
||||||
_LOGGER.debug("Add-on %s is local available for restore")
|
|
||||||
addon = self.local[slug]
|
|
||||||
|
|
||||||
await addon.restore(tar_file)
|
|
||||||
|
|
||||||
# Check if new
|
|
||||||
if slug in self.local:
|
|
||||||
return
|
|
||||||
|
|
||||||
_LOGGER.info("Detect new Add-on after restore %s", slug)
|
|
||||||
self.local[slug] = addon
|
|
@@ -1,78 +0,0 @@
|
|||||||
"""Hass.io add-on build environment."""
|
|
||||||
from __future__ import annotations
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import TYPE_CHECKING, Dict
|
|
||||||
|
|
||||||
from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
|
|
||||||
from ..coresys import CoreSys, CoreSysAttributes
|
|
||||||
from ..utils.json import JsonConfig
|
|
||||||
from .validate import SCHEMA_BUILD_CONFIG
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from . import AnyAddon
|
|
||||||
|
|
||||||
|
|
||||||
class AddonBuild(JsonConfig, CoreSysAttributes):
|
|
||||||
"""Handle build options for add-ons."""
|
|
||||||
|
|
||||||
def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
|
|
||||||
"""Initialize Hass.io add-on builder."""
|
|
||||||
self.coresys: CoreSys = coresys
|
|
||||||
self.addon = addon
|
|
||||||
|
|
||||||
super().__init__(
|
|
||||||
Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
|
|
||||||
|
|
||||||
def save_data(self):
|
|
||||||
"""Ignore save function."""
|
|
||||||
raise RuntimeError()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def base_image(self) -> str:
|
|
||||||
"""Base images for this add-on."""
|
|
||||||
return self._data[ATTR_BUILD_FROM].get(
|
|
||||||
self.sys_arch.default,
|
|
||||||
f"homeassistant/{self.sys_arch.default}-base:latest")
|
|
||||||
|
|
||||||
@property
|
|
||||||
def squash(self) -> bool:
|
|
||||||
"""Return True or False if squash is active."""
|
|
||||||
return self._data[ATTR_SQUASH]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def additional_args(self) -> Dict[str, str]:
|
|
||||||
"""Return additional Docker build arguments."""
|
|
||||||
return self._data[ATTR_ARGS]
|
|
||||||
|
|
||||||
def get_docker_args(self, version):
|
|
||||||
"""Create a dict with Docker build arguments."""
|
|
||||||
args = {
|
|
||||||
'path': str(self.addon.path_location),
|
|
||||||
'tag': f"{self.addon.image}:{version}",
|
|
||||||
'pull': True,
|
|
||||||
'forcerm': True,
|
|
||||||
'squash': self.squash,
|
|
||||||
'labels': {
|
|
||||||
'io.hass.version': version,
|
|
||||||
'io.hass.arch': self.sys_arch.default,
|
|
||||||
'io.hass.type': META_ADDON,
|
|
||||||
'io.hass.name': self._fix_label('name'),
|
|
||||||
'io.hass.description': self._fix_label('description'),
|
|
||||||
},
|
|
||||||
'buildargs': {
|
|
||||||
'BUILD_FROM': self.base_image,
|
|
||||||
'BUILD_VERSION': version,
|
|
||||||
'BUILD_ARCH': self.sys_arch.default,
|
|
||||||
**self.additional_args,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.addon.url:
|
|
||||||
args['labels']['io.hass.url'] = self.addon.url
|
|
||||||
|
|
||||||
return args
|
|
||||||
|
|
||||||
def _fix_label(self, label_name: str) -> str:
|
|
||||||
"""Remove characters they are not supported."""
|
|
||||||
label = getattr(self.addon, label_name, "")
|
|
||||||
return label.replace("'", "")
|
|
@@ -1,382 +0,0 @@
|
|||||||
"""Validate add-ons options schema."""
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
import secrets
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
import voluptuous as vol
|
|
||||||
|
|
||||||
from ..const import (
|
|
||||||
ARCH_ALL,
|
|
||||||
ATTR_ACCESS_TOKEN,
|
|
||||||
ATTR_APPARMOR,
|
|
||||||
ATTR_ARCH,
|
|
||||||
ATTR_ARGS,
|
|
||||||
ATTR_AUDIO,
|
|
||||||
ATTR_AUDIO_INPUT,
|
|
||||||
ATTR_AUDIO_OUTPUT,
|
|
||||||
ATTR_AUTH_API,
|
|
||||||
ATTR_AUTO_UART,
|
|
||||||
ATTR_AUTO_UPDATE,
|
|
||||||
ATTR_BOOT,
|
|
||||||
ATTR_BUILD_FROM,
|
|
||||||
ATTR_DESCRIPTON,
|
|
||||||
ATTR_DEVICES,
|
|
||||||
ATTR_DEVICETREE,
|
|
||||||
ATTR_DISCOVERY,
|
|
||||||
ATTR_DOCKER_API,
|
|
||||||
ATTR_ENVIRONMENT,
|
|
||||||
ATTR_FULL_ACCESS,
|
|
||||||
ATTR_GPIO,
|
|
||||||
ATTR_HASSIO_API,
|
|
||||||
ATTR_HASSIO_ROLE,
|
|
||||||
ATTR_HOMEASSISTANT,
|
|
||||||
ATTR_HOMEASSISTANT_API,
|
|
||||||
ATTR_HOST_DBUS,
|
|
||||||
ATTR_HOST_IPC,
|
|
||||||
ATTR_HOST_NETWORK,
|
|
||||||
ATTR_HOST_PID,
|
|
||||||
ATTR_IMAGE,
|
|
||||||
ATTR_INGRESS,
|
|
||||||
ATTR_INGRESS_ENTRY,
|
|
||||||
ATTR_INGRESS_PORT,
|
|
||||||
ATTR_INGRESS_TOKEN,
|
|
||||||
ATTR_INGRESS_PANEL,
|
|
||||||
ATTR_PANEL_ADMIN,
|
|
||||||
ATTR_PANEL_ICON,
|
|
||||||
ATTR_PANEL_TITLE,
|
|
||||||
ATTR_KERNEL_MODULES,
|
|
||||||
ATTR_LEGACY,
|
|
||||||
ATTR_LOCATON,
|
|
||||||
ATTR_MACHINE,
|
|
||||||
ATTR_MAP,
|
|
||||||
ATTR_NAME,
|
|
||||||
ATTR_NETWORK,
|
|
||||||
ATTR_OPTIONS,
|
|
||||||
ATTR_PORTS,
|
|
||||||
ATTR_PORTS_DESCRIPTION,
|
|
||||||
ATTR_PRIVILEGED,
|
|
||||||
ATTR_PROTECTED,
|
|
||||||
ATTR_REPOSITORY,
|
|
||||||
ATTR_SCHEMA,
|
|
||||||
ATTR_SERVICES,
|
|
||||||
ATTR_SLUG,
|
|
||||||
ATTR_SQUASH,
|
|
||||||
ATTR_STARTUP,
|
|
||||||
ATTR_STATE,
|
|
||||||
ATTR_STDIN,
|
|
||||||
ATTR_SYSTEM,
|
|
||||||
ATTR_TIMEOUT,
|
|
||||||
ATTR_TMPFS,
|
|
||||||
ATTR_URL,
|
|
||||||
ATTR_USER,
|
|
||||||
ATTR_UUID,
|
|
||||||
ATTR_VERSION,
|
|
||||||
ATTR_WEBUI,
|
|
||||||
BOOT_AUTO,
|
|
||||||
BOOT_MANUAL,
|
|
||||||
PRIVILEGED_ALL,
|
|
||||||
ROLE_ALL,
|
|
||||||
ROLE_DEFAULT,
|
|
||||||
STARTUP_ALL,
|
|
||||||
STARTUP_APPLICATION,
|
|
||||||
STARTUP_SERVICES,
|
|
||||||
STATE_STARTED,
|
|
||||||
STATE_STOPPED,
|
|
||||||
)
|
|
||||||
from ..discovery.validate import valid_discovery_service
|
|
||||||
from ..validate import (
|
|
||||||
ALSA_DEVICE,
|
|
||||||
DOCKER_PORTS,
|
|
||||||
DOCKER_PORTS_DESCRIPTION,
|
|
||||||
NETWORK_PORT,
|
|
||||||
TOKEN,
|
|
||||||
UUID_MATCH,
|
|
||||||
)
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
|
|
||||||
RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")
|
|
||||||
|
|
||||||
V_STR = 'str'
|
|
||||||
V_INT = 'int'
|
|
||||||
V_FLOAT = 'float'
|
|
||||||
V_BOOL = 'bool'
|
|
||||||
V_EMAIL = 'email'
|
|
||||||
V_URL = 'url'
|
|
||||||
V_PORT = 'port'
|
|
||||||
V_MATCH = 'match'
|
|
||||||
|
|
||||||
RE_SCHEMA_ELEMENT = re.compile(
|
|
||||||
r"^(?:"
|
|
||||||
r"|str|bool|email|url|port"
|
|
||||||
r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
|
|
||||||
r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
|
|
||||||
r"|match\((?P<match>.*)\)"
|
|
||||||
r")\??$"
|
|
||||||
)
|
|
||||||
|
|
||||||
RE_DOCKER_IMAGE = re.compile(
|
|
||||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
|
|
||||||
RE_DOCKER_IMAGE_BUILD = re.compile(
|
|
||||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")
|
|
||||||
|
|
||||||
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
|
|
||||||
|
|
||||||
|
|
||||||
MACHINE_ALL = [
|
|
||||||
'intel-nuc', 'odroid-c2', 'odroid-xu', 'orangepi-prime', 'qemux86',
|
|
||||||
'qemux86-64', 'qemuarm', 'qemuarm-64', 'raspberrypi', 'raspberrypi2',
|
|
||||||
'raspberrypi3', 'raspberrypi3-64', 'tinker',
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def _simple_startup(value):
|
|
||||||
"""Simple startup schema."""
|
|
||||||
if value == "before":
|
|
||||||
return STARTUP_SERVICES
|
|
||||||
if value == "after":
|
|
||||||
return STARTUP_APPLICATION
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_ADDON_CONFIG = vol.Schema({
|
|
||||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
|
|
||||||
vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
|
|
||||||
vol.Optional(ATTR_URL): vol.Url(),
|
|
||||||
vol.Required(ATTR_STARTUP):
|
|
||||||
vol.All(_simple_startup, vol.In(STARTUP_ALL)),
|
|
||||||
vol.Required(ATTR_BOOT):
|
|
||||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
|
||||||
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
|
||||||
vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
|
|
||||||
vol.Optional(ATTR_WEBUI):
|
|
||||||
vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
|
|
||||||
vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(NETWORK_PORT, vol.Equal(0)),
|
|
||||||
vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
|
|
||||||
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
|
|
||||||
vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_TMPFS):
|
|
||||||
vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
|
|
||||||
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
|
|
||||||
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
|
||||||
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
|
|
||||||
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
|
|
||||||
vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
|
|
||||||
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
|
|
||||||
vol.Required(ATTR_OPTIONS): dict,
|
|
||||||
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
|
|
||||||
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
|
|
||||||
vol.Any(
|
|
||||||
SCHEMA_ELEMENT,
|
|
||||||
{vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
|
|
||||||
),
|
|
||||||
], vol.Schema({
|
|
||||||
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
|
|
||||||
}))
|
|
||||||
}), False),
|
|
||||||
vol.Optional(ATTR_IMAGE):
|
|
||||||
vol.Match(RE_DOCKER_IMAGE),
|
|
||||||
vol.Optional(ATTR_TIMEOUT, default=10):
|
|
||||||
vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_BUILD_CONFIG = vol.Schema({
|
|
||||||
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema({
|
|
||||||
vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD),
|
|
||||||
}),
|
|
||||||
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
|
|
||||||
vol.Coerce(str): vol.Coerce(str)
|
|
||||||
}),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_ADDON_USER = vol.Schema({
|
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_IMAGE): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
|
|
||||||
vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
|
|
||||||
vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_OPTIONS, default=dict): dict,
|
|
||||||
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_BOOT):
|
|
||||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
|
||||||
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
|
|
||||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
|
|
||||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
|
|
||||||
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
|
|
||||||
vol.Required(ATTR_LOCATON): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_ADDONS_FILE = vol.Schema({
|
|
||||||
vol.Optional(ATTR_USER, default=dict): {
|
|
||||||
vol.Coerce(str): SCHEMA_ADDON_USER,
|
|
||||||
},
|
|
||||||
vol.Optional(ATTR_SYSTEM, default=dict): {
|
|
||||||
vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_ADDON_SNAPSHOT = vol.Schema({
|
|
||||||
vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
|
|
||||||
vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
|
|
||||||
vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
|
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_options(raw_schema):
|
|
||||||
"""Validate schema."""
|
|
||||||
def validate(struct):
|
|
||||||
"""Create schema validator for add-ons options."""
|
|
||||||
options = {}
|
|
||||||
|
|
||||||
# read options
|
|
||||||
for key, value in struct.items():
|
|
||||||
# Ignore unknown options / remove from list
|
|
||||||
if key not in raw_schema:
|
|
||||||
_LOGGER.warning("Unknown options %s", key)
|
|
||||||
continue
|
|
||||||
|
|
||||||
typ = raw_schema[key]
|
|
||||||
try:
|
|
||||||
if isinstance(typ, list):
|
|
||||||
# nested value list
|
|
||||||
options[key] = _nested_validate_list(typ[0], value, key)
|
|
||||||
elif isinstance(typ, dict):
|
|
||||||
# nested value dict
|
|
||||||
options[key] = _nested_validate_dict(typ, value, key)
|
|
||||||
else:
|
|
||||||
# normal value
|
|
||||||
options[key] = _single_validate(typ, value, key)
|
|
||||||
except (IndexError, KeyError):
|
|
||||||
raise vol.Invalid(f"Type error for {key}") from None
|
|
||||||
|
|
||||||
_check_missing_options(raw_schema, options, 'root')
|
|
||||||
return options
|
|
||||||
|
|
||||||
return validate
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
# pylint: disable=inconsistent-return-statements
|
|
||||||
def _single_validate(typ, value, key):
|
|
||||||
"""Validate a single element."""
|
|
||||||
# if required argument
|
|
||||||
if value is None:
|
|
||||||
raise vol.Invalid(f"Missing required option '{key}'")
|
|
||||||
|
|
||||||
# parse extend data from type
|
|
||||||
match = RE_SCHEMA_ELEMENT.match(typ)
|
|
||||||
|
|
||||||
# prepare range
|
|
||||||
range_args = {}
|
|
||||||
for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
|
|
||||||
group_value = match.group(group_name)
|
|
||||||
if group_value:
|
|
||||||
range_args[group_name[2:]] = float(group_value)
|
|
||||||
|
|
||||||
if typ.startswith(V_STR):
|
|
||||||
return str(value)
|
|
||||||
elif typ.startswith(V_INT):
|
|
||||||
return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
|
|
||||||
elif typ.startswith(V_FLOAT):
|
|
||||||
return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
|
|
||||||
elif typ.startswith(V_BOOL):
|
|
||||||
return vol.Boolean()(value)
|
|
||||||
elif typ.startswith(V_EMAIL):
|
|
||||||
return vol.Email()(value)
|
|
||||||
elif typ.startswith(V_URL):
|
|
||||||
return vol.Url()(value)
|
|
||||||
elif typ.startswith(V_PORT):
|
|
||||||
return NETWORK_PORT(value)
|
|
||||||
elif typ.startswith(V_MATCH):
|
|
||||||
return vol.Match(match.group('match'))(str(value))
|
|
||||||
|
|
||||||
raise vol.Invalid(f"Fatal error for {key} type {typ}")
|
|
||||||
|
|
||||||
|
|
||||||
def _nested_validate_list(typ, data_list, key):
|
|
||||||
"""Validate nested items."""
|
|
||||||
options = []
|
|
||||||
|
|
||||||
for element in data_list:
|
|
||||||
# Nested?
|
|
||||||
if isinstance(typ, dict):
|
|
||||||
c_options = _nested_validate_dict(typ, element, key)
|
|
||||||
options.append(c_options)
|
|
||||||
else:
|
|
||||||
options.append(_single_validate(typ, element, key))
|
|
||||||
|
|
||||||
return options
|
|
||||||
|
|
||||||
|
|
||||||
def _nested_validate_dict(typ, data_dict, key):
|
|
||||||
"""Validate nested items."""
|
|
||||||
options = {}
|
|
||||||
|
|
||||||
for c_key, c_value in data_dict.items():
|
|
||||||
# Ignore unknown options / remove from list
|
|
||||||
if c_key not in typ:
|
|
||||||
_LOGGER.warning("Unknown options %s", c_key)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Nested?
|
|
||||||
if isinstance(typ[c_key], list):
|
|
||||||
options[c_key] = _nested_validate_list(typ[c_key][0],
|
|
||||||
c_value, c_key)
|
|
||||||
else:
|
|
||||||
options[c_key] = _single_validate(typ[c_key], c_value, c_key)
|
|
||||||
|
|
||||||
_check_missing_options(typ, options, key)
|
|
||||||
return options
|
|
||||||
|
|
||||||
|
|
||||||
def _check_missing_options(origin, exists, root):
|
|
||||||
"""Check if all options are exists."""
|
|
||||||
missing = set(origin) - set(exists)
|
|
||||||
for miss_opt in missing:
|
|
||||||
if isinstance(origin[miss_opt], str) and \
|
|
||||||
origin[miss_opt].endswith("?"):
|
|
||||||
continue
|
|
||||||
raise vol.Invalid(f"Missing option {miss_opt} in {root}")
|
|
@@ -1,274 +0,0 @@
|
|||||||
"""Init file for Hass.io RESTful API."""
|
|
||||||
import logging
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from aiohttp import web
|
|
||||||
|
|
||||||
from ..coresys import CoreSys, CoreSysAttributes
|
|
||||||
from .addons import APIAddons
|
|
||||||
from .auth import APIAuth
|
|
||||||
from .discovery import APIDiscovery
|
|
||||||
from .hardware import APIHardware
|
|
||||||
from .hassos import APIHassOS
|
|
||||||
from .homeassistant import APIHomeAssistant
|
|
||||||
from .host import APIHost
|
|
||||||
from .info import APIInfo
|
|
||||||
from .ingress import APIIngress
|
|
||||||
from .proxy import APIProxy
|
|
||||||
from .security import SecurityMiddleware
|
|
||||||
from .services import APIServices
|
|
||||||
from .snapshots import APISnapshots
|
|
||||||
from .supervisor import APISupervisor
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class RestAPI(CoreSysAttributes):
    """Handle RESTful API for Hass.io.

    Owns the aiohttp application, registers every route group, and
    manages the lifetime of the TCP listener (start/stop).
    """

    def __init__(self, coresys: CoreSys):
        """Initialize Docker base wrapper.

        :param coresys: shared core-system object injected into every
            API handler via its ``coresys`` attribute.
        """
        self.coresys: CoreSys = coresys
        # Middleware validates the access token on every incoming request.
        self.security: SecurityMiddleware = SecurityMiddleware(coresys)
        self.webapp: web.Application = web.Application(
            middlewares=[self.security.token_validation])

        # service stuff
        self._runner: web.AppRunner = web.AppRunner(self.webapp)
        # Created in start(); stays None while the server is down.
        self._site: Optional[web.TCPSite] = None

    async def load(self) -> None:
        """Register REST API Calls.

        Each helper below instantiates one handler object and attaches
        its routes to ``self.webapp``.
        """
        self._register_supervisor()
        self._register_host()
        self._register_hassos()
        self._register_hardware()
        self._register_homeassistant()
        self._register_proxy()
        self._register_panel()
        self._register_addons()
        self._register_ingress()
        self._register_snapshots()
        self._register_discovery()
        self._register_services()
        self._register_info()
        self._register_auth()

    def _register_host(self) -> None:
        """Register hostcontrol functions."""
        api_host = APIHost()
        api_host.coresys = self.coresys

        self.webapp.add_routes([
            web.get('/host/info', api_host.info),
            web.post('/host/reboot', api_host.reboot),
            web.post('/host/shutdown', api_host.shutdown),
            web.post('/host/reload', api_host.reload),
            web.post('/host/options', api_host.options),
            web.get('/host/services', api_host.services),
            web.post('/host/services/{service}/stop', api_host.service_stop),
            web.post('/host/services/{service}/start', api_host.service_start),
            web.post('/host/services/{service}/restart',
                     api_host.service_restart),
            web.post('/host/services/{service}/reload',
                     api_host.service_reload),
        ])

    def _register_hassos(self) -> None:
        """Register HassOS functions."""
        api_hassos = APIHassOS()
        api_hassos.coresys = self.coresys

        self.webapp.add_routes([
            web.get('/hassos/info', api_hassos.info),
            web.post('/hassos/update', api_hassos.update),
            web.post('/hassos/update/cli', api_hassos.update_cli),
            web.post('/hassos/config/sync', api_hassos.config_sync),
        ])

    def _register_hardware(self) -> None:
        """Register hardware functions."""
        api_hardware = APIHardware()
        api_hardware.coresys = self.coresys

        self.webapp.add_routes([
            web.get('/hardware/info', api_hardware.info),
            web.get('/hardware/audio', api_hardware.audio),
        ])

    def _register_info(self) -> None:
        """Register info functions."""
        api_info = APIInfo()
        api_info.coresys = self.coresys

        self.webapp.add_routes([
            web.get('/info', api_info.info),
        ])

    def _register_auth(self) -> None:
        """Register auth functions."""
        api_auth = APIAuth()
        api_auth.coresys = self.coresys

        self.webapp.add_routes([
            web.post('/auth', api_auth.auth),
        ])

    def _register_supervisor(self) -> None:
        """Register Supervisor functions."""
        api_supervisor = APISupervisor()
        api_supervisor.coresys = self.coresys

        self.webapp.add_routes([
            web.get('/supervisor/ping', api_supervisor.ping),
            web.get('/supervisor/info', api_supervisor.info),
            web.get('/supervisor/stats', api_supervisor.stats),
            web.get('/supervisor/logs', api_supervisor.logs),
            web.post('/supervisor/update', api_supervisor.update),
            web.post('/supervisor/reload', api_supervisor.reload),
            web.post('/supervisor/options', api_supervisor.options),
        ])

    def _register_homeassistant(self) -> None:
        """Register Home Assistant functions."""
        api_hass = APIHomeAssistant()
        api_hass.coresys = self.coresys

        self.webapp.add_routes([
            web.get('/homeassistant/info', api_hass.info),
            web.get('/homeassistant/logs', api_hass.logs),
            web.get('/homeassistant/stats', api_hass.stats),
            web.post('/homeassistant/options', api_hass.options),
            web.post('/homeassistant/update', api_hass.update),
            web.post('/homeassistant/restart', api_hass.restart),
            web.post('/homeassistant/stop', api_hass.stop),
            web.post('/homeassistant/start', api_hass.start),
            web.post('/homeassistant/check', api_hass.check),
            web.post('/homeassistant/rebuild', api_hass.rebuild),
        ])

    def _register_proxy(self) -> None:
        """Register Home Assistant API Proxy."""
        api_proxy = APIProxy()
        api_proxy.coresys = self.coresys

        # Both websocket paths map to the same handler; /api/{path:.+}
        # forwards arbitrary Home Assistant REST calls.
        self.webapp.add_routes([
            web.get('/homeassistant/api/websocket', api_proxy.websocket),
            web.get('/homeassistant/websocket', api_proxy.websocket),
            web.get('/homeassistant/api/stream', api_proxy.stream),
            web.post('/homeassistant/api/{path:.+}', api_proxy.api),
            web.get('/homeassistant/api/{path:.+}', api_proxy.api),
            web.get('/homeassistant/api/', api_proxy.api),
        ])

    def _register_addons(self) -> None:
        """Register Add-on functions."""
        api_addons = APIAddons()
        api_addons.coresys = self.coresys

        self.webapp.add_routes([
            web.get('/addons', api_addons.list),
            web.post('/addons/reload', api_addons.reload),
            web.get('/addons/{addon}/info', api_addons.info),
            web.post('/addons/{addon}/install', api_addons.install),
            web.post('/addons/{addon}/uninstall', api_addons.uninstall),
            web.post('/addons/{addon}/start', api_addons.start),
            web.post('/addons/{addon}/stop', api_addons.stop),
            web.post('/addons/{addon}/restart', api_addons.restart),
            web.post('/addons/{addon}/update', api_addons.update),
            web.post('/addons/{addon}/options', api_addons.options),
            web.post('/addons/{addon}/rebuild', api_addons.rebuild),
            web.get('/addons/{addon}/logs', api_addons.logs),
            web.get('/addons/{addon}/icon', api_addons.icon),
            web.get('/addons/{addon}/logo', api_addons.logo),
            web.get('/addons/{addon}/changelog', api_addons.changelog),
            web.post('/addons/{addon}/stdin', api_addons.stdin),
            web.post('/addons/{addon}/security', api_addons.security),
            web.get('/addons/{addon}/stats', api_addons.stats),
        ])

    def _register_ingress(self) -> None:
        """Register Ingress functions."""
        api_ingress = APIIngress()
        api_ingress.coresys = self.coresys

        # web.view accepts every HTTP method for the ingress pass-through.
        self.webapp.add_routes([
            web.post('/ingress/session', api_ingress.create_session),
            web.get('/ingress/panels', api_ingress.panels),
            web.view('/ingress/{token}/{path:.*}', api_ingress.handler),
        ])

    def _register_snapshots(self) -> None:
        """Register snapshots functions."""
        api_snapshots = APISnapshots()
        api_snapshots.coresys = self.coresys

        self.webapp.add_routes([
            web.get('/snapshots', api_snapshots.list),
            web.post('/snapshots/reload', api_snapshots.reload),
            web.post('/snapshots/new/full', api_snapshots.snapshot_full),
            web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
            web.post('/snapshots/new/upload', api_snapshots.upload),
            web.get('/snapshots/{snapshot}/info', api_snapshots.info),
            web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
            web.post('/snapshots/{snapshot}/restore/full',
                     api_snapshots.restore_full),
            web.post('/snapshots/{snapshot}/restore/partial',
                     api_snapshots.restore_partial),
            web.get('/snapshots/{snapshot}/download', api_snapshots.download),
        ])

    def _register_services(self) -> None:
        """Register services functions."""
        api_services = APIServices()
        api_services.coresys = self.coresys

        self.webapp.add_routes([
            web.get('/services', api_services.list),
            web.get('/services/{service}', api_services.get_service),
            web.post('/services/{service}', api_services.set_service),
            web.delete('/services/{service}', api_services.del_service),
        ])

    def _register_discovery(self) -> None:
        """Register discovery functions."""
        api_discovery = APIDiscovery()
        api_discovery.coresys = self.coresys

        self.webapp.add_routes([
            web.get('/discovery', api_discovery.list),
            web.get('/discovery/{uuid}', api_discovery.get_discovery),
            web.delete('/discovery/{uuid}', api_discovery.del_discovery),
            web.post('/discovery', api_discovery.set_discovery),
        ])

    def _register_panel(self) -> None:
        """Register panel for Home Assistant."""
        # Serve the bundled frontend files next to this module.
        panel_dir = Path(__file__).parent.joinpath("panel")
        self.webapp.add_routes([web.static('/app', panel_dir)])

    async def start(self) -> None:
        """Run RESTful API webserver."""
        await self._runner.setup()
        self._site = web.TCPSite(
            self._runner, host="0.0.0.0", port=80, shutdown_timeout=5)

        try:
            await self._site.start()
        except OSError as err:
            # Bind failure is logged but not re-raised; the Supervisor
            # keeps running without its HTTP API.
            _LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s",
                          err)
        else:
            _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)

    async def stop(self) -> None:
        """Stop RESTful API webserver."""
        if not self._site:
            return

        # Shutdown running API
        await self._site.stop()
        await self._runner.cleanup()

        _LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)
|
|
@@ -1,61 +0,0 @@
|
|||||||
"""Init file for Hass.io auth/SSO RESTful API."""
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from aiohttp import BasicAuth
|
|
||||||
from aiohttp.web_exceptions import HTTPUnauthorized
|
|
||||||
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION, WWW_AUTHENTICATE
|
|
||||||
|
|
||||||
from .utils import api_process
|
|
||||||
from ..const import REQUEST_FROM, CONTENT_TYPE_JSON, CONTENT_TYPE_URL
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
from ..exceptions import APIForbidden
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""

    def _process_basic(self, request, addon):
        """Validate a Basic-Auth login request.

        Return a coroutine.
        """
        credentials = BasicAuth.decode(request.headers[AUTHORIZATION])
        return self.sys_auth.check_login(
            addon, credentials.login, credentials.password)

    def _process_dict(self, request, addon, data):
        """Validate a dict-style login payload.

        Return a coroutine.
        """
        user = data.get('username') or data.get('user')
        return self.sys_auth.check_login(addon, user, data.get('password'))

    @api_process
    async def auth(self, request):
        """Process login request."""
        addon = request[REQUEST_FROM]
        if not addon.access_auth_api:
            raise APIForbidden("Can't use Home Assistant auth!")

        # A Basic-Auth header takes precedence over any body payload
        if AUTHORIZATION in request.headers:
            return await self._process_basic(request, addon)

        content_type = request.headers.get(CONTENT_TYPE)

        # JSON body
        if content_type == CONTENT_TYPE_JSON:
            return await self._process_dict(
                request, addon, await request.json())

        # Form-encoded body
        if content_type == CONTENT_TYPE_URL:
            return await self._process_dict(
                request, addon, await request.post())

        # No usable credentials supplied -> challenge the client
        raise HTTPUnauthorized(headers={
            WWW_AUTHENTICATE: "Basic realm=\"Hass.io Authentication\""
        })
|
|
@@ -1,34 +0,0 @@
|
|||||||
"""Init file for Hass.io hardware RESTful API."""
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from .utils import api_process
|
|
||||||
from ..const import (
|
|
||||||
ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class APIHardware(CoreSysAttributes):
    """Handle RESTful API for hardware functions."""

    @api_process
    async def info(self, request):
        """Show hardware info."""
        hardware = self.sys_hardware
        return {
            ATTR_SERIAL: list(hardware.serial_devices),
            ATTR_INPUT: list(hardware.input_devices),
            ATTR_DISK: list(hardware.disk_devices),
            ATTR_GPIO: list(hardware.gpio_devices),
            ATTR_AUDIO: hardware.audio_devices,
        }

    @api_process
    async def audio(self, request):
        """Show ALSA audio devices."""
        alsa = self.sys_host.alsa
        return {
            ATTR_AUDIO: {
                ATTR_INPUT: alsa.input_devices,
                ATTR_OUTPUT: alsa.output_devices,
            }
        }
|
|
@@ -1,57 +0,0 @@
|
|||||||
"""Init file for Hass.io HassOS RESTful API."""
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
from typing import Any, Awaitable, Dict
|
|
||||||
|
|
||||||
import voluptuous as vol
|
|
||||||
from aiohttp import web
|
|
||||||
|
|
||||||
from ..const import (
|
|
||||||
ATTR_BOARD,
|
|
||||||
ATTR_VERSION,
|
|
||||||
ATTR_VERSION_CLI,
|
|
||||||
ATTR_VERSION_CLI_LATEST,
|
|
||||||
ATTR_VERSION_LATEST,
|
|
||||||
)
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
from .utils import api_process, api_validate
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
|
|
||||||
|
|
||||||
|
|
||||||
class APIHassOS(CoreSysAttributes):
    """Handle RESTful API for HassOS functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return HassOS information."""
        hassos = self.sys_hassos
        return {
            ATTR_VERSION: hassos.version,
            ATTR_VERSION_CLI: hassos.version_cli,
            ATTR_VERSION_LATEST: hassos.version_latest,
            ATTR_VERSION_CLI_LATEST: hassos.version_cli_latest,
            ATTR_BOARD: hassos.board,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update HassOS."""
        body = await api_validate(SCHEMA_VERSION, request)
        # Fall back to the newest known release when no version is given
        target = body.get(ATTR_VERSION, self.sys_hassos.version_latest)

        # Shield so a cancelled request cannot abort a running OS update
        await asyncio.shield(self.sys_hassos.update(target))

    @api_process
    async def update_cli(self, request: web.Request) -> None:
        """Update HassOS CLI."""
        body = await api_validate(SCHEMA_VERSION, request)
        target = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)

        await asyncio.shield(self.sys_hassos.update_cli(target))

    @api_process
    def config_sync(self, request: web.Request) -> Awaitable[None]:
        """Trigger config reload on HassOS."""
        return asyncio.shield(self.sys_hassos.config_sync())
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
(window.webpackJsonp=window.webpackJsonp||[]).push([[7],{102:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(121),i=t.n(e),o=t(123),u=t.n(o),a=i.a,c=u.a}}]);
|
|
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.510634470d399e194ace.js","sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
@@ -1,21 +0,0 @@
|
|||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at
|
|
||||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
|
||||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
|
||||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
|
||||||
part of the polymer project is also subject to an additional IP rights grant
|
|
||||||
found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at
|
|
||||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
|
||||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
|
||||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
|
||||||
part of the polymer project is also subject to an additional IP rights grant
|
|
||||||
found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.564a2f7b1c38ddaa4ce0.js","sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,32 +0,0 @@
|
|||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
|
||||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
|
||||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
|
||||||
Code distributed by Google as part of the polymer project is also
|
|
||||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @fileoverview
|
|
||||||
* @suppress {checkPrototypalTypes}
|
|
||||||
* @license Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
|
||||||
* This code may only be used under the BSD style license found at
|
|
||||||
* http://polymer.github.io/LICENSE.txt The complete set of authors may be found
|
|
||||||
* at http://polymer.github.io/AUTHORS.txt The complete set of contributors may
|
|
||||||
* be found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by
|
|
||||||
* Google as part of the polymer project is also subject to an additional IP
|
|
||||||
* rights grant found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at
|
|
||||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
|
||||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
|
||||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
|
||||||
part of the polymer project is also subject to an additional IP rights grant
|
|
||||||
found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.659084fef4e3b7b66a76.js","sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
@@ -1,31 +0,0 @@
|
|||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at
|
|
||||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
|
||||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
|
||||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
|
||||||
part of the polymer project is also subject to an additional IP rights grant
|
|
||||||
found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
|
||||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
|
||||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
|
||||||
Code distributed by Google as part of the polymer project is also
|
|
||||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at
|
|
||||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
|
||||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
|
||||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
|
||||||
part of the polymer project is also subject to an additional IP rights grant
|
|
||||||
found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.6e9c87e51920a9c354e5.js","sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.a7e5fb452cd1b3a5faef.js","sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.f15d7f41c0d302cbbc7a.js","sourceRoot":""}
|
|
@@ -1 +0,0 @@
|
|||||||
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],c=0,u=[];c<a.length;c++)o=a[c],r[o]&&u.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(f&&f(n);u.length;)u.shift()()}var t={},r={4:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,c=document.createElement("script");c.charset="utf-8",c.timeout=120,o.nc&&c.setAttribute("nonce",o.nc),c.src=function(e){return o.p+"chunk."+{0:"564a2f7b1c38ddaa4ce0",1:"659084fef4e3b7b66a76",2:"510634470d399e194ace",3:"f15d7f41c0d302cbbc7a",5:"5d31a1778f717ac8b063",6:"b60fb48c5280275dd7e2",7:"3a63ad36bccf4ea567fa",8:"a571dfa106202cc57af6",9:"a7e5fb452cd1b3a5faef",10:"b3340b3df270d20af4a1",11:"6e9c87e51920a9c354e5",12:"1b30ffdc501071af245c",13:"739b67c99ab56cdbd75d"}[e]+".js"}(e),i=function(n){c.onerror=c.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src,i=new Error("Loading chunk "+e+" failed.\n("+o+": "+a+")");i.type=o,i.request=a,t[1](i)}r[e]=void 0}};var f=setTimeout(function(){i({type:"timeout",target:c})},12e4);c.onerror=c.onload=i,document.head.appendChild(c)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return 
e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=window.webpackJsonp=window.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var c=0;c<a.length;c++)n(a[c]);var f=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(1),t.e(5)]).then(t.bind(null,2)),Promise.all([t.e(1),t.e(9),t.e(6)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
|
|
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,38 +0,0 @@
|
|||||||
<!doctype html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<title>Hass.io</title>
|
|
||||||
<meta name='viewport' content='width=device-width, user-scalable=no'>
|
|
||||||
<style>
|
|
||||||
body {
|
|
||||||
height: 100vh;
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<hassio-app></hassio-app>
|
|
||||||
<script>
|
|
||||||
function addScript(src) {
|
|
||||||
var e = document.createElement('script');
|
|
||||||
e.src = src;
|
|
||||||
document.write(e.outerHTML);
|
|
||||||
}
|
|
||||||
var webComponentsSupported = (
|
|
||||||
'customElements' in window &&
|
|
||||||
'import' in document.createElement('link') &&
|
|
||||||
'content' in document.createElement('template'));
|
|
||||||
if (!webComponentsSupported) {
|
|
||||||
addScript('/static/webcomponents-lite.js');
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
<!--
|
|
||||||
Disabled while we make Home Assistant able to serve the right files.
|
|
||||||
<script src="./app.js"></script>
|
|
||||||
-->
|
|
||||||
<link rel='import' href='./hassio-app.html'>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
Binary file not shown.
@@ -1,95 +0,0 @@
|
|||||||
"""Init file for Hass.io util for RESTful API."""
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from aiohttp import web
|
|
||||||
import voluptuous as vol
|
|
||||||
from voluptuous.humanize import humanize_error
|
|
||||||
|
|
||||||
from ..const import (
|
|
||||||
JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
|
|
||||||
CONTENT_TYPE_BINARY)
|
|
||||||
from ..exceptions import HassioError, APIError, APIForbidden
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def json_loads(data):
    """Extract json from string with support for '' and None.

    :param data: raw request body (str/bytes) or a falsy placeholder.
    :return: the decoded JSON value, or ``{}`` for empty/None input.
    :raises APIError: when a non-empty payload is not valid JSON.
    """
    if not data:
        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
        # Suppress the JSONDecodeError context: API consumers only need
        # the clean domain-level error, not the decoder traceback.
        raise APIError("Invalid json") from None
|
|
||||||
|
|
||||||
|
|
||||||
def api_process(method):
    """Wrap an API handler, translating its outcome to a JSON reply."""
    async def wrapper(api, *args, **kwargs):
        """Invoke the wrapped handler and normalize its result."""
        try:
            result = await method(api, *args, **kwargs)
        except (APIError, APIForbidden) as err:
            return api_return_error(message=str(err))
        except HassioError:
            return api_return_error(message="Unknown Error, see logs")

        # dict payloads become an "ok" envelope with data attached
        if isinstance(result, dict):
            return api_return_ok(data=result)
        # pre-built responses pass through untouched
        if isinstance(result, web.Response):
            return result
        # an explicit False signals failure; anything else counts as success
        if isinstance(result, bool) and not result:
            return api_return_error()
        return api_return_ok()

    return wrapper
|
|
||||||
|
|
||||||
|
|
||||||
def api_process_raw(content):
    """Build a decorator that ships handler output raw as *content* type."""
    def wrap_method(method):
        """Wrap function with raw output to rest api."""
        async def wrapper(api, *args, **kwargs):
            """Run the handler and return its payload unmodified."""
            try:
                payload = await method(api, *args, **kwargs)
                payload_type = content
            except (APIError, APIForbidden) as err:
                # Surface the error text as a binary body
                payload = str(err).encode()
                payload_type = CONTENT_TYPE_BINARY
            except HassioError:
                payload = b''
                payload_type = CONTENT_TYPE_BINARY

            return web.Response(body=payload, content_type=payload_type)

        return wrapper
    return wrap_method
|
|
||||||
|
|
||||||
|
|
||||||
def api_return_error(message=None):
    """Return an API error message as an HTTP 400 JSON response."""
    body = {
        JSON_RESULT: RESULT_ERROR,
        JSON_MESSAGE: message,
    }
    return web.json_response(body, status=400)
|
|
||||||
|
|
||||||
|
|
||||||
def api_return_ok(data=None):
    """Return an API ok answer, wrapping *data* (default empty dict)."""
    body = {JSON_RESULT: RESULT_OK, JSON_DATA: data or {}}
    return web.json_response(body)
|
|
||||||
|
|
||||||
|
|
||||||
async def api_validate(schema, request):
    """Validate request data with schema and return the coerced payload."""
    raw = await request.json(loads=json_loads)
    try:
        return schema(raw)
    except vol.Invalid as ex:
        # Re-raise as an API-level error with a readable message
        raise APIError(humanize_error(raw, ex)) from None
|
|
@@ -1,49 +0,0 @@
|
|||||||
{
|
|
||||||
"raspberrypi": [
|
|
||||||
"armhf"
|
|
||||||
],
|
|
||||||
"raspberrypi2": [
|
|
||||||
"armv7",
|
|
||||||
"armhf"
|
|
||||||
],
|
|
||||||
"raspberrypi3": [
|
|
||||||
"armv7",
|
|
||||||
"armhf"
|
|
||||||
],
|
|
||||||
"raspberrypi3-64": [
|
|
||||||
"aarch64",
|
|
||||||
"armv7",
|
|
||||||
"armhf"
|
|
||||||
],
|
|
||||||
"tinker": [
|
|
||||||
"armv7",
|
|
||||||
"armhf"
|
|
||||||
],
|
|
||||||
"odroid-c2": [
|
|
||||||
"aarch64"
|
|
||||||
],
|
|
||||||
"odroid-xu": [
|
|
||||||
"armv7",
|
|
||||||
"armhf"
|
|
||||||
],
|
|
||||||
"orangepi-prime": [
|
|
||||||
"aarch64"
|
|
||||||
],
|
|
||||||
"qemux86": [
|
|
||||||
"i386"
|
|
||||||
],
|
|
||||||
"qemux86-64": [
|
|
||||||
"amd64",
|
|
||||||
"i386"
|
|
||||||
],
|
|
||||||
"qemuarm": [
|
|
||||||
"armhf"
|
|
||||||
],
|
|
||||||
"qemuarm-64": [
|
|
||||||
"aarch64"
|
|
||||||
],
|
|
||||||
"intel-nuc": [
|
|
||||||
"amd64",
|
|
||||||
"i386"
|
|
||||||
]
|
|
||||||
}
|
|
@@ -1,95 +0,0 @@
|
|||||||
"""Manage SSO for Add-ons with Home Assistant user."""
|
|
||||||
import logging
|
|
||||||
import hashlib
|
|
||||||
|
|
||||||
from .const import (
|
|
||||||
FILE_HASSIO_AUTH, ATTR_PASSWORD, ATTR_USERNAME, ATTR_ADDON)
|
|
||||||
from .coresys import CoreSysAttributes
|
|
||||||
from .utils.json import JsonConfig
|
|
||||||
from .validate import SCHEMA_AUTH_CONFIG
|
|
||||||
from .exceptions import AuthError, HomeAssistantAPIError
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Auth(JsonConfig, CoreSysAttributes):
    """Manage SSO for Add-ons with Home Assistant user."""

    def __init__(self, coresys):
        """Initialize the authentication manager backed by auth.json."""
        super().__init__(FILE_HASSIO_AUTH, SCHEMA_AUTH_CONFIG)
        self.coresys = coresys

    def _check_cache(self, username, password):
        """Return True when the credential pair is in the local hash cache."""
        user_hash = _rehash(username)
        pass_hash = _rehash(password, username)

        if self._data.get(user_hash) != pass_hash:
            _LOGGER.warning("No cache hit for %s", username)
            return False

        _LOGGER.info("Cache hit for %s", username)
        return True

    def _update_cache(self, username, password):
        """Store the hashed credential pair, persisting only on change."""
        user_hash = _rehash(username)
        pass_hash = _rehash(password, username)

        if self._data.get(user_hash) == pass_hash:
            return

        self._data[user_hash] = pass_hash
        self.save_data()

    def _dismatch_cache(self, username, password):
        """Drop a cached entry matching these (now known-bad) credentials."""
        user_hash = _rehash(username)
        pass_hash = _rehash(password, username)

        if self._data.get(user_hash) != pass_hash:
            return

        self._data.pop(user_hash, None)
        self.save_data()

    async def check_login(self, addon, username, password):
        """Validate a login against Home Assistant, cache as fallback."""
        if password is None:
            _LOGGER.error("None as password is not supported!")
            raise AuthError()
        _LOGGER.info("Auth request from %s for %s", addon.slug, username)

        # Home Assistant API down -> answer from the local hash cache
        if not await self.sys_homeassistant.check_api_state():
            _LOGGER.info("Home Assistant not running, check cache")
            return self._check_cache(username, password)

        payload = {
            ATTR_USERNAME: username,
            ATTR_PASSWORD: password,
            ATTR_ADDON: addon.slug,
        }
        try:
            async with self.sys_homeassistant.make_request(
                    'post', 'api/hassio_auth', json=payload) as req:
                if req.status == 200:
                    _LOGGER.info("Success login from %s", username)
                    self._update_cache(username, password)
                    return True

                _LOGGER.warning("Wrong login from %s", username)
                self._dismatch_cache(username, password)
                return False
        except HomeAssistantAPIError:
            _LOGGER.error("Can't request auth on Home Assistant!")

        raise AuthError()
|
|
||||||
|
|
||||||
|
|
||||||
def _rehash(value, salt2=""):
|
|
||||||
"""Rehash a value."""
|
|
||||||
for idx in range(1, 20):
|
|
||||||
value = hashlib.sha256(f"{value}{idx}{salt2}".encode()).hexdigest()
|
|
||||||
return value
|
|
@@ -1,224 +0,0 @@
|
|||||||
"""Bootstrap Hass.io."""
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from pathlib import Path
|
|
||||||
import shutil
|
|
||||||
import signal
|
|
||||||
|
|
||||||
from colorlog import ColoredFormatter
|
|
||||||
|
|
||||||
from .addons import AddonManager
|
|
||||||
from .api import RestAPI
|
|
||||||
from .arch import CpuArch
|
|
||||||
from .auth import Auth
|
|
||||||
from .const import SOCKET_DOCKER
|
|
||||||
from .core import HassIO
|
|
||||||
from .coresys import CoreSys
|
|
||||||
from .dbus import DBusManager
|
|
||||||
from .discovery import Discovery
|
|
||||||
from .hassos import HassOS
|
|
||||||
from .homeassistant import HomeAssistant
|
|
||||||
from .host import HostManager
|
|
||||||
from .ingress import Ingress
|
|
||||||
from .services import ServiceManager
|
|
||||||
from .snapshots import SnapshotManager
|
|
||||||
from .supervisor import Supervisor
|
|
||||||
from .store import StoreManager
|
|
||||||
from .tasks import Tasks
|
|
||||||
from .updater import Updater
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Required environment variables checked by check_environment().
ENV_SHARE = "SUPERVISOR_SHARE"
ENV_NAME = "SUPERVISOR_NAME"
ENV_REPO = "HOMEASSISTANT_REPOSITORY"

# Host file whose content becomes coresys.machine_id when present.
MACHINE_ID = Path("/etc/machine-id")
|
|
||||||
|
|
||||||
|
|
||||||
async def initialize_coresys():
    """Initialize HassIO coresys/objects.

    Builds the CoreSys container, attaches every subsystem manager to
    it, bootstraps the on-disk layout and reads the machine id.
    """
    coresys = CoreSys()

    # Initialize core objects
    # NOTE(review): the construction order below looks deliberate
    # (e.g. updater before api, supervisor before homeassistant) --
    # confirm before reordering.
    coresys.core = HassIO(coresys)
    coresys.arch = CpuArch(coresys)
    coresys.auth = Auth(coresys)
    coresys.updater = Updater(coresys)
    coresys.api = RestAPI(coresys)
    coresys.supervisor = Supervisor(coresys)
    coresys.homeassistant = HomeAssistant(coresys)
    coresys.addons = AddonManager(coresys)
    coresys.snapshots = SnapshotManager(coresys)
    coresys.host = HostManager(coresys)
    coresys.ingress = Ingress(coresys)
    coresys.tasks = Tasks(coresys)
    coresys.services = ServiceManager(coresys)
    coresys.store = StoreManager(coresys)
    coresys.discovery = Discovery(coresys)
    coresys.dbus = DBusManager(coresys)
    coresys.hassos = HassOS(coresys)

    # bootstrap config: create the folder layout and apply log level
    initialize_system_data(coresys)

    # Set Machine/Host ID when the host exposes /etc/machine-id
    if MACHINE_ID.exists():
        coresys.machine_id = MACHINE_ID.read_text().strip()

    return coresys
|
|
||||||
|
|
||||||
|
|
||||||
def initialize_system_data(coresys: CoreSys):
    """Set up the default configuration and create folders.

    Each required folder is created on first run; log messages and
    mkdir(parents=...) flags match the original per-folder behavior.
    Finally applies the configured log level.
    """
    config = coresys.config

    # (path, log message, create missing parents?) for every data folder.
    required_folders = (
        (config.path_homeassistant,
         "Create Home Assistant configuration folder %s", False),
        (config.path_ssl, "Create Hass.io SSL/TLS folder %s", False),
        (config.path_addons_data,
         "Create Hass.io Add-on data folder %s", True),
        (config.path_addons_local,
         "Create Hass.io Add-on local repository folder %s", True),
        (config.path_addons_git,
         "Create Hass.io Add-on git repositories folder %s", True),
        (config.path_tmp, "Create Hass.io temp folder %s", True),
        (config.path_backup, "Create Hass.io backup folder %s", False),
        (config.path_share, "Create Hass.io share folder %s", False),
        (config.path_apparmor, "Create Hass.io Apparmor folder %s", False),
    )

    for folder, message, parents in required_folders:
        if folder.is_dir():
            continue
        _LOGGER.info(message, folder)
        folder.mkdir(parents=parents)

    # Update log level
    coresys.config.modify_log_level()
|
|
||||||
|
|
||||||
|
|
||||||
def migrate_system_env(coresys: CoreSys):
    """Cleanup some stuff after update."""
    config = coresys.config

    # hass.io 0.37 -> 0.38: the add-on build dir moved; drop the leftover.
    legacy_build_dir = Path(config.path_hassio, "addons/build")
    if not legacy_build_dir.is_dir():
        return

    try:
        # rmdir only succeeds on an empty directory -- intentional.
        legacy_build_dir.rmdir()
    except OSError:
        _LOGGER.warning("Can't cleanup old Add-on build directory")
|
|
||||||
|
|
||||||
|
|
||||||
def initialize_logging():
    """Setup the logging."""
    logging.basicConfig(level=logging.INFO)

    base_fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    date_fmt = "%y-%m-%d %H:%M:%S"

    # suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    color_formatter = ColoredFormatter(
        f"%(log_color)s{base_fmt}%(reset)s",
        datefmt=date_fmt,
        reset=True,
        log_colors={
            "DEBUG": "cyan",
            "INFO": "green",
            "WARNING": "yellow",
            "ERROR": "red",
            "CRITICAL": "red",
        },
    )

    # basicConfig above guarantees the root logger has at least one handler
    logging.getLogger().handlers[0].setFormatter(color_formatter)
|
|
||||||
|
|
||||||
|
|
||||||
def check_environment():
    """Return True when every runtime prerequisite is present.

    Checks the required environment variables, the Docker socket and
    the socat/gdbus executables; logs a fatal message and returns
    False on the first missing item.
    """
    # check environment variables
    for key in (ENV_SHARE, ENV_NAME, ENV_REPO):
        if key not in os.environ:
            _LOGGER.fatal("Can't find %s in env!", key)
            return False

    # check docker socket
    if not SOCKET_DOCKER.is_socket():
        _LOGGER.fatal("Can't find Docker socket!")
        return False

    # check socat exec
    if not shutil.which("socat"):
        _LOGGER.fatal("Can't find socat!")
        return False

    # check gdbus exec
    if not shutil.which("gdbus"):
        _LOGGER.fatal("Can't find gdbus!")
        return False

    return True
|
|
||||||
|
|
||||||
|
|
||||||
def reg_signal(loop):
    """Register SIGTERM, SIGHUP and SIGINT handlers that stop *loop*.

    The three signals get identical handling, so one loop replaces the
    three copy-pasted try/except stanzas; sig.name renders the same
    warning text as the original literals.
    """
    for sig in (signal.SIGTERM, signal.SIGHUP, signal.SIGINT):
        try:
            loop.add_signal_handler(sig, lambda: loop.call_soon(loop.stop))
        except (ValueError, RuntimeError):
            # e.g. not on the main thread, or the platform lacks the signal
            _LOGGER.warning("Could not bind to %s", sig.name)
|
|
||||||
|
|
||||||
|
|
||||||
def supervisor_debugger(coresys: CoreSys) -> None:
    """Setup debugger if needed."""
    # Attach only when debug mode AND dev channel are both active.
    if not (coresys.config.debug and coresys.dev):
        return

    import ptvsd  # third-party; imported lazily, only for debug sessions

    _LOGGER.info("Initialize Hass.io debugger")

    ptvsd.enable_attach(address=('0.0.0.0', 33333), redirect_output=True)
    if coresys.config.debug_block:
        # Block startup until a debugger client attaches.
        ptvsd.wait_for_attach()
|
|
250
hassio/config.py
250
hassio/config.py
@@ -1,250 +0,0 @@
|
|||||||
"""Bootstrap Hass.io."""
|
|
||||||
from datetime import datetime
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from pathlib import Path, PurePath
|
|
||||||
import re
|
|
||||||
|
|
||||||
import pytz
|
|
||||||
|
|
||||||
from .const import (
|
|
||||||
ATTR_ADDONS_CUSTOM_LIST,
|
|
||||||
ATTR_DEBUG,
|
|
||||||
ATTR_DEBUG_BLOCK,
|
|
||||||
ATTR_LAST_BOOT,
|
|
||||||
ATTR_LOGGING,
|
|
||||||
ATTR_TIMEZONE,
|
|
||||||
ATTR_WAIT_BOOT,
|
|
||||||
FILE_HASSIO_CONFIG,
|
|
||||||
HASSIO_DATA,
|
|
||||||
)
|
|
||||||
from .utils.dt import parse_datetime
|
|
||||||
from .utils.json import JsonConfig
|
|
||||||
from .validate import SCHEMA_HASSIO_CONFIG
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Relative sub-paths under the Hass.io data directory (HASSIO_DATA),
# composed by the CoreConfig path_* properties below.
HOMEASSISTANT_CONFIG = PurePath("homeassistant")

HASSIO_SSL = PurePath("ssl")

ADDONS_CORE = PurePath("addons/core")
ADDONS_LOCAL = PurePath("addons/local")
ADDONS_GIT = PurePath("addons/git")
ADDONS_DATA = PurePath("addons/data")

BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share")
TMP_DATA = PurePath("tmp")
APPARMOR_DATA = PurePath("apparmor")

# Epoch timestamp used when no boot time has been recorded yet.
DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()

# Extracts `time_zone: <tz>` from Home Assistant's configuration.yaml.
RE_TIMEZONE = re.compile(r"time_zone: (?P<timezone>[\w/\-+]+)")
|
|
||||||
|
|
||||||
|
|
||||||
class CoreConfig(JsonConfig):
    """Hold all core config data."""

    def __init__(self):
        """Initialize config object from the persistent JSON file."""
        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)

    @property
    def timezone(self):
        """Return system timezone.

        Prefers the ``time_zone:`` entry of the Home Assistant
        configuration.yaml; falls back to the stored supervisor value
        when the file is missing/unreadable, no entry is present, or
        the zone name is unknown. (Rewritten without ``assert`` for
        control flow, which would break under ``python -O``.)
        """
        config_file = Path(self.path_homeassistant, "configuration.yaml")
        timezone = None
        try:
            configuration = config_file.read_text()
            match = RE_TIMEZONE.search(configuration)
            if match:
                timezone = match.group("timezone")
                # Raises UnknownTimeZoneError for bogus zone names
                pytz.timezone(timezone)
        except (pytz.exceptions.UnknownTimeZoneError, OSError):
            timezone = None

        if timezone is None:
            _LOGGER.debug("Can't parse Home Assistant timezone")
            return self._data[ATTR_TIMEZONE]

        return timezone

    @timezone.setter
    def timezone(self, value):
        """Set system timezone."""
        self._data[ATTR_TIMEZONE] = value

    @property
    def wait_boot(self) -> int:
        """Return wait time for auto boot stages."""
        return self._data[ATTR_WAIT_BOOT]

    @wait_boot.setter
    def wait_boot(self, value: int):
        """Set wait boot time."""
        self._data[ATTR_WAIT_BOOT] = value

    @property
    def debug(self) -> bool:
        """Return True if ptvsd debugging is enabled."""
        return self._data[ATTR_DEBUG]

    @debug.setter
    def debug(self, value: bool):
        """Set debug mode."""
        self._data[ATTR_DEBUG] = value

    @property
    def debug_block(self) -> bool:
        """Return True if startup should block waiting for ptvsd."""
        return self._data[ATTR_DEBUG_BLOCK]

    @debug_block.setter
    def debug_block(self, value: bool):
        """Set debug wait mode."""
        self._data[ATTR_DEBUG_BLOCK] = value

    @property
    def logging(self) -> str:
        """Return log level of system."""
        return self._data[ATTR_LOGGING]

    @logging.setter
    def logging(self, value: str):
        """Set system log level and apply it immediately."""
        self._data[ATTR_LOGGING] = value
        self.modify_log_level()

    def modify_log_level(self) -> None:
        """Change the root log level to the stored value."""
        # self.logging is schema-validated, so the attribute lookup is safe
        lvl = getattr(logging, self.logging.upper())
        logging.basicConfig(level=lvl)

    @property
    def last_boot(self):
        """Return last boot datetime (epoch+1s when unset/unparsable)."""
        boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)

        boot_time = parse_datetime(boot_str)
        if not boot_time:
            return datetime.utcfromtimestamp(1)
        return boot_time

    @last_boot.setter
    def last_boot(self, value):
        """Set last boot datetime."""
        self._data[ATTR_LAST_BOOT] = value.isoformat()

    @property
    def path_hassio(self):
        """Return Hass.io data path."""
        return HASSIO_DATA

    @property
    def path_extern_hassio(self):
        """Return Hass.io data path external for Docker."""
        return PurePath(os.environ["SUPERVISOR_SHARE"])

    @property
    def path_extern_homeassistant(self):
        """Return config path external for Docker."""
        return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))

    @property
    def path_homeassistant(self):
        """Return config path inside supervisor."""
        return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)

    @property
    def path_extern_ssl(self):
        """Return SSL path external for Docker."""
        return str(PurePath(self.path_extern_hassio, HASSIO_SSL))

    @property
    def path_ssl(self):
        """Return SSL path inside supervisor."""
        return Path(HASSIO_DATA, HASSIO_SSL)

    @property
    def path_addons_core(self):
        """Return git path for core Add-ons."""
        return Path(HASSIO_DATA, ADDONS_CORE)

    @property
    def path_addons_git(self):
        """Return path for Git Add-on."""
        return Path(HASSIO_DATA, ADDONS_GIT)

    @property
    def path_addons_local(self):
        """Return path for custom Add-ons."""
        return Path(HASSIO_DATA, ADDONS_LOCAL)

    @property
    def path_extern_addons_local(self):
        """Return path for custom Add-ons external for Docker."""
        return PurePath(self.path_extern_hassio, ADDONS_LOCAL)

    @property
    def path_addons_data(self):
        """Return root Add-on data folder."""
        return Path(HASSIO_DATA, ADDONS_DATA)

    @property
    def path_extern_addons_data(self):
        """Return root add-on data folder external for Docker."""
        return PurePath(self.path_extern_hassio, ADDONS_DATA)

    @property
    def path_tmp(self):
        """Return Hass.io temp folder."""
        return Path(HASSIO_DATA, TMP_DATA)

    @property
    def path_extern_tmp(self):
        """Return Hass.io temp folder for Docker."""
        return PurePath(self.path_extern_hassio, TMP_DATA)

    @property
    def path_backup(self):
        """Return root backup data folder."""
        return Path(HASSIO_DATA, BACKUP_DATA)

    @property
    def path_extern_backup(self):
        """Return root backup data folder external for Docker."""
        return PurePath(self.path_extern_hassio, BACKUP_DATA)

    @property
    def path_share(self):
        """Return root share data folder."""
        return Path(HASSIO_DATA, SHARE_DATA)

    @property
    def path_apparmor(self):
        """Return root Apparmor profile folder."""
        return Path(HASSIO_DATA, APPARMOR_DATA)

    @property
    def path_extern_share(self):
        """Return root share data folder external for Docker."""
        return PurePath(self.path_extern_hassio, SHARE_DATA)

    @property
    def addons_repositories(self):
        """Return list of custom Add-on repositories."""
        return self._data[ATTR_ADDONS_CUSTOM_LIST]

    def add_addon_repository(self, repo):
        """Add a custom repository to list (no-op when already present)."""
        if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)

    def drop_addon_repository(self, repo):
        """Remove a custom repository from list (no-op when absent)."""
        if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
|
|
313
hassio/const.py
313
hassio/const.py
@@ -1,313 +0,0 @@
|
|||||||
"""Constants file for Hass.io.

Cleanup: duplicate assignments of ATTR_TYPE, ATTR_DISCOVERY and
ATTR_PROTECTED were removed (each re-assigned the identical value).
The misspelled names ATTR_DESCRIPTON and ATTR_LOCATON are kept as-is
for compatibility with existing callers.
"""
from pathlib import Path
from ipaddress import ip_network


HASSIO_VERSION = "163"

URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
URL_HASSIO_APPARMOR = "https://s3.amazonaws.com/hassio-version/apparmor.txt"

URL_HASSOS_OTA = (
    "https://github.com/home-assistant/hassos/releases/download/"
    "{version}/hassos_{board}-{version}.raucb"
)

HASSIO_DATA = Path("/data")

# Persistent JSON state files under the data directory
FILE_HASSIO_AUTH = Path(HASSIO_DATA, "auth.json")
FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
FILE_HASSIO_DISCOVERY = Path(HASSIO_DATA, "discovery.json")
FILE_HASSIO_INGRESS = Path(HASSIO_DATA, "ingress.json")

SOCKET_DOCKER = Path("/var/run/docker.sock")

DOCKER_NETWORK = "hassio"
DOCKER_NETWORK_MASK = ip_network("172.30.32.0/23")
DOCKER_NETWORK_RANGE = ip_network("172.30.33.0/24")

# Docker image labels
LABEL_VERSION = "io.hass.version"
LABEL_ARCH = "io.hass.arch"
LABEL_TYPE = "io.hass.type"
LABEL_MACHINE = "io.hass.machine"

META_ADDON = "addon"
META_SUPERVISOR = "supervisor"
META_HOMEASSISTANT = "homeassistant"

# API response envelope keys
JSON_RESULT = "result"
JSON_DATA = "data"
JSON_MESSAGE = "message"

RESULT_ERROR = "error"
RESULT_OK = "ok"

CONTENT_TYPE_BINARY = "application/octet-stream"
CONTENT_TYPE_PNG = "image/png"
CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
HEADER_HA_ACCESS = "X-Ha-Access"
HEADER_TOKEN = "X-Hassio-Key"
COOKIE_INGRESS = "ingress_session"

ENV_TOKEN = "HASSIO_TOKEN"
ENV_TIME = "TZ"

REQUEST_FROM = "HASSIO_FROM"

# JSON attribute names shared across the API/schemas
ATTR_MACHINE = "machine"
ATTR_WAIT_BOOT = "wait_boot"
ATTR_DEPLOYMENT = "deployment"
ATTR_WATCHDOG = "watchdog"
ATTR_CHANGELOG = "changelog"
ATTR_LOGGING = "logging"
ATTR_DATE = "date"
ATTR_ARCH = "arch"
ATTR_LONG_DESCRIPTION = "long_description"
ATTR_HOSTNAME = "hostname"
ATTR_TIMEZONE = "timezone"
ATTR_ARGS = "args"
ATTR_OPERATING_SYSTEM = "operating_system"
ATTR_CHASSIS = "chassis"
ATTR_TYPE = "type"
ATTR_SOURCE = "source"
ATTR_FEATURES = "features"
ATTR_ADDONS = "addons"
ATTR_PROVIDERS = "providers"
ATTR_VERSION = "version"
ATTR_VERSION_LATEST = "version_latest"
ATTR_AUTO_UART = "auto_uart"
ATTR_LAST_BOOT = "last_boot"
ATTR_LAST_VERSION = "last_version"
ATTR_CHANNEL = "channel"
ATTR_NAME = "name"
ATTR_SLUG = "slug"
ATTR_DESCRIPTON = "description"
ATTR_STARTUP = "startup"
ATTR_BOOT = "boot"
ATTR_PORTS = "ports"
ATTR_PORTS_DESCRIPTION = "ports_description"
ATTR_PORT = "port"
ATTR_SSL = "ssl"
ATTR_MAP = "map"
ATTR_WEBUI = "webui"
ATTR_OPTIONS = "options"
ATTR_INSTALLED = "installed"
ATTR_DETACHED = "detached"
ATTR_STATE = "state"
ATTR_SCHEMA = "schema"
ATTR_IMAGE = "image"
ATTR_ICON = "icon"
ATTR_LOGO = "logo"
ATTR_STDIN = "stdin"
ATTR_ADDONS_REPOSITORIES = "addons_repositories"
ATTR_REPOSITORY = "repository"
ATTR_REPOSITORIES = "repositories"
ATTR_URL = "url"
ATTR_MAINTAINER = "maintainer"
ATTR_PASSWORD = "password"
ATTR_TOTP = "totp"
ATTR_INITIALIZE = "initialize"
ATTR_LOCATON = "location"
ATTR_BUILD = "build"
ATTR_DEVICES = "devices"
ATTR_ENVIRONMENT = "environment"
ATTR_HOST_NETWORK = "host_network"
ATTR_HOST_PID = "host_pid"
ATTR_HOST_IPC = "host_ipc"
ATTR_HOST_DBUS = "host_dbus"
ATTR_NETWORK = "network"
ATTR_NETWORK_DESCRIPTION = "network_description"
ATTR_TMPFS = "tmpfs"
ATTR_PRIVILEGED = "privileged"
ATTR_USER = "user"
ATTR_SYSTEM = "system"
ATTR_SNAPSHOTS = "snapshots"
ATTR_HOMEASSISTANT = "homeassistant"
ATTR_HASSIO = "hassio"
ATTR_HASSIO_API = "hassio_api"
ATTR_HOMEASSISTANT_API = "homeassistant_api"
ATTR_UUID = "uuid"
ATTR_FOLDERS = "folders"
ATTR_SIZE = "size"
ATTR_TIMEOUT = "timeout"
ATTR_AUTO_UPDATE = "auto_update"
ATTR_CUSTOM = "custom"
ATTR_AUDIO = "audio"
ATTR_AUDIO_INPUT = "audio_input"
ATTR_AUDIO_OUTPUT = "audio_output"
ATTR_INPUT = "input"
ATTR_OUTPUT = "output"
ATTR_DISK = "disk"
ATTR_SERIAL = "serial"
ATTR_SECURITY = "security"
ATTR_BUILD_FROM = "build_from"
ATTR_SQUASH = "squash"
ATTR_GPIO = "gpio"
ATTR_LEGACY = "legacy"
ATTR_ADDONS_CUSTOM_LIST = "addons_custom_list"
ATTR_CPU_PERCENT = "cpu_percent"
ATTR_NETWORK_RX = "network_rx"
ATTR_NETWORK_TX = "network_tx"
ATTR_MEMORY_LIMIT = "memory_limit"
ATTR_MEMORY_USAGE = "memory_usage"
ATTR_BLK_READ = "blk_read"
ATTR_BLK_WRITE = "blk_write"
ATTR_ADDON = "addon"
ATTR_AVAILABLE = "available"
ATTR_HOST = "host"
ATTR_USERNAME = "username"
ATTR_DISCOVERY = "discovery"
ATTR_CONFIG = "config"
ATTR_SERVICES = "services"
ATTR_SERVICE = "service"
ATTR_PROTECTED = "protected"
ATTR_CRYPTO = "crypto"
ATTR_BRANCH = "branch"
ATTR_KERNEL = "kernel"
ATTR_APPARMOR = "apparmor"
ATTR_DEVICETREE = "devicetree"
ATTR_CPE = "cpe"
ATTR_BOARD = "board"
ATTR_HASSOS = "hassos"
ATTR_HASSOS_CLI = "hassos_cli"
ATTR_VERSION_CLI = "version_cli"
ATTR_VERSION_CLI_LATEST = "version_cli_latest"
ATTR_REFRESH_TOKEN = "refresh_token"
ATTR_ACCESS_TOKEN = "access_token"
ATTR_DOCKER_API = "docker_api"
ATTR_FULL_ACCESS = "full_access"
ATTR_RATING = "rating"
ATTR_HASSIO_ROLE = "hassio_role"
ATTR_SUPERVISOR = "supervisor"
ATTR_AUTH_API = "auth_api"
ATTR_KERNEL_MODULES = "kernel_modules"
ATTR_SUPPORTED_ARCH = "supported_arch"
ATTR_INGRESS = "ingress"
ATTR_INGRESS_PORT = "ingress_port"
ATTR_INGRESS_ENTRY = "ingress_entry"
ATTR_INGRESS_TOKEN = "ingress_token"
ATTR_INGRESS_URL = "ingress_url"
ATTR_INGRESS_PANEL = "ingress_panel"
ATTR_PANEL_ICON = "panel_icon"
ATTR_PANEL_TITLE = "panel_title"
ATTR_PANEL_ADMIN = "panel_admin"
ATTR_TITLE = "title"
ATTR_ENABLE = "enable"
ATTR_IP_ADDRESS = "ip_address"
ATTR_SESSION = "session"
ATTR_ADMIN = "admin"
ATTR_PANELS = "panels"
ATTR_DEBUG = "debug"
ATTR_DEBUG_BLOCK = "debug_block"

PROVIDE_SERVICE = "provide"
NEED_SERVICE = "need"
WANT_SERVICE = "want"

STARTUP_INITIALIZE = "initialize"
STARTUP_SYSTEM = "system"
STARTUP_SERVICES = "services"
STARTUP_APPLICATION = "application"
STARTUP_ONCE = "once"

STARTUP_ALL = [
    STARTUP_ONCE,
    STARTUP_INITIALIZE,
    STARTUP_SYSTEM,
    STARTUP_SERVICES,
    STARTUP_APPLICATION,
]

BOOT_AUTO = "auto"
BOOT_MANUAL = "manual"

STATE_STARTED = "started"
STATE_STOPPED = "stopped"
STATE_NONE = "none"

MAP_CONFIG = "config"
MAP_SSL = "ssl"
MAP_ADDONS = "addons"
MAP_BACKUP = "backup"
MAP_SHARE = "share"

ARCH_ARMHF = "armhf"
ARCH_ARMV7 = "armv7"
ARCH_AARCH64 = "aarch64"
ARCH_AMD64 = "amd64"
ARCH_I386 = "i386"

ARCH_ALL = [ARCH_ARMHF, ARCH_ARMV7, ARCH_AARCH64, ARCH_AMD64, ARCH_I386]

CHANNEL_STABLE = "stable"
CHANNEL_BETA = "beta"
CHANNEL_DEV = "dev"

REPOSITORY_CORE = "core"
REPOSITORY_LOCAL = "local"

FOLDER_HOMEASSISTANT = "homeassistant"
FOLDER_SHARE = "share"
FOLDER_ADDONS = "addons/local"
FOLDER_SSL = "ssl"

SNAPSHOT_FULL = "full"
SNAPSHOT_PARTIAL = "partial"

CRYPTO_AES128 = "aes128"

SECURITY_PROFILE = "profile"
SECURITY_DEFAULT = "default"
SECURITY_DISABLE = "disable"

# Linux capabilities an add-on may request
PRIVILEGED_NET_ADMIN = "NET_ADMIN"
PRIVILEGED_SYS_ADMIN = "SYS_ADMIN"
PRIVILEGED_SYS_RAWIO = "SYS_RAWIO"
PRIVILEGED_IPC_LOCK = "IPC_LOCK"
PRIVILEGED_SYS_TIME = "SYS_TIME"
PRIVILEGED_SYS_NICE = "SYS_NICE"
PRIVILEGED_SYS_MODULE = "SYS_MODULE"
PRIVILEGED_SYS_RESOURCE = "SYS_RESOURCE"
PRIVILEGED_SYS_PTRACE = "SYS_PTRACE"
PRIVILEGED_DAC_READ_SEARCH = "DAC_READ_SEARCH"

PRIVILEGED_ALL = [
    PRIVILEGED_NET_ADMIN,
    PRIVILEGED_SYS_ADMIN,
    PRIVILEGED_SYS_RAWIO,
    PRIVILEGED_IPC_LOCK,
    PRIVILEGED_SYS_TIME,
    PRIVILEGED_SYS_NICE,
    PRIVILEGED_SYS_RESOURCE,
    PRIVILEGED_SYS_PTRACE,
    PRIVILEGED_SYS_MODULE,
    PRIVILEGED_DAC_READ_SEARCH,
]

FEATURES_SHUTDOWN = "shutdown"
FEATURES_REBOOT = "reboot"
FEATURES_HASSOS = "hassos"
FEATURES_HOSTNAME = "hostname"
FEATURES_SERVICES = "services"

ROLE_DEFAULT = "default"
ROLE_HOMEASSISTANT = "homeassistant"
ROLE_BACKUP = "backup"
ROLE_MANAGER = "manager"
ROLE_ADMIN = "admin"

ROLE_ALL = [ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_BACKUP, ROLE_MANAGER, ROLE_ADMIN]

CHAN_ID = "chan_id"
CHAN_TYPE = "chan_type"
|
|
158
hassio/core.py
158
hassio/core.py
@@ -1,158 +0,0 @@
|
|||||||
"""Main file for Hass.io."""
|
|
||||||
from contextlib import suppress
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import async_timeout
|
|
||||||
|
|
||||||
from .coresys import CoreSysAttributes
|
|
||||||
from .const import (
|
|
||||||
STARTUP_SYSTEM,
|
|
||||||
STARTUP_SERVICES,
|
|
||||||
STARTUP_APPLICATION,
|
|
||||||
STARTUP_INITIALIZE,
|
|
||||||
)
|
|
||||||
from .exceptions import HassioError, HomeAssistantError
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class HassIO(CoreSysAttributes):
|
|
||||||
"""Main object of Hass.io."""
|
|
||||||
|
|
||||||
def __init__(self, coresys):
|
|
||||||
"""Initialize Hass.io object."""
|
|
||||||
self.coresys = coresys
|
|
||||||
|
|
||||||
async def setup(self):
|
|
||||||
"""Setup HassIO orchestration."""
|
|
||||||
# Load Supervisor
|
|
||||||
await self.sys_supervisor.load()
|
|
||||||
|
|
||||||
# Load DBus
|
|
||||||
await self.sys_dbus.load()
|
|
||||||
|
|
||||||
# Load Host
|
|
||||||
await self.sys_host.load()
|
|
||||||
|
|
||||||
# Load Home Assistant
|
|
||||||
await self.sys_homeassistant.load()
|
|
||||||
|
|
||||||
# Load CPU/Arch
|
|
||||||
await self.sys_arch.load()
|
|
||||||
|
|
||||||
# Load HassOS
|
|
||||||
await self.sys_hassos.load()
|
|
||||||
|
|
||||||
# Load Stores
|
|
||||||
await self.sys_store.load()
|
|
||||||
|
|
||||||
# Load Add-ons
|
|
||||||
await self.sys_addons.load()
|
|
||||||
|
|
||||||
# rest api views
|
|
||||||
await self.sys_api.load()
|
|
||||||
|
|
||||||
# load last available data
|
|
||||||
await self.sys_updater.load()
|
|
||||||
|
|
||||||
# load last available data
|
|
||||||
await self.sys_snapshots.load()
|
|
||||||
|
|
||||||
# load services
|
|
||||||
await self.sys_services.load()
|
|
||||||
|
|
||||||
# Load discovery
|
|
||||||
await self.sys_discovery.load()
|
|
||||||
|
|
||||||
# Load ingress
|
|
||||||
await self.sys_ingress.load()
|
|
||||||
|
|
||||||
# start dns forwarding
|
|
||||||
self.sys_create_task(self.sys_dns.start())
|
|
||||||
|
|
||||||
async def start(self):
|
|
||||||
"""Start Hass.io orchestration."""
|
|
||||||
# on release channel, try update itself
|
|
||||||
if self.sys_supervisor.need_update:
|
|
||||||
if self.sys_dev:
|
|
||||||
_LOGGER.warning("Ignore Hass.io updates on dev!")
|
|
||||||
elif await self.sys_supervisor.update():
|
|
||||||
return
|
|
||||||
|
|
||||||
# start api
|
|
||||||
await self.sys_api.start()
|
|
||||||
|
|
||||||
# start addon mark as initialize
|
|
||||||
await self.sys_addons.boot(STARTUP_INITIALIZE)
|
|
||||||
|
|
||||||
try:
|
|
||||||
# HomeAssistant is already running / supervisor have only reboot
|
|
||||||
if self.sys_hardware.last_boot == self.sys_config.last_boot:
|
|
||||||
_LOGGER.info("Hass.io reboot detected")
|
|
||||||
return
|
|
||||||
|
|
||||||
# reset register services / discovery
|
|
||||||
self.sys_services.reset()
|
|
||||||
|
|
||||||
# start addon mark as system
|
|
||||||
await self.sys_addons.boot(STARTUP_SYSTEM)
|
|
||||||
|
|
||||||
# start addon mark as services
|
|
||||||
await self.sys_addons.boot(STARTUP_SERVICES)
|
|
||||||
|
|
||||||
# run HomeAssistant
|
|
||||||
if self.sys_homeassistant.boot:
|
|
||||||
with suppress(HomeAssistantError):
|
|
||||||
await self.sys_homeassistant.start()
|
|
||||||
|
|
||||||
# start addon mark as application
|
|
||||||
await self.sys_addons.boot(STARTUP_APPLICATION)
|
|
||||||
|
|
||||||
# store new last boot
|
|
||||||
self.sys_config.last_boot = self.sys_hardware.last_boot
|
|
||||||
self.sys_config.save_data()
|
|
||||||
|
|
||||||
finally:
|
|
||||||
# Add core tasks into scheduler
|
|
||||||
await self.sys_tasks.load()
|
|
||||||
|
|
||||||
# If landingpage / run upgrade in background
|
|
||||||
if self.sys_homeassistant.version == "landingpage":
|
|
||||||
self.sys_create_task(self.sys_homeassistant.install())
|
|
||||||
|
|
||||||
_LOGGER.info("Hass.io is up and running")
|
|
||||||
|
|
||||||
async def stop(self):
|
|
||||||
"""Stop a running orchestration."""
|
|
||||||
# don't process scheduler anymore
|
|
||||||
self.sys_scheduler.suspend = True
|
|
||||||
|
|
||||||
# process async stop tasks
|
|
||||||
try:
|
|
||||||
with async_timeout.timeout(10):
|
|
||||||
await asyncio.wait(
|
|
||||||
[
|
|
||||||
self.sys_api.stop(),
|
|
||||||
self.sys_dns.stop(),
|
|
||||||
self.sys_websession.close(),
|
|
||||||
self.sys_websession_ssl.close(),
|
|
||||||
self.sys_ingress.unload(),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
except asyncio.TimeoutError:
|
|
||||||
_LOGGER.warning("Force Shutdown!")
|
|
||||||
|
|
||||||
_LOGGER.info("Hass.io is down")
|
|
||||||
|
|
||||||
async def shutdown(self):
|
|
||||||
"""Shutdown all running containers in correct order."""
|
|
||||||
await self.sys_addons.shutdown(STARTUP_APPLICATION)
|
|
||||||
|
|
||||||
# Close Home Assistant
|
|
||||||
with suppress(HassioError):
|
|
||||||
await self.sys_homeassistant.stop()
|
|
||||||
|
|
||||||
await self.sys_addons.shutdown(STARTUP_SERVICES)
|
|
||||||
await self.sys_addons.shutdown(STARTUP_SYSTEM)
|
|
||||||
await self.sys_addons.shutdown(STARTUP_INITIALIZE)
|
|
@@ -1,39 +0,0 @@
|
|||||||
"""D-Bus interface objects."""
|
|
||||||
|
|
||||||
from .systemd import Systemd
|
|
||||||
from .hostname import Hostname
|
|
||||||
from .rauc import Rauc
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
|
|
||||||
|
|
||||||
class DBusManager(CoreSysAttributes):
|
|
||||||
"""A DBus Interface handler."""
|
|
||||||
|
|
||||||
def __init__(self, coresys):
|
|
||||||
"""Initialize D-Bus interface."""
|
|
||||||
self.coresys = coresys
|
|
||||||
|
|
||||||
self._systemd = Systemd()
|
|
||||||
self._hostname = Hostname()
|
|
||||||
self._rauc = Rauc()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def systemd(self):
|
|
||||||
"""Return the systemd interface."""
|
|
||||||
return self._systemd
|
|
||||||
|
|
||||||
@property
|
|
||||||
def hostname(self):
|
|
||||||
"""Return the hostname interface."""
|
|
||||||
return self._hostname
|
|
||||||
|
|
||||||
@property
|
|
||||||
def rauc(self):
|
|
||||||
"""Return the rauc interface."""
|
|
||||||
return self._rauc
|
|
||||||
|
|
||||||
async def load(self):
|
|
||||||
"""Connect interfaces to D-Bus."""
|
|
||||||
await self.systemd.connect()
|
|
||||||
await self.hostname.connect()
|
|
||||||
await self.rauc.connect()
|
|
@@ -1,39 +0,0 @@
|
|||||||
"""D-Bus interface for hostname."""
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from .interface import DBusInterface
|
|
||||||
from .utils import dbus_connected
|
|
||||||
from ..exceptions import DBusError
|
|
||||||
from ..utils.gdbus import DBus
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
DBUS_NAME = 'org.freedesktop.hostname1'
|
|
||||||
DBUS_OBJECT = '/org/freedesktop/hostname1'
|
|
||||||
|
|
||||||
|
|
||||||
class Hostname(DBusInterface):
|
|
||||||
"""Handle D-Bus interface for hostname/system."""
|
|
||||||
|
|
||||||
async def connect(self):
|
|
||||||
"""Connect to system's D-Bus."""
|
|
||||||
try:
|
|
||||||
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
|
|
||||||
except DBusError:
|
|
||||||
_LOGGER.warning("Can't connect to hostname")
|
|
||||||
|
|
||||||
@dbus_connected
|
|
||||||
def set_static_hostname(self, hostname):
|
|
||||||
"""Change local hostname.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.dbus.SetStaticHostname(hostname, False)
|
|
||||||
|
|
||||||
@dbus_connected
|
|
||||||
def get_properties(self):
|
|
||||||
"""Return local host informations.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.dbus.get_properties(DBUS_NAME)
|
|
@@ -1,18 +0,0 @@
|
|||||||
"""Interface class for D-Bus wrappers."""
|
|
||||||
|
|
||||||
|
|
||||||
class DBusInterface:
|
|
||||||
"""Handle D-Bus interface for hostname/system."""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
"""Initialize systemd."""
|
|
||||||
self.dbus = None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def is_connected(self):
|
|
||||||
"""Return True, if they is connected to D-Bus."""
|
|
||||||
return self.dbus is not None
|
|
||||||
|
|
||||||
async def connect(self):
|
|
||||||
"""Connect to D-Bus."""
|
|
||||||
raise NotImplementedError()
|
|
@@ -1,55 +0,0 @@
|
|||||||
"""D-Bus interface for rauc."""
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from .interface import DBusInterface
|
|
||||||
from .utils import dbus_connected
|
|
||||||
from ..exceptions import DBusError
|
|
||||||
from ..utils.gdbus import DBus
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
DBUS_NAME = 'de.pengutronix.rauc'
|
|
||||||
DBUS_OBJECT = '/'
|
|
||||||
|
|
||||||
|
|
||||||
class Rauc(DBusInterface):
|
|
||||||
"""Handle D-Bus interface for rauc."""
|
|
||||||
|
|
||||||
async def connect(self):
|
|
||||||
"""Connect to D-Bus."""
|
|
||||||
try:
|
|
||||||
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
|
|
||||||
except DBusError:
|
|
||||||
_LOGGER.warning("Can't connect to rauc")
|
|
||||||
|
|
||||||
@dbus_connected
|
|
||||||
def install(self, raucb_file):
|
|
||||||
"""Install rauc bundle file.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.dbus.Installer.Install(raucb_file)
|
|
||||||
|
|
||||||
@dbus_connected
|
|
||||||
def get_slot_status(self):
|
|
||||||
"""Get slot status.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.dbus.Installer.GetSlotStatus()
|
|
||||||
|
|
||||||
@dbus_connected
|
|
||||||
def get_properties(self):
|
|
||||||
"""Return rauc informations.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.dbus.get_properties(f"{DBUS_NAME}.Installer")
|
|
||||||
|
|
||||||
@dbus_connected
|
|
||||||
def signal_completed(self):
|
|
||||||
"""Return a signal wrapper for completed signal.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
|
|
@@ -1,137 +0,0 @@
|
|||||||
"""Init file for Hass.io Docker object."""
|
|
||||||
import logging
|
|
||||||
from contextlib import suppress
|
|
||||||
from typing import Any, Dict, Optional
|
|
||||||
|
|
||||||
import attr
|
|
||||||
import docker
|
|
||||||
|
|
||||||
from ..const import SOCKET_DOCKER
|
|
||||||
from ..exceptions import DockerAPIError
|
|
||||||
from .network import DockerNetwork
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
@attr.s(frozen=True)
|
|
||||||
class CommandReturn:
|
|
||||||
"""Return object from command run."""
|
|
||||||
|
|
||||||
exit_code: int = attr.ib()
|
|
||||||
output: bytes = attr.ib()
|
|
||||||
|
|
||||||
|
|
||||||
class DockerAPI:
|
|
||||||
"""Docker Hass.io wrapper.
|
|
||||||
|
|
||||||
This class is not AsyncIO safe!
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
"""Initialize Docker base wrapper."""
|
|
||||||
self.docker: docker.DockerClient = docker.DockerClient(
|
|
||||||
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version="auto", timeout=900
|
|
||||||
)
|
|
||||||
self.network: DockerNetwork = DockerNetwork(self.docker)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def images(self) -> docker.models.images.ImageCollection:
|
|
||||||
"""Return API images."""
|
|
||||||
return self.docker.images
|
|
||||||
|
|
||||||
@property
|
|
||||||
def containers(self) -> docker.models.containers.ContainerCollection:
|
|
||||||
"""Return API containers."""
|
|
||||||
return self.docker.containers
|
|
||||||
|
|
||||||
@property
|
|
||||||
def api(self) -> docker.APIClient:
|
|
||||||
"""Return API containers."""
|
|
||||||
return self.docker.api
|
|
||||||
|
|
||||||
def run(
|
|
||||||
self, image: str, **kwargs: Dict[str, Any]
|
|
||||||
) -> docker.models.containers.Container:
|
|
||||||
""""Create a Docker container and run it.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
name = kwargs.get("name", image)
|
|
||||||
network_mode = kwargs.get("network_mode")
|
|
||||||
hostname = kwargs.get("hostname")
|
|
||||||
|
|
||||||
# Setup network
|
|
||||||
kwargs["dns_search"] = ["."]
|
|
||||||
if network_mode:
|
|
||||||
kwargs["dns"] = [str(self.network.supervisor)]
|
|
||||||
kwargs["dns_opt"] = ["ndots:0"]
|
|
||||||
else:
|
|
||||||
kwargs["network"] = None
|
|
||||||
|
|
||||||
# Create container
|
|
||||||
try:
|
|
||||||
container = self.docker.containers.create(
|
|
||||||
image, use_config_proxy=False, **kwargs
|
|
||||||
)
|
|
||||||
except docker.errors.DockerException as err:
|
|
||||||
_LOGGER.error("Can't create container from %s: %s", name, err)
|
|
||||||
raise DockerAPIError() from None
|
|
||||||
|
|
||||||
# Attach network
|
|
||||||
if not network_mode:
|
|
||||||
alias = [hostname] if hostname else None
|
|
||||||
try:
|
|
||||||
self.network.attach_container(container, alias=alias)
|
|
||||||
except DockerAPIError:
|
|
||||||
_LOGGER.warning("Can't attach %s to hassio-net!", name)
|
|
||||||
else:
|
|
||||||
with suppress(DockerAPIError):
|
|
||||||
self.network.detach_default_bridge(container)
|
|
||||||
|
|
||||||
# Run container
|
|
||||||
try:
|
|
||||||
container.start()
|
|
||||||
except docker.errors.DockerException as err:
|
|
||||||
_LOGGER.error("Can't start %s: %s", name, err)
|
|
||||||
raise DockerAPIError() from None
|
|
||||||
|
|
||||||
# Update metadata
|
|
||||||
with suppress(docker.errors.DockerException):
|
|
||||||
container.reload()
|
|
||||||
|
|
||||||
return container
|
|
||||||
|
|
||||||
def run_command(
|
|
||||||
self, image: str, command: Optional[str] = None, **kwargs: Dict[str, Any]
|
|
||||||
) -> CommandReturn:
|
|
||||||
"""Create a temporary container and run command.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
stdout = kwargs.get("stdout", True)
|
|
||||||
stderr = kwargs.get("stderr", True)
|
|
||||||
|
|
||||||
_LOGGER.info("Run command '%s' on %s", command, image)
|
|
||||||
try:
|
|
||||||
container = self.docker.containers.run(
|
|
||||||
image,
|
|
||||||
command=command,
|
|
||||||
network=self.network.name,
|
|
||||||
use_config_proxy=False,
|
|
||||||
**kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
# wait until command is done
|
|
||||||
result = container.wait()
|
|
||||||
output = container.logs(stdout=stdout, stderr=stderr)
|
|
||||||
|
|
||||||
except docker.errors.DockerException as err:
|
|
||||||
_LOGGER.error("Can't execute command: %s", err)
|
|
||||||
raise DockerAPIError() from None
|
|
||||||
|
|
||||||
finally:
|
|
||||||
# cleanup container
|
|
||||||
with suppress(docker.errors.DockerException):
|
|
||||||
container.remove(force=True)
|
|
||||||
|
|
||||||
return CommandReturn(result.get("StatusCode"), output)
|
|
@@ -1,467 +0,0 @@
|
|||||||
"""Init file for Hass.io add-on Docker object."""
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from contextlib import suppress
|
|
||||||
from ipaddress import IPv4Address, ip_address
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import TYPE_CHECKING, Dict, List, Optional, Union, Awaitable
|
|
||||||
|
|
||||||
import docker
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from ..addons.build import AddonBuild
|
|
||||||
from ..const import (
|
|
||||||
ENV_TIME,
|
|
||||||
ENV_TOKEN,
|
|
||||||
MAP_ADDONS,
|
|
||||||
MAP_BACKUP,
|
|
||||||
MAP_CONFIG,
|
|
||||||
MAP_SHARE,
|
|
||||||
MAP_SSL,
|
|
||||||
SECURITY_DISABLE,
|
|
||||||
SECURITY_PROFILE,
|
|
||||||
)
|
|
||||||
from ..coresys import CoreSys
|
|
||||||
from ..exceptions import DockerAPIError
|
|
||||||
from ..utils import process_lock
|
|
||||||
from .interface import DockerInterface
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from ..addons.addon import Addon
|
|
||||||
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
|
|
||||||
|
|
||||||
|
|
||||||
class DockerAddon(DockerInterface):
|
|
||||||
"""Docker Hass.io wrapper for Home Assistant."""
|
|
||||||
|
|
||||||
def __init__(self, coresys: CoreSys, addon: Addon):
|
|
||||||
"""Initialize Docker Home Assistant wrapper."""
|
|
||||||
super().__init__(coresys)
|
|
||||||
self.addon = addon
|
|
||||||
|
|
||||||
@property
|
|
||||||
def image(self) -> str:
|
|
||||||
"""Return name of Docker image."""
|
|
||||||
return self.addon.image
|
|
||||||
|
|
||||||
@property
|
|
||||||
def ip_address(self) -> IPv4Address:
|
|
||||||
"""Return IP address of this container."""
|
|
||||||
if self.addon.host_network:
|
|
||||||
return self.sys_docker.network.gateway
|
|
||||||
|
|
||||||
# Extract IP-Address
|
|
||||||
try:
|
|
||||||
return ip_address(
|
|
||||||
self._meta["NetworkSettings"]["Networks"]["hassio"]["IPAddress"])
|
|
||||||
except (KeyError, TypeError, ValueError):
|
|
||||||
return ip_address("0.0.0.0")
|
|
||||||
|
|
||||||
@property
|
|
||||||
def timeout(self) -> int:
|
|
||||||
"""Return timeout for Docker actions."""
|
|
||||||
return self.addon.timeout
|
|
||||||
|
|
||||||
@property
|
|
||||||
def version(self) -> str:
|
|
||||||
"""Return version of Docker image."""
|
|
||||||
if self.addon.legacy:
|
|
||||||
return self.addon.version
|
|
||||||
return super().version
|
|
||||||
|
|
||||||
@property
|
|
||||||
def arch(self) -> str:
|
|
||||||
"""Return arch of Docker image."""
|
|
||||||
if self.addon.legacy:
|
|
||||||
return self.sys_arch.default
|
|
||||||
return super().arch
|
|
||||||
|
|
||||||
@property
|
|
||||||
def name(self) -> str:
|
|
||||||
"""Return name of Docker container."""
|
|
||||||
return f"addon_{self.addon.slug}"
|
|
||||||
|
|
||||||
@property
|
|
||||||
def ipc(self) -> Optional[str]:
|
|
||||||
"""Return the IPC namespace."""
|
|
||||||
if self.addon.host_ipc:
|
|
||||||
return "host"
|
|
||||||
return None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def full_access(self) -> bool:
|
|
||||||
"""Return True if full access is enabled."""
|
|
||||||
return not self.addon.protected and self.addon.with_full_access
|
|
||||||
|
|
||||||
@property
|
|
||||||
def hostname(self) -> str:
|
|
||||||
"""Return slug/id of add-on."""
|
|
||||||
return self.addon.slug.replace("_", "-")
|
|
||||||
|
|
||||||
@property
|
|
||||||
def environment(self) -> Dict[str, str]:
|
|
||||||
"""Return environment for Docker add-on."""
|
|
||||||
addon_env = self.addon.environment or {}
|
|
||||||
|
|
||||||
# Provide options for legacy add-ons
|
|
||||||
if self.addon.legacy:
|
|
||||||
for key, value in self.addon.options.items():
|
|
||||||
if isinstance(value, (int, str)):
|
|
||||||
addon_env[key] = value
|
|
||||||
else:
|
|
||||||
_LOGGER.warning("Can not set nested option %s as Docker env", key)
|
|
||||||
|
|
||||||
return {
|
|
||||||
**addon_env,
|
|
||||||
ENV_TIME: self.sys_timezone,
|
|
||||||
ENV_TOKEN: self.addon.hassio_token,
|
|
||||||
}
|
|
||||||
|
|
||||||
@property
|
|
||||||
def devices(self) -> List[str]:
|
|
||||||
"""Return needed devices."""
|
|
||||||
devices = []
|
|
||||||
|
|
||||||
# Extend add-on config
|
|
||||||
if self.addon.devices:
|
|
||||||
devices.extend(self.addon.devices)
|
|
||||||
|
|
||||||
# Use audio devices
|
|
||||||
if self.addon.with_audio and self.sys_hardware.support_audio:
|
|
||||||
devices.append(AUDIO_DEVICE)
|
|
||||||
|
|
||||||
# Auto mapping UART devices
|
|
||||||
if self.addon.auto_uart:
|
|
||||||
for device in self.sys_hardware.serial_devices:
|
|
||||||
devices.append(f"{device}:{device}:rwm")
|
|
||||||
|
|
||||||
# Return None if no devices is present
|
|
||||||
return devices or None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def ports(self) -> Optional[Dict[str, Union[str, int, None]]]:
|
|
||||||
"""Filter None from add-on ports."""
|
|
||||||
if self.addon.host_network or not self.addon.ports:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return {
|
|
||||||
container_port: host_port
|
|
||||||
for container_port, host_port in self.addon.ports.items()
|
|
||||||
if host_port
|
|
||||||
}
|
|
||||||
|
|
||||||
@property
|
|
||||||
def security_opt(self) -> List[str]:
|
|
||||||
"""Controlling security options."""
|
|
||||||
security = []
|
|
||||||
|
|
||||||
# AppArmor
|
|
||||||
apparmor = self.sys_host.apparmor.available
|
|
||||||
if not apparmor or self.addon.apparmor == SECURITY_DISABLE:
|
|
||||||
security.append("apparmor:unconfined")
|
|
||||||
elif self.addon.apparmor == SECURITY_PROFILE:
|
|
||||||
security.append(f"apparmor={self.addon.slug}")
|
|
||||||
|
|
||||||
# Disable Seccomp / We don't support it official and it
|
|
||||||
# make troubles on some kind of host systems.
|
|
||||||
security.append("seccomp=unconfined")
|
|
||||||
|
|
||||||
return security
|
|
||||||
|
|
||||||
@property
|
|
||||||
def tmpfs(self) -> Optional[Dict[str, str]]:
|
|
||||||
"""Return tmpfs for Docker add-on."""
|
|
||||||
options = self.addon.tmpfs
|
|
||||||
if options:
|
|
||||||
return {"/tmpfs": f"{options}"}
|
|
||||||
return None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def network_mapping(self) -> Dict[str, str]:
|
|
||||||
"""Return hosts mapping."""
|
|
||||||
return {
|
|
||||||
"homeassistant": self.sys_docker.network.gateway,
|
|
||||||
"hassio": self.sys_docker.network.supervisor,
|
|
||||||
}
|
|
||||||
|
|
||||||
@property
|
|
||||||
def network_mode(self) -> Optional[str]:
|
|
||||||
"""Return network mode for add-on."""
|
|
||||||
if self.addon.host_network:
|
|
||||||
return "host"
|
|
||||||
return None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def pid_mode(self) -> Optional[str]:
|
|
||||||
"""Return PID mode for add-on."""
|
|
||||||
if not self.addon.protected and self.addon.host_pid:
|
|
||||||
return "host"
|
|
||||||
return None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def volumes(self) -> Dict[str, Dict[str, str]]:
|
|
||||||
"""Generate volumes for mappings."""
|
|
||||||
volumes = {str(self.addon.path_extern_data): {"bind": "/data", "mode": "rw"}}
|
|
||||||
|
|
||||||
addon_mapping = self.addon.map_volumes
|
|
||||||
|
|
||||||
# setup config mappings
|
|
||||||
if MAP_CONFIG in addon_mapping:
|
|
||||||
volumes.update(
|
|
||||||
{
|
|
||||||
str(self.sys_config.path_extern_homeassistant): {
|
|
||||||
"bind": "/config",
|
|
||||||
"mode": addon_mapping[MAP_CONFIG],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
if MAP_SSL in addon_mapping:
|
|
||||||
volumes.update(
|
|
||||||
{
|
|
||||||
str(self.sys_config.path_extern_ssl): {
|
|
||||||
"bind": "/ssl",
|
|
||||||
"mode": addon_mapping[MAP_SSL],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
if MAP_ADDONS in addon_mapping:
|
|
||||||
volumes.update(
|
|
||||||
{
|
|
||||||
str(self.sys_config.path_extern_addons_local): {
|
|
||||||
"bind": "/addons",
|
|
||||||
"mode": addon_mapping[MAP_ADDONS],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
if MAP_BACKUP in addon_mapping:
|
|
||||||
volumes.update(
|
|
||||||
{
|
|
||||||
str(self.sys_config.path_extern_backup): {
|
|
||||||
"bind": "/backup",
|
|
||||||
"mode": addon_mapping[MAP_BACKUP],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
if MAP_SHARE in addon_mapping:
|
|
||||||
volumes.update(
|
|
||||||
{
|
|
||||||
str(self.sys_config.path_extern_share): {
|
|
||||||
"bind": "/share",
|
|
||||||
"mode": addon_mapping[MAP_SHARE],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Init other hardware mappings
|
|
||||||
|
|
||||||
# GPIO support
|
|
||||||
if self.addon.with_gpio and self.sys_hardware.support_gpio:
|
|
||||||
for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
|
|
||||||
volumes.update({gpio_path: {"bind": gpio_path, "mode": "rw"}})
|
|
||||||
|
|
||||||
# DeviceTree support
|
|
||||||
if self.addon.with_devicetree:
|
|
||||||
volumes.update(
|
|
||||||
{
|
|
||||||
"/sys/firmware/devicetree/base": {
|
|
||||||
"bind": "/device-tree",
|
|
||||||
"mode": "ro",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Kernel Modules support
|
|
||||||
if self.addon.with_kernel_modules:
|
|
||||||
volumes.update({"/lib/modules": {"bind": "/lib/modules", "mode": "ro"}})
|
|
||||||
|
|
||||||
# Docker API support
|
|
||||||
if not self.addon.protected and self.addon.access_docker_api:
|
|
||||||
volumes.update(
|
|
||||||
{"/var/run/docker.sock": {"bind": "/var/run/docker.sock", "mode": "ro"}}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Host D-Bus system
|
|
||||||
if self.addon.host_dbus:
|
|
||||||
volumes.update({"/var/run/dbus": {"bind": "/var/run/dbus", "mode": "rw"}})
|
|
||||||
|
|
||||||
# ALSA configuration
|
|
||||||
if self.addon.with_audio:
|
|
||||||
volumes.update(
|
|
||||||
{
|
|
||||||
str(self.addon.path_extern_asound): {
|
|
||||||
"bind": "/etc/asound.conf",
|
|
||||||
"mode": "ro",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return volumes
|
|
||||||
|
|
||||||
def _run(self) -> None:
|
|
||||||
"""Run Docker image.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
if self._is_running():
|
|
||||||
return
|
|
||||||
|
|
||||||
# Security check
|
|
||||||
if not self.addon.protected:
|
|
||||||
_LOGGER.warning("%s run with disabled protected mode!", self.addon.name)
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
with suppress(DockerAPIError):
|
|
||||||
self._stop()
|
|
||||||
|
|
||||||
# Create & Run container
|
|
||||||
docker_container = self.sys_docker.run(
|
|
||||||
self.image,
|
|
||||||
name=self.name,
|
|
||||||
hostname=self.hostname,
|
|
||||||
detach=True,
|
|
||||||
init=True,
|
|
||||||
privileged=self.full_access,
|
|
||||||
ipc_mode=self.ipc,
|
|
||||||
stdin_open=self.addon.with_stdin,
|
|
||||||
network_mode=self.network_mode,
|
|
||||||
pid_mode=self.pid_mode,
|
|
||||||
ports=self.ports,
|
|
||||||
extra_hosts=self.network_mapping,
|
|
||||||
devices=self.devices,
|
|
||||||
cap_add=self.addon.privileged,
|
|
||||||
security_opt=self.security_opt,
|
|
||||||
environment=self.environment,
|
|
||||||
volumes=self.volumes,
|
|
||||||
tmpfs=self.tmpfs,
|
|
||||||
)
|
|
||||||
|
|
||||||
_LOGGER.info("Start Docker add-on %s with version %s", self.image, self.version)
|
|
||||||
self._meta = docker_container.attrs
|
|
||||||
|
|
||||||
def _install(self, tag: str, image: Optional[str] = None) -> None:
|
|
||||||
"""Pull Docker image or build it.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
if self.addon.need_build:
|
|
||||||
self._build(tag)
|
|
||||||
else:
|
|
||||||
super()._install(tag, image)
|
|
||||||
|
|
||||||
def _build(self, tag: str) -> None:
|
|
||||||
"""Build a Docker container.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
build_env = AddonBuild(self.coresys, self.addon)
|
|
||||||
|
|
||||||
_LOGGER.info("Start build %s:%s", self.image, tag)
|
|
||||||
try:
|
|
||||||
image, log = self.sys_docker.images.build(
|
|
||||||
use_config_proxy=False, **build_env.get_docker_args(tag)
|
|
||||||
)
|
|
||||||
|
|
||||||
_LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
|
|
||||||
image.tag(self.image, tag="latest")
|
|
||||||
|
|
||||||
# Update meta data
|
|
||||||
self._meta = image.attrs
|
|
||||||
|
|
||||||
except docker.errors.DockerException as err:
|
|
||||||
_LOGGER.error("Can't build %s:%s: %s", self.image, tag, err)
|
|
||||||
raise DockerAPIError() from None
|
|
||||||
|
|
||||||
_LOGGER.info("Build %s:%s done", self.image, tag)
|
|
||||||
|
|
||||||
@process_lock
|
|
||||||
def export_image(self, tar_file: Path) -> Awaitable[None]:
|
|
||||||
"""Export current images into a tar file."""
|
|
||||||
return self.sys_run_in_executor(self._export_image, tar_file)
|
|
||||||
|
|
||||||
def _export_image(self, tar_file: Path) -> None:
|
|
||||||
"""Export current images into a tar file.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
image = self.sys_docker.api.get_image(self.image)
|
|
||||||
except docker.errors.DockerException as err:
|
|
||||||
_LOGGER.error("Can't fetch image %s: %s", self.image, err)
|
|
||||||
raise DockerAPIError() from None
|
|
||||||
|
|
||||||
_LOGGER.info("Export image %s to %s", self.image, tar_file)
|
|
||||||
try:
|
|
||||||
with tar_file.open("wb") as write_tar:
|
|
||||||
for chunk in image:
|
|
||||||
write_tar.write(chunk)
|
|
||||||
except (OSError, requests.exceptions.ReadTimeout) as err:
|
|
||||||
_LOGGER.error("Can't write tar file %s: %s", tar_file, err)
|
|
||||||
raise DockerAPIError() from None
|
|
||||||
|
|
||||||
_LOGGER.info("Export image %s done", self.image)
|
|
||||||
|
|
||||||
@process_lock
|
|
||||||
def import_image(self, tar_file: Path, tag: str) -> Awaitable[None]:
|
|
||||||
"""Import a tar file as image."""
|
|
||||||
return self.sys_run_in_executor(self._import_image, tar_file, tag)
|
|
||||||
|
|
||||||
def _import_image(self, tar_file: Path, tag: str) -> None:
|
|
||||||
"""Import a tar file as image.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
with tar_file.open("rb") as read_tar:
|
|
||||||
self.sys_docker.api.load_image(read_tar, quiet=True)
|
|
||||||
|
|
||||||
docker_image = self.sys_docker.images.get(self.image)
|
|
||||||
docker_image.tag(self.image, tag=tag)
|
|
||||||
except (docker.errors.DockerException, OSError) as err:
|
|
||||||
_LOGGER.error("Can't import image %s: %s", self.image, err)
|
|
||||||
raise DockerAPIError() from None
|
|
||||||
|
|
||||||
_LOGGER.info("Import image %s and tag %s", tar_file, tag)
|
|
||||||
self._meta = docker_image.attrs
|
|
||||||
|
|
||||||
with suppress(DockerAPIError):
|
|
||||||
self._cleanup()
|
|
||||||
|
|
||||||
@process_lock
|
|
||||||
def write_stdin(self, data: bytes) -> Awaitable[None]:
|
|
||||||
"""Write to add-on stdin."""
|
|
||||||
return self.sys_run_in_executor(self._write_stdin, data)
|
|
||||||
|
|
||||||
def _write_stdin(self, data: bytes) -> None:
|
|
||||||
"""Write to add-on stdin.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
if not self._is_running():
|
|
||||||
raise DockerAPIError() from None
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Load needed docker objects
|
|
||||||
container = self.sys_docker.containers.get(self.name)
|
|
||||||
socket = container.attach_socket(params={"stdin": 1, "stream": 1})
|
|
||||||
except docker.errors.DockerException as err:
|
|
||||||
_LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
|
|
||||||
raise DockerAPIError() from None
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Write to stdin
|
|
||||||
data += b"\n"
|
|
||||||
os.write(socket.fileno(), data)
|
|
||||||
socket.close()
|
|
||||||
except OSError as err:
|
|
||||||
_LOGGER.error("Can't write to %s stdin: %s", self.name, err)
|
|
||||||
raise DockerAPIError() from None
|
|
@@ -1,38 +0,0 @@
|
|||||||
"""HassOS Cli docker object."""
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import docker
|
|
||||||
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
from .interface import DockerInterface
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class DockerHassOSCli(DockerInterface, CoreSysAttributes):
|
|
||||||
"""Docker Hass.io wrapper for HassOS Cli."""
|
|
||||||
|
|
||||||
@property
|
|
||||||
def image(self):
|
|
||||||
"""Return name of HassOS CLI image."""
|
|
||||||
return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"
|
|
||||||
|
|
||||||
def _stop(self, remove_container=True):
|
|
||||||
"""Don't need stop."""
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _attach(self):
|
|
||||||
"""Attach to running Docker container.
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
image = self.sys_docker.images.get(self.image)
|
|
||||||
|
|
||||||
except docker.errors.DockerException:
|
|
||||||
_LOGGER.warning("Can't find a HassOS CLI %s", self.image)
|
|
||||||
|
|
||||||
else:
|
|
||||||
self._meta = image.attrs
|
|
||||||
_LOGGER.info(
|
|
||||||
"Found HassOS CLI %s with version %s", self.image, self.version
|
|
||||||
)
|
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user