Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-19 22:19:21 +00:00.
Compare commits
805 Commits
.devcontainer/Dockerfile

@@ -1,7 +1,12 @@
-FROM python:3.7
+FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8
+
+WORKDIR /workspaces
+
+# Set Docker daemon config
+RUN \
+    mkdir -p /etc/docker \
+    && echo '{"storage-driver": "vfs"}' > /etc/docker/daemon.json

 # Install Node/Yarn for Frontent
 RUN apt-get update && apt-get install -y --no-install-recommends \
     curl \
@@ -13,7 +18,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     && apt-get update && apt-get install -y --no-install-recommends \
         nodejs \
         yarn \
-    && curl -o - https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
+    && curl -o - https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash \
     && rm -rf /var/lib/apt/lists/*
 ENV NVM_DIR /root/.nvm
@@ -33,11 +38,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
         containerd.io \
     && rm -rf /var/lib/apt/lists/*

+# Install tools
+RUN apt-get update && apt-get install -y --no-install-recommends \
+        jq \
+        dbus \
+        network-manager \
+        libpulse0 \
+    && rm -rf /var/lib/apt/lists/*
+
 # Install Python dependencies from requirements.txt if it exists
 COPY requirements.txt requirements_tests.txt ./
-RUN pip3 install -r requirements.txt -r requirements_tests.txt \
+RUN pip3 install -U setuptools pip \
+    && pip3 install -r requirements.txt -r requirements_tests.txt \
     && pip3 install tox \
     && rm -f requirements.txt requirements_tests.txt

 # Set the default shell to bash instead of sh
 ENV SHELL /bin/bash
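
The added daemon.json forces the Docker daemon inside the devcontainer onto the vfs storage driver, which works without overlay filesystem support on the container's own filesystem. A minimal check that the setting took effect, run inside the container once dockerd is up (the expected output is an assumption based on the config written above):

    # Confirm the daemon config written by the Dockerfile
    cat /etc/docker/daemon.json
    # Ask the running daemon which storage driver it actually uses
    docker info --format '{{ .Driver }}'    # expected: vfs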
.devcontainer/devcontainer.json

@@ -1,31 +1,33 @@
 // See https://aka.ms/vscode-remote/devcontainer.json for format details.
 {
-  "name": "Hass.io dev",
-  "context": "..",
-  "dockerFile": "Dockerfile",
-  "appPort": "9123:8123",
-  "runArgs": [
-    "-e",
-    "GIT_EDITOR=code --wait",
-    "--privileged"
-  ],
-  "extensions": [
-    "ms-python.python",
-    "visualstudioexptteam.vscodeintellicode",
-    "esbenp.prettier-vscode"
-  ],
-  "settings": {
-    "python.pythonPath": "/usr/local/bin/python",
-    "python.linting.pylintEnabled": true,
-    "python.linting.enabled": true,
-    "python.formatting.provider": "black",
-    "python.formatting.blackArgs": [
-      "--target-version",
-      "py37"
-    ],
-    "editor.formatOnPaste": false,
-    "editor.formatOnSave": true,
-    "editor.formatOnType": true,
-    "files.trimTrailingWhitespace": true
-  }
-}
+  "name": "Supervisor dev",
+  "context": "..",
+  "dockerFile": "Dockerfile",
+  "appPort": "9123:8123",
+  "postCreateCommand": "pre-commit install",
+  "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
+  "containerEnv": { "NVM_DIR": "/usr/local/share/nvm" },
+  "extensions": [
+    "ms-python.python",
+    "ms-python.vscode-pylance",
+    "visualstudioexptteam.vscodeintellicode",
+    "esbenp.prettier-vscode"
+  ],
+  "settings": {
+    "terminal.integrated.shell.linux": "/bin/bash",
+    "editor.formatOnPaste": false,
+    "editor.formatOnSave": true,
+    "editor.formatOnType": true,
+    "files.trimTrailingWhitespace": true,
+    "python.pythonPath": "/usr/local/bin/python3",
+    "python.linting.pylintEnabled": true,
+    "python.linting.enabled": true,
+    "python.formatting.provider": "black",
+    "python.formatting.blackArgs": ["--target-version", "py38"],
+    "python.formatting.blackPath": "/usr/local/bin/black",
+    "python.linting.banditPath": "/usr/local/bin/bandit",
+    "python.linting.flake8Path": "/usr/local/bin/flake8",
+    "python.linting.mypyPath": "/usr/local/bin/mypy",
+    "python.linting.pylintPath": "/usr/local/bin/pylint",
+    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
+  }
+}
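
The "appPort": "9123:8123" entry publishes container port 8123 on host port 9123, so a service listening on 8123 inside the devcontainer is reachable from the host. A quick probe from the host side (a sketch; what answers depends on what you have started inside the container):

    # Host side: 9123 forwards to the container's 8123
    curl -s http://localhost:9123/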
.gitignore

@@ -14,10 +14,10 @@
 # virtualenv
 venv/

-# HA
-home-assistant-polymer/*
-misc/*
-script/*
+# Data
+home-assistant-polymer/
+script/
+tests/

 # Test ENV
 data/
.github/ISSUE_TEMPLATE.md (29 lines, vendored, deleted)

@@ -1,29 +0,0 @@
-<!-- READ THIS FIRST:
-- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
-- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
-- Do not report issues for components here, plaese refer to https://github.com/home-assistant/home-assistant/issues
-- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
-- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
-- If you have a problem with a Add-on, make a issue on there repository.
--->
-
-**Home Assistant release with the issue:**
-<!--
-- Frontend -> Developer tools -> Info
-- Or use this command: hass --version
--->
-
-**Operating environment (HassOS/Generic):**
-<!--
-Please provide details about your environment.
--->
-
-**Supervisor logs:**
-<!--
-- Frontend -> Hass.io -> System
-- Or use this command: hassio su logs
--->
-
-
-**Description of problem:**
-
.github/ISSUE_TEMPLATE/BUG_REPORT.md (69 lines, vendored, new file)

@@ -0,0 +1,69 @@
+---
+name: Report a bug with the Supervisor on a supported System
+about: Report an issue related to the Home Assistant Supervisor.
+labels: bug
+---
+
+<!-- READ THIS FIRST:
+- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
+- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
+- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
+- If you have a problem with an add-on, make an issue in its repository.
+-->
+
+<!--
+Important: You can only file a bug report for a supported system! If you run an unsupported installation, this report will be closed without comment.
+-->
+
+### Describe the issue
+
+<!-- Provide as many details as possible. -->
+
+### Steps to reproduce
+
+<!-- What do you do to encounter the issue. -->
+
+1. ...
+2. ...
+3. ...
+
+### Environment details
+
+<!-- You can find these details in the system tab of the supervisor panel, or by using the `ha` CLI. -->
+
+- **Operating System:** xxx
+- **Supervisor version:** xxx
+- **Home Assistant version:** xxx
+
+### Supervisor logs
+
+<details>
+<summary>Supervisor logs</summary>
+<!--
+- Frontend -> Supervisor -> System
+- Or use this command: ha supervisor logs
+- Logs are more than just errors, even if you don't think it's important, it is.
+-->
+
+```
+Paste supervisor logs here
+```
+
+</details>
+
+### System Information
+
+<details>
+<summary>System Information</summary>
+<!--
+- Use this command: ha info
+-->
+
+```
+Paste system info here
+```
+
+</details>
.github/ISSUE_TEMPLATE/config.yml (25 lines, vendored, new file)

@@ -0,0 +1,25 @@
+blank_issues_enabled: false
+contact_links:
+  - name: Report a bug/issues with an unsupported Supervisor
+    url: https://community.home-assistant.io
+    about: The Community guide can help or was updated to solve your issue
+
+  - name: Report a bug for the Supervisor panel
+    url: https://github.com/home-assistant/frontend/issues
+    about: The Supervisor panel is a part of the Home Assistant frontend
+
+  - name: Report incorrect or missing information on our developer documentation
+    url: https://github.com/home-assistant/developers.home-assistant.io/issues
+    about: Our documentation has its own issue tracker. Please report issues with the website there.
+
+  - name: Request a feature for the Supervisor
+    url: https://community.home-assistant.io/c/feature-requests
+    about: Request a new feature for the Supervisor.
+
+  - name: I have a question or need support
+    url: https://www.home-assistant.io/help
+    about: We use GitHub for tracking bugs, check our website for resources on getting help.
+
+  - name: I'm unsure where to go?
+    url: https://www.home-assistant.io/join-chat
+    about: If you are unsure where to go, then joining our chat is recommended; just ask!
.github/ISSUE_TEMPLATE/z_bug_report_form.yml (80 lines, vendored, new file)

@@ -0,0 +1,80 @@
+name: Bug Report Form
+about: Report an issue related to the Home Assistant Supervisor.
+labels: bug
+title: ""
+issue_body: true
+inputs:
+  - type: description
+    attributes:
+      value: |
+        This issue form is for reporting bugs with **supported** setups only!
+
+        If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].
+
+        [fr]: https://community.home-assistant.io/c/feature-requests
+  - type: input
+    attributes:
+      label: What is the version of the Supervisor used?
+      required: true
+      placeholder: supervisor-
+      description: >
+        Can be found in the Supervisor panel -> System tab. Starts with
+        `supervisor-....`.
+  - type: dropdown
+    attributes:
+      label: What type of installation are you running?
+      required: true
+      description: >
+        If you don't know, you can find it in: Configuration panel -> Info.
+      choices:
+        - Home Assistant OS
+        - Home Assistant Supervised
+  - type: dropdown
+    attributes:
+      label: Which operating system are you running on?
+      required: true
+      choices:
+        - Home Assistant Operating System
+        - Debian
+        - Other (e.g., Raspbian/Raspberry Pi OS/Fedora)
+  - type: input
+    attributes:
+      label: What is the version of your installed operating system?
+      required: true
+      placeholder: 5.10
+      description: Can be found in the Supervisor panel -> System tab.
+  - type: input
+    attributes:
+      label: What version of Home Assistant Core is installed?
+      required: true
+      placeholder: core-
+      description: >
+        Can be found in the Supervisor panel -> System tab. Starts with
+        `core-....`.
+  - type: textarea
+    attributes:
+      label: Describe the issue you are experiencing
+      required: true
+      description: Provide a clear and concise description of what the bug is.
+  - type: textarea
+    attributes:
+      label: Steps to reproduce the issue
+      required: true
+      description: |
+        Please tell us exactly how to reproduce your issue.
+        Provide clear and concise step by step instructions and add code snippets if needed.
+      value: |
+        1.
+        2.
+        3.
+        ...
+  - type: textarea
+    attributes:
+      label: Anything in the Supervisor logs that might be useful for us?
+      required: false
+      description: >
+        The Supervisor logs can be found in the Supervisor panel -> System tab.
+  - type: description
+    attributes:
+      value: |
+        If you have any additional information for us, use the field below.
+        Please note, you can attach screenshots or screen recordings here.
.github/PULL_REQUEST_TEMPLATE.md (68 lines, vendored, new file)

@@ -0,0 +1,68 @@
+<!--
+  You are amazing! Thanks for contributing to our project!
+  Please, DO NOT DELETE ANY TEXT from this template! (unless instructed).
+-->
+
+## Proposed change
+
+<!--
+  Describe the big picture of your changes here to communicate to the
+  maintainers why we should accept this pull request. If it fixes a bug
+  or resolves a feature request, be sure to link to that issue in the
+  additional information section.
+-->
+
+## Type of change
+
+<!--
+  What type of change does your PR introduce to Home Assistant?
+  NOTE: Please, check only 1! box!
+  If your PR requires multiple boxes to be checked, you'll most likely need to
+  split it into multiple PRs. This makes things easier and faster to code review.
+-->
+
+- [ ] Dependency upgrade
+- [ ] Bugfix (non-breaking change which fixes an issue)
+- [ ] New feature (which adds functionality to the supervisor)
+- [ ] Breaking change (fix/feature causing existing functionality to break)
+- [ ] Code quality improvements to existing code or addition of tests
+
+## Additional information
+
+<!--
+  Details are important, and help maintainers processing your PR.
+  Please be sure to fill out additional details, if applicable.
+-->
+
+- This PR fixes or closes issue: fixes #
+- This PR is related to issue:
+- Link to documentation pull request:
+
+## Checklist
+
+<!--
+  Put an `x` in the boxes that apply. You can also fill these out after
+  creating the PR. If you're unsure about any of them, don't hesitate to ask.
+  We're here to help! This is simply a reminder of what we are going to look
+  for before merging your code.
+-->
+
+- [ ] The code change is tested and works locally.
+- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
+- [ ] There is no commented out code in this PR.
+- [ ] I have followed the [development checklist][dev-checklist]
+- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
+- [ ] Tests have been added to verify that the new code works.
+
+If API endpoints of add-on configuration are added/changed:
+
+- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
+
+<!--
+  Thank you for contributing <3
+
+  Below, some useful links you could explore:
+-->
+
+[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
+[docs-repository]: https://github.com/home-assistant/developers.home-assistant
.github/dependabot.yml (14 lines, vendored, new file)

@@ -0,0 +1,14 @@
+version: 2
+updates:
+  - package-ecosystem: pip
+    directory: "/"
+    schedule:
+      interval: daily
+      time: "06:00"
+    open-pull-requests-limit: 10
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: daily
+      time: "06:00"
+    open-pull-requests-limit: 10
.github/lock.yml (27 lines, vendored, deleted)

@@ -1,27 +0,0 @@
-# Configuration for Lock Threads - https://github.com/dessant/lock-threads
-
-# Number of days of inactivity before a closed issue or pull request is locked
-daysUntilLock: 1
-
-# Skip issues and pull requests created before a given timestamp. Timestamp must
-# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
-skipCreatedBefore: 2020-01-01
-
-# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
-exemptLabels: []
-
-# Label to add before locking, such as `outdated`. Set to `false` to disable
-lockLabel: false
-
-# Comment to post before locking. Set to `false` to disable
-lockComment: false
-
-# Assign `resolved` as the reason for locking. Set to `false` to disable
-setLockReason: false
-
-# Limit to only `issues` or `pulls`
-only: pulls
-
-# Optionally, specify configuration settings just for `issues` or `pulls`
-issues:
-  daysUntilLock: 30
.github/release-drafter.yml (47 lines, vendored)

@@ -1,4 +1,49 @@
+change-template: "- #$NUMBER $TITLE @$AUTHOR"
+sort-direction: ascending
+
+categories:
+  - title: ":boom: Breaking Changes"
+    label: "breaking-change"
+
+  - title: ":wrench: Build"
+    label: "build"
+
+  - title: ":boar: Chore"
+    label: "chore"
+
+  - title: ":sparkles: New Features"
+    label: "new-feature"
+
+  - title: ":zap: Performance"
+    label: "performance"
+
+  - title: ":recycle: Refactor"
+    label: "refactor"
+
+  - title: ":green_heart: CI"
+    label: "ci"
+
+  - title: ":bug: Bug Fixes"
+    label: "bugfix"
+
+  - title: ":white_check_mark: Test"
+    label: "test"
+
+  - title: ":arrow_up: Dependency Updates"
+    label: "dependencies"
+
+include-labels:
+  - "breaking-change"
+  - "build"
+  - "chore"
+  - "performance"
+  - "refactor"
+  - "new-feature"
+  - "bugfix"
+  - "dependencies"
+  - "test"
+  - "ci"
+
 template: |
   ## What's Changed

   $CHANGES
.github/stale.yml (17 lines, vendored, deleted)

@@ -1,17 +0,0 @@
-# Number of days of inactivity before an issue becomes stale
-daysUntilStale: 60
-# Number of days of inactivity before a stale issue is closed
-daysUntilClose: 7
-# Issues with these labels will never be considered stale
-exemptLabels:
-  - pinned
-  - security
-# Label to use when marking an issue as stale
-staleLabel: wontfix
-# Comment to post when marking an issue as stale. Set to `false` to disable
-markComment: >
-  This issue has been automatically marked as stale because it has not had
-  recent activity. It will be closed if no further activity occurs. Thank you
-  for your contributions.
-# Comment to post when closing a stale issue. Set to `false` to disable
-closeComment: false
.github/workflows/builder.yml (215 lines, vendored, new file)

@@ -0,0 +1,215 @@
+name: Build supervisor
+
+on:
+  workflow_dispatch:
+    inputs:
+      channel:
+        description: "Channel"
+        required: true
+        default: "dev"
+      version:
+        description: "Version"
+        required: true
+      publish:
+        description: "Publish"
+        required: true
+        default: "false"
+      stable:
+        description: "Stable"
+        required: true
+        default: "false"
+  pull_request:
+    branches: ["main"]
+  release:
+    types: ["published"]
+  push:
+    branches: ["main"]
+    paths:
+      - "rootfs/**"
+      - "supervisor/**"
+      - build.json
+      - Dockerfile
+      - requirements.txt
+      - setup.py
+
+env:
+  BUILD_NAME: supervisor
+  BUILD_TYPE: supervisor
+  WHEELS_TAG: 3.8-alpine3.12
+
+jobs:
+  init:
+    name: Initialize build
+    runs-on: ubuntu-latest
+    outputs:
+      architectures: ${{ steps.info.outputs.architectures }}
+      version: ${{ steps.version.outputs.version }}
+      channel: ${{ steps.version.outputs.channel }}
+      publish: ${{ steps.version.outputs.publish }}
+      requirements: ${{ steps.requirements.outputs.changed }}
+    steps:
+      - name: Checkout the repository
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+
+      - name: Get information
+        id: info
+        uses: home-assistant/actions/helpers/info@master
+
+      - name: Get version
+        id: version
+        uses: home-assistant/actions/helpers/version@master
+        with:
+          type: ${{ env.BUILD_TYPE }}
+
+      - name: Get changed files
+        id: changed_files
+        if: steps.version.outputs.publish == 'false'
+        uses: jitterbit/get-changed-files@v1
+
+      - name: Check if requirements files changed
+        id: requirements
+        run: |
+          if [[ "${{ steps.changed_files.outputs.all }}" =~ requirements.txt ]]; then
+            echo "::set-output name=changed::true"
+          fi
+
+  build:
+    name: Build ${{ matrix.arch }} supervisor
+    needs: init
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        arch: ${{ fromJson(needs.init.outputs.architectures) }}
+    steps:
+      - name: Checkout the repository
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+
+      - name: Build wheels
+        if: needs.init.outputs.requirements == 'true'
+        uses: home-assistant/wheels@master
+        with:
+          tag: ${{ env.WHEELS_TAG }}
+          arch: ${{ matrix.arch }}
+          wheels-host: ${{ secrets.WHEELS_HOST }}
+          wheels-key: ${{ secrets.WHEELS_KEY }}
+          wheels-user: wheels
+          apk: "build-base;libffi-dev;openssl-dev"
+          skip-binary: aiohttp
+          requirements: "requirements.txt"
+
+      - name: Set version
+        if: needs.init.outputs.publish == 'true'
+        uses: home-assistant/actions/helpers/version@master
+        with:
+          type: ${{ env.BUILD_TYPE }}
+
+      - name: Login to DockerHub
+        if: needs.init.outputs.publish == 'true'
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Set build arguments
+        if: needs.init.outputs.publish == 'false'
+        run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
+
+      - name: Build supervisor
+        uses: home-assistant/builder@2021.01.1
+        with:
+          args: |
+            $BUILD_ARGS \
+            --${{ matrix.arch }} \
+            --target /data \
+            --generic ${{ needs.init.outputs.version }}
+
+  version:
+    name: Update version
+    needs: ["init", "run_supervisor"]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout the repository
+        if: needs.init.outputs.publish == 'true'
+        uses: actions/checkout@v2
+
+      - name: Initialize git
+        if: needs.init.outputs.publish == 'true'
+        uses: home-assistant/actions/helpers/git-init@master
+        with:
+          name: ${{ secrets.GIT_NAME }}
+          email: ${{ secrets.GIT_EMAIL }}
+          token: ${{ secrets.GIT_TOKEN }}
+
+      - name: Update version file
+        if: needs.init.outputs.publish == 'true'
+        uses: home-assistant/actions/helpers/version-push@master
+        with:
+          key: ${{ env.BUILD_NAME }}
+          version: ${{ needs.init.outputs.version }}
+          channel: ${{ needs.init.outputs.channel }}
+
+  run_supervisor:
+    runs-on: ubuntu-latest
+    name: Run the Supervisor
+    needs: ["build"]
+    steps:
+      - name: Checkout the repository
+        uses: actions/checkout@v2
+
+      - name: Build the Supervisor
+        uses: home-assistant/builder@2021.01.1
+        with:
+          args: |
+            --test \
+            --amd64 \
+            --target /data \
+            --generic runner
+
+      - name: Create the Supervisor
+        run: |
+          mkdir -p /tmp/supervisor/data
+          docker create --name hassio_supervisor \
+            --privileged \
+            --security-opt seccomp=unconfined \
+            --security-opt apparmor:unconfined \
+            -v /run/docker.sock:/run/docker.sock \
+            -v /run/dbus:/run/dbus \
+            -v /tmp/supervisor/data:/data \
+            -v /etc/machine-id:/etc/machine-id:ro \
+            -e SUPERVISOR_SHARE="/tmp/supervisor/data" \
+            -e SUPERVISOR_NAME=hassio_supervisor \
+            -e SUPERVISOR_DEV=1 \
+            -e SUPERVISOR_MACHINE="qemux86-64" \
+            homeassistant/amd64-hassio-supervisor:runner
+
+      - name: Start the Supervisor
+        run: docker start hassio_supervisor
+
+      - name: Wait for Supervisor to come up
+        run: |
+          SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
+          ping="error"
+          while [ "$ping" != "ok" ]; do
+            ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r .result)
+            sleep 5
+          done
+
+      - name: Check the Supervisor
+        run: |
+          echo "Checking supervisor info"
+          test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r .result)
+          if [ "$test" != "ok" ]; then
+            docker logs hassio_supervisor
+            exit 1
+          fi
+
+          echo "Checking supervisor network info"
+          test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r .result)
+          if [ "$test" != "ok" ]; then
+            docker logs hassio_supervisor
+            exit 1
+          fi
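
The wait step polls the Supervisor's /supervisor/ping endpoint until its JSON answer reports ok, and the check steps then query the CLI container. The same probe can be run by hand against the started container, using the commands from the workflow above:

    SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
    # Prints "ok" once the Supervisor is up and answering
    curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r .result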
.github/workflows/check_pr_labels.yml (19 lines, vendored, new file)

@@ -0,0 +1,19 @@
+name: Check PR
+
+on:
+  pull_request:
+    branches: ["main"]
+    types: [labeled, unlabeled, synchronize]
+
+jobs:
+  init:
+    name: Check labels
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check labels
+        run: |
+          labels=$(jq -r '.pull_request.labels[] | .name' ${{ github.event_path }})
+          echo "$labels"
+          if [ "$labels" == "cla-signed" ]; then
+            exit 1
+          fi
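
The check pipes the PR event payload through jq to flatten the label list into names, then fails the job when "cla-signed" is the only label present. Against a hypothetical payload file the filter behaves like this (the sample JSON is illustrative, not a real event):

    # Hypothetical event payload with a single label
    echo '{"pull_request":{"labels":[{"name":"cla-signed"}]}}' > event.json
    jq -r '.pull_request.labels[] | .name' event.json   # prints: cla-signed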
431
.github/workflows/ci.yaml
vendored
Normal file
431
.github/workflows/ci.yaml
vendored
Normal file
@@ -0,0 +1,431 @@
|
||||
name: CI
|
||||
|
||||
# yamllint disable-line rule:truthy
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request: ~
|
||||
|
||||
env:
|
||||
DEFAULT_PYTHON: 3.8
|
||||
PRE_COMMIT_HOME: ~/.cache/pre-commit
|
||||
|
||||
jobs:
|
||||
# Separate job to pre-populate the base dependency cache
|
||||
# This prevent upcoming jobs to do the same individually
|
||||
prepare:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.8]
|
||||
name: Prepare Python ${{ matrix.python-version }} dependencies
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: venv
|
||||
key: |
|
||||
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
|
||||
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-
|
||||
- name: Create Python virtual environment
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
python -m venv venv
|
||||
. venv/bin/activate
|
||||
pip install -U pip setuptools
|
||||
pip install -r requirements.txt -r requirements_tests.txt
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_HOME }}
|
||||
key: |
|
||||
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pre-commit-
|
||||
- name: Install pre-commit dependencies
|
||||
if: steps.cache-precommit.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit install-hooks
|
||||
|
||||
lint-black:
|
||||
name: Check black
|
||||
runs-on: ubuntu-latest
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v2.2.1
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: venv
|
||||
key: |
|
||||
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||
- name: Fail job if Python cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python virtual environment from cache"
|
||||
exit 1
|
||||
- name: Run black
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
black --target-version py38 --check supervisor tests setup.py
|
||||
|
||||
lint-dockerfile:
|
||||
name: Check Dockerfile
|
||||
runs-on: ubuntu-latest
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2
|
||||
- name: Register hadolint problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
|
||||
- name: Check Dockerfile
|
||||
uses: docker://hadolint/hadolint:v1.18.0
|
||||
with:
|
||||
args: hadolint Dockerfile
|
||||
|
||||
lint-executable-shebangs:
|
||||
name: Check executables
|
||||
runs-on: ubuntu-latest
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v2.2.1
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: venv
|
||||
key: |
|
||||
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||
- name: Fail job if Python cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python virtual environment from cache"
|
||||
exit 1
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_HOME }}
|
||||
key: |
|
||||
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||
- name: Fail job if cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python virtual environment from cache"
|
||||
exit 1
|
||||
- name: Register check executables problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||
- name: Run executables check
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
|
||||
|
||||
lint-flake8:
|
||||
name: Check flake8
|
||||
runs-on: ubuntu-latest
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v2.2.1
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: venv
|
||||
key: |
|
||||
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||
- name: Fail job if Python cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python virtual environment from cache"
|
||||
exit 1
|
||||
- name: Register flake8 problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/flake8.json"
|
||||
- name: Run flake8
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
flake8 supervisor tests
|
||||
|
||||
lint-isort:
|
||||
name: Check isort
|
||||
runs-on: ubuntu-latest
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v2.2.1
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: venv
|
||||
key: |
|
||||
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||
- name: Fail job if Python cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python virtual environment from cache"
|
||||
exit 1
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_HOME }}
|
||||
key: |
|
||||
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||
- name: Fail job if cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python virtual environment from cache"
|
||||
exit 1
|
||||
- name: Run isort
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
|
||||
|
||||
lint-json:
|
||||
name: Check JSON
|
||||
runs-on: ubuntu-latest
|
||||
needs: prepare
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v2.2.1
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: venv
|
||||
key: |
|
||||
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||
- name: Fail job if Python cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python virtual environment from cache"
|
||||
exit 1
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_HOME }}
|
||||
key: |
|
||||
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||
- name: Fail job if cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python virtual environment from cache"
|
||||
exit 1
      - name: Register check-json problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-json.json"
      - name: Run check-json
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-json --all-files

  lint-pylint:
    name: Check pylint
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register pylint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/pylint.json"
      - name: Run pylint
        run: |
          . venv/bin/activate
          pylint supervisor tests

  lint-pyupgrade:
    name: Check pyupgrade
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-precommit.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore pre-commit environment from cache"
          exit 1
      - name: Run pyupgrade
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure

  pytest:
    runs-on: ubuntu-latest
    needs: prepare
    strategy:
      matrix:
        python-version: [3.8]
    name: Run tests Python ${{ matrix.python-version }}
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Install additional system dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends libpulse0 libudev1
      - name: Register Python problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/python.json"
      - name: Install Pytest Annotation plugin
        run: |
          . venv/bin/activate
          # Ideally this should be part of our dependencies
          # However this plugin is fairly new and doesn't run correctly
          # on a non-GitHub environment.
          pip install pytest-github-actions-annotate-failures
      - name: Run pytest
        run: |
          . venv/bin/activate
          pytest \
            -qq \
            --timeout=10 \
            --durations=10 \
            --cov supervisor \
            -o console_output_style=count \
            tests
      - name: Upload coverage artifact
        uses: actions/upload-artifact@v2.2.2
        with:
          name: coverage-${{ matrix.python-version }}
          path: .coverage

  coverage:
    name: Process test coverage
    runs-on: ubuntu-latest
    needs: pytest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Download all coverage artifacts
        uses: actions/download-artifact@v2
      - name: Combine coverage results
        run: |
          . venv/bin/activate
          coverage combine coverage*/.coverage*
          coverage report
          coverage xml
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v1.2.1
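Every job above restores the same `venv` cache and aborts on a miss: the key combines the runner OS, the exact Python version, and hashes of both requirements files, so editing either file invalidates every job's cache at once. A minimal Python sketch of an equivalent fingerprint (it assumes the two requirements files exist in the working directory; `hashFiles()` in Actions hashes files slightly differently, this only mirrors the idea):

```python
import hashlib
import sys
from platform import python_version
from typing import List


def venv_cache_key(requirement_files: List[str]) -> str:
    """Build a venv cache fingerprint in the spirit of the workflow key."""
    digests = []
    for path in requirement_files:
        with open(path, "rb") as handle:
            digests.append(hashlib.sha256(handle.read()).hexdigest())
    return f"{sys.platform}-venv-{python_version()}-" + "-".join(digests)


print(venv_cache_key(["requirements.txt", "requirements_tests.txt"]))
```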
.github/workflows/lock.yml (new file, vendored, 20 lines)
@@ -0,0 +1,20 @@
name: Lock

# yamllint disable-line rule:truthy
on:
  schedule:
    - cron: "0 0 * * *"

jobs:
  lock:
    runs-on: ubuntu-latest
    steps:
      - uses: dessant/lock-threads@v2.0.3
        with:
          github-token: ${{ github.token }}
          issue-lock-inactive-days: "30"
          issue-exclude-created-before: "2020-10-01T00:00:00Z"
          issue-lock-reason: ""
          pr-lock-inactive-days: "1"
          pr-exclude-created-before: "2020-11-01T00:00:00Z"
          pr-lock-reason: ""
.github/workflows/matchers/check-executables-have-shebangs.json (new file, vendored, 14 lines)
@@ -0,0 +1,14 @@
{
  "problemMatcher": [
    {
      "owner": "check-executables-have-shebangs",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+)$",
          "file": 1,
          "message": 2
        }
      ]
    }
  ]
}
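The `regexp` above splits a hook's `path: message` output into capture group 1 (file) and group 2 (message), which GitHub then turns into an annotation. A quick sketch of the same match in Python, using a made-up failure line:

```python
import re

# Same pattern as in the matcher JSON above.
PATTERN = re.compile(r"^(.+):\s(.+)$")

# Hypothetical output line from the check-executables-have-shebangs hook.
line = "scripts/run-supervisor.sh: marked executable but has no (or invalid) shebang!"

match = PATTERN.match(line)
if match:
    file, message = match.group(1), match.group(2)
    print(f"file={file!r} message={message!r}")
```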
.github/workflows/matchers/check-json.json (new file, vendored, 16 lines)
@@ -0,0 +1,16 @@
{
  "problemMatcher": [
    {
      "owner": "check-json",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
          "file": 1,
          "message": 2,
          "line": 3,
          "column": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/flake8.json (new file, vendored, 30 lines)
@@ -0,0 +1,30 @@
{
  "problemMatcher": [
    {
      "owner": "flake8-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    },
    {
      "owner": "flake8-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    }
  ]
}
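Unlike the simpler matchers, flake8 output is split into two owners so GitHub can render `E` codes as errors and `C/D/F/N/W` codes as warnings. A small sketch of that classification, using hypothetical flake8 output lines:

```python
import re

ERROR = re.compile(r"^(.*):(\d+):(\d+):\s(E\d{3}\s.*)$")
WARNING = re.compile(r"^(.*):(\d+):(\d+):\s([CDFNW]\d{3}\s.*)$")

# Hypothetical flake8 output lines.
lines = [
    "supervisor/core.py:10:1: E302 expected 2 blank lines, got 1",
    "tests/test_core.py:3:1: F401 'os' imported but unused",
]

for line in lines:
    if ERROR.match(line):
        print("error:", line)
    elif WARNING.match(line):
        print("warning:", line)
```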
.github/workflows/matchers/hadolint.json (new file, vendored, 16 lines)
@@ -0,0 +1,16 @@
{
  "problemMatcher": [
    {
      "owner": "hadolint",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
          "file": 1,
          "line": 2,
          "message": 3,
          "code": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/pylint.json (new file, vendored, 32 lines)
@@ -0,0 +1,32 @@
{
  "problemMatcher": [
    {
      "owner": "pylint-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    },
    {
      "owner": "pylint-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    }
  ]
}
.github/workflows/matchers/python.json (new file, vendored, 18 lines)
@@ -0,0 +1,18 @@
{
  "problemMatcher": [
    {
      "owner": "python",
      "pattern": [
        {
          "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
          "file": 1,
          "line": 2
        },
        {
          "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
          "message": 2
        }
      ]
    }
  ]
}
.github/workflows/release-drafter.yml (new file, vendored, 44 lines)
@@ -0,0 +1,44 @@
name: Release Drafter

on:
  push:
    branches:
      - main

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    name: Release Drafter
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Find Next Version
        id: version
        run: |
          declare -i newpost
          latest=$(git describe --tags $(git rev-list --tags --max-count=1))
          latestpre=$(echo "$latest" | awk '{split($0,a,"."); print a[1] "." a[2]}')
          datepre=$(date --utc '+%Y.%m')

          if [[ "$latestpre" == "$datepre" ]]; then
            latestpost=$(echo "$latest" | awk '{split($0,a,"."); print a[3]}')
            newpost=$latestpost+1
          else
            newpost=0
          fi

          echo Current version: $latest
          echo New target version: $datepre.$newpost
          echo "::set-output name=version::$datepre.$newpost"

      - name: Run Release Drafter
        uses: release-drafter/release-drafter@v5
        with:
          tag: ${{ steps.version.outputs.version }}
          name: ${{ steps.version.outputs.version }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
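The `Find Next Version` step implements calendar versioning: if the latest tag already belongs to the current `YYYY.MM`, it bumps the trailing counter, otherwise it starts the month at `.0`. The same logic as a Python sketch (the tag argument stands in for the `git describe` output; the example tag is hypothetical):

```python
from datetime import datetime, timezone


def next_version(latest_tag: str) -> str:
    """Compute the next YYYY.MM.N tag, mirroring the shell step above."""
    year_month = datetime.now(timezone.utc).strftime("%Y.%m")
    parts = latest_tag.split(".")
    if ".".join(parts[:2]) == year_month:
        return f"{year_month}.{int(parts[2]) + 1}"
    return f"{year_month}.0"


print(next_version("2021.01.4"))  # hypothetical latest tag
```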
.github/workflows/sentry.yaml (new file, vendored, 21 lines)
@@ -0,0 +1,21 @@
name: Sentry Release

# yamllint disable-line rule:truthy
on:
  release:
    types: [published, prereleased]

jobs:
  createSentryRelease:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Sentry Release
        uses: getsentry/action-release@v1.1
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
          SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
        with:
          environment: production
.github/workflows/stale.yml (new file, vendored, 39 lines)
@@ -0,0 +1,39 @@
name: Stale

# yamllint disable-line rule:truthy
on:
  schedule:
    - cron: "0 * * * *"

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v3.0.15
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 60
          days-before-close: 7
          stale-issue-label: "stale"
          exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"
          stale-issue-message: >
            There hasn't been any activity on this issue recently. Due to the
            high number of incoming GitHub notifications, we have to clean some
            of the old issues, as many of them have already been resolved with
            the latest updates.

            Please make sure to update to the latest version and check if that
            solves the issue. Let us know if that works for you by
            adding a comment 👍

            This issue has now been marked as stale and will be closed if no
            further activity occurs. Thank you for your contributions.

          stale-pr-label: "stale"
          exempt-pr-labels: "no-stale,pinned,rfc,security"
          stale-pr-message: >
            There hasn't been any activity on this pull request recently. This
            pull request has been automatically marked as stale because of that
            and will be closed if no further activity occurs within 7 days.

            Thank you for your contributions.
.gitignore (vendored, 5 lines changed)
@@ -95,3 +95,8 @@ ENV/
.vscode/*
!.vscode/cSpell.json
!.vscode/tasks.json
!.vscode/launch.json

# mypy
/.mypy_cache/*
/.dmypy.json
.pre-commit-config.yaml (new file, 34 lines)
@@ -0,0 +1,34 @@
repos:
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
      - id: black
        args:
          - --safe
          - --quiet
          - --target-version
          - py38
        files: ^((supervisor|tests)/.+)?[^/]+\.py$
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.3
    hooks:
      - id: flake8
        additional_dependencies:
          - flake8-docstrings==1.5.0
          - pydocstyle==5.0.2
        files: ^(supervisor|script|tests)/.+\.py$
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.1.0
    hooks:
      - id: check-executables-have-shebangs
        stages: [manual]
      - id: check-json
  - repo: https://github.com/pre-commit/mirrors-isort
    rev: v4.3.21
    hooks:
      - id: isort
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.6.2
    hooks:
      - id: pyupgrade
        args: [--py37-plus]
.vscode/launch.json (new file, vendored, 18 lines)
@@ -0,0 +1,18 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Supervisor remote debug",
      "type": "python",
      "request": "attach",
      "port": 33333,
      "host": "172.30.32.2",
      "pathMappings": [
        {
          "localRoot": "${workspaceFolder}",
          "remoteRoot": "/usr/src/supervisor"
        }
      ]
    }
  ]
}
.vscode/tasks.json (vendored, 194 lines changed)
@@ -1,92 +1,104 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Run Testenv",
      "type": "shell",
      "command": "./scripts/test_env.sh",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Run Testenv CLI",
      "type": "shell",
      "command": "docker run --rm -ti -v /etc/machine-id:/etc/machine-id --network=hassio --add-host hassio:172.30.32.2 homeassistant/amd64-hassio-cli:dev",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Update UI",
      "type": "shell",
      "command": "./scripts/update-frontend.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pytest",
      "type": "shell",
      "command": "pytest --timeout=10 tests",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Flake8",
      "type": "shell",
      "command": "flake8 hassio tests",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pylint",
      "type": "shell",
      "command": "pylint hassio",
      "dependsOn": [
        "Install all Requirements"
      ],
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    }
  ]
}
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Run Supervisor",
      "type": "shell",
      "command": "./scripts/run-supervisor.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Build Supervisor",
      "type": "shell",
      "command": "./scripts/build-supervisor.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Run Supervisor CLI",
      "type": "shell",
      "command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Update Supervisor Panel",
      "type": "shell",
      "command": "./scripts/update-frontend.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pytest",
      "type": "shell",
      "command": "pytest --timeout=10 tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Flake8",
      "type": "shell",
      "command": "flake8 supervisor tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pylint",
      "type": "shell",
      "command": "pylint supervisor",
      "dependsOn": ["Install all Requirements"],
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    }
  ]
}
API.md (deleted, 872 lines)
@@ -1,872 +0,0 @@
# Hass.io

## Hass.io RESTful API

Interface for Home Assistant to control things from the Supervisor.

On error / Code 400:

```json
{
  "result": "error",
  "message": ""
}
```

On success / Code 200:

```json
{
  "result": "ok",
  "data": {}
}
```

To access the API you need to set the `X-HASSIO-KEY` header. The token is available to add-ons and Home Assistant through the `HASSIO_TOKEN` environment variable.
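A minimal sketch of an authenticated call from inside an add-on, standard library only. It assumes the API is reachable under the `http://hassio` hostname on the internal network, which this document does not state:

```python
import json
import os
import urllib.request

# HASSIO_TOKEN is injected into add-on/Home Assistant containers (see above).
request = urllib.request.Request(
    "http://hassio/supervisor/info",
    headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
)
with urllib.request.urlopen(request) as response:
    payload = json.load(response)

print(payload["result"], payload["data"].get("version"))
```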
### Hass.io

- GET `/supervisor/ping`

This API call doesn't need a token.

- GET `/supervisor/info`

The add-ons listed in `addons` are only the installed ones.

```json
{
  "version": "INSTALL_VERSION",
  "last_version": "LAST_VERSION",
  "arch": "armhf|aarch64|i386|amd64",
  "channel": "stable|beta|dev",
  "timezone": "TIMEZONE",
  "logging": "debug|info|warning|error|critical",
  "ip_address": "ip address",
  "wait_boot": "int",
  "debug": "bool",
  "debug_block": "bool",
  "addons": [
    {
      "name": "xy bla",
      "slug": "xy",
      "description": "description",
      "repository": "12345678|null",
      "version": "LAST_VERSION",
      "installed": "INSTALL_VERSION",
      "icon": "bool",
      "logo": "bool",
      "state": "started|stopped"
    }
  ],
  "addons_repositories": ["REPO_URL"]
}
```

- POST `/supervisor/update`

Optional:

```json
{
  "version": "VERSION"
}
```

- POST `/supervisor/options`

```json
{
  "channel": "stable|beta|dev",
  "timezone": "TIMEZONE",
  "wait_boot": "int",
  "debug": "bool",
  "debug_block": "bool",
  "logging": "debug|info|warning|error|critical",
  "addons_repositories": ["REPO_URL"]
}
```

- POST `/supervisor/reload`

Reload add-ons/version.

- GET `/supervisor/logs`

Output is the raw Docker log.

- GET `/supervisor/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

- GET `/supervisor/repair`

Repair OverlayFS issues and restore lost images.

### Snapshot

- GET `/snapshots`

```json
{
  "snapshots": [
    {
      "slug": "SLUG",
      "date": "ISO",
      "name": "Custom name",
      "type": "full|partial",
      "protected": "bool"
    }
  ]
}
```

- POST `/snapshots/reload`

- POST `/snapshots/new/upload`

return:

```json
{
  "slug": ""
}
```

- POST `/snapshots/new/full`

```json
{
  "name": "Optional",
  "password": "Optional"
}
```

return:

```json
{
  "slug": ""
}
```

- POST `/snapshots/new/partial`

```json
{
  "name": "Optional",
  "addons": ["ADDON_SLUG"],
  "folders": ["FOLDER_NAME"],
  "password": "Optional"
}
```

return:

```json
{
  "slug": ""
}
```
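As a usage sketch, creating a partial snapshot and then reading its info could look like this. It reuses the `http://hassio` host and `HASSIO_TOKEN` assumptions from the earlier example, and the add-on slug and folder name are placeholders:

```python
import json
import os
import urllib.request

HEADERS = {
    "X-HASSIO-KEY": os.environ["HASSIO_TOKEN"],
    "Content-Type": "application/json",
}

# Placeholder add-on slug and folder name.
body = json.dumps(
    {"name": "Before upgrade", "addons": ["core_ssh"], "folders": ["ssl"]}
).encode()
request = urllib.request.Request(
    "http://hassio/snapshots/new/partial", data=body, headers=HEADERS
)
with urllib.request.urlopen(request) as response:
    slug = json.load(response)["data"]["slug"]

# Fetch details of the snapshot we just created.
info = urllib.request.Request(
    f"http://hassio/snapshots/{slug}/info", headers=HEADERS
)
with urllib.request.urlopen(info) as response:
    print(json.load(response)["data"]["size"], "MB")
```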
- POST `/snapshots/reload`

- GET `/snapshots/{slug}/info`

```json
{
  "slug": "SNAPSHOT ID",
  "type": "full|partial",
  "name": "custom snapshot name / description",
  "date": "ISO",
  "size": "SIZE_IN_MB",
  "protected": "bool",
  "homeassistant": "version",
  "addons": [
    {
      "slug": "ADDON_SLUG",
      "name": "NAME",
      "version": "INSTALLED_VERSION",
      "size": "SIZE_IN_MB"
    }
  ],
  "repositories": ["URL"],
  "folders": ["NAME"]
}
```

- POST `/snapshots/{slug}/remove`

- GET `/snapshots/{slug}/download`

- POST `/snapshots/{slug}/restore/full`

```json
{
  "password": "Optional"
}
```

- POST `/snapshots/{slug}/restore/partial`

```json
{
  "homeassistant": "bool",
  "addons": ["ADDON_SLUG"],
  "folders": ["FOLDER_NAME"],
  "password": "Optional"
}
```

### Host

- POST `/host/reload`

- POST `/host/shutdown`

- POST `/host/reboot`

- GET `/host/info`

```json
{
  "hostname": "hostname|null",
  "features": ["shutdown", "reboot", "hostname", "services", "hassos"],
  "operating_system": "HassOS XY|Ubuntu 16.4|null",
  "kernel": "4.15.7|null",
  "chassis": "specific|null",
  "deployment": "stable|beta|dev|null",
  "cpe": "xy|null"
}
```

- POST `/host/options`

```json
{
  "hostname": ""
}
```

- POST `/host/reload`

#### Services

- GET `/host/services`

```json
{
  "services": [
    {
      "name": "xy.service",
      "description": "XY ...",
      "state": "active|"
    }
  ]
}
```

- POST `/host/service/{unit}/stop`

- POST `/host/service/{unit}/start`

- POST `/host/service/{unit}/reload`

### HassOS

- GET `/hassos/info`

```json
{
  "version": "2.3",
  "version_cli": "7",
  "version_latest": "2.4",
  "version_cli_latest": "8",
  "board": "ova|rpi",
  "boot": "rauc boot slot"
}
```

- POST `/hassos/update`

```json
{
  "version": "optional"
}
```

- POST `/hassos/update/cli`

```json
{
  "version": "optional"
}
```

- POST `/hassos/config/sync`

Load host configs from a USB stick.

### Hardware

- GET `/hardware/info`

```json
{
  "serial": ["/dev/xy"],
  "input": ["Input device name"],
  "disk": ["/dev/sdax"],
  "gpio": ["gpiochip0", "gpiochip100"],
  "audio": {
    "CARD_ID": {
      "name": "xy",
      "type": "microphone",
      "devices": [
        "chan_id": "channel ID",
        "chan_type": "type of device"
      ]
    }
  }
}
```

- GET `/hardware/audio`

```json
{
  "audio": {
    "input": {
      "0,0": "Mic"
    },
    "output": {
      "1,0": "Jack",
      "1,1": "HDMI"
    }
  }
}
```

- POST `/hardware/trigger`

Trigger a udev reload.

### Home Assistant

- GET `/homeassistant/info`

```json
{
  "version": "INSTALL_VERSION",
  "last_version": "LAST_VERSION",
  "arch": "arch",
  "machine": "Image machine type",
  "ip_address": "ip address",
  "image": "str",
  "custom": "bool -> if custom image",
  "boot": "bool",
  "port": 8123,
  "ssl": "bool",
  "watchdog": "bool",
  "wait_boot": 600
}
```

- POST `/homeassistant/update`

Optional:

```json
{
  "version": "VERSION"
}
```

- GET `/homeassistant/logs`

Output is the raw Docker log.

- POST `/homeassistant/restart`
- POST `/homeassistant/check`
- POST `/homeassistant/start`
- POST `/homeassistant/stop`
- POST `/homeassistant/rebuild`

- POST `/homeassistant/options`

```json
{
  "image": "Optional|null",
  "last_version": "Optional for custom image|null",
  "port": "port for access hass",
  "ssl": "bool",
  "refresh_token": "",
  "watchdog": "bool",
  "wait_boot": 600
}
```

Setting `image` and `last_version` to `null` resets these options.

- POST/GET `/homeassistant/api`

Proxy to the real Home Assistant instance.

- GET `/homeassistant/websocket`

Proxy to the real websocket instance.

- GET `/homeassistant/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

### RESTful for API add-ons

If an add-on wants to call itself, it can use `/addons/self/...`.

- GET `/addons`

Get all available add-ons.

```json
{
  "addons": [
    {
      "name": "xy bla",
      "slug": "xy",
      "description": "description",
      "advanced": "bool",
      "repository": "core|local|REP_ID",
      "version": "LAST_VERSION",
      "installed": "none|INSTALL_VERSION",
      "detached": "bool",
      "available": "bool",
      "build": "bool",
      "url": "null|url",
      "icon": "bool",
      "logo": "bool"
    }
  ],
  "repositories": [
    {
      "slug": "12345678",
      "name": "Repository Name|unknown",
      "source": "URL_OF_REPOSITORY",
      "url": "WEBSITE|REPOSITORY",
      "maintainer": "BLA BLU <fla@dld.ch>|unknown"
    }
  ]
}
```

- POST `/addons/reload`
- GET `/addons/{addon}/info`

```json
{
  "name": "xy bla",
  "slug": "xdssd_xybla",
  "hostname": "xdssd-xybla",
  "dns": [],
  "description": "description",
  "long_description": "null|markdown",
  "auto_update": "bool",
  "url": "null|url of addon",
  "detached": "bool",
  "available": "bool",
  "advanced": "bool",
  "arch": ["armhf", "aarch64", "i386", "amd64"],
  "machine": "[raspberrypi2, tinker]",
  "homeassistant": "null|min Home Assistant version",
  "repository": "12345678|null",
  "version": "null|VERSION_INSTALLED",
  "last_version": "LAST_VERSION",
  "state": "none|started|stopped",
  "boot": "auto|manual",
  "build": "bool",
  "options": "{}",
  "schema": "{}|null",
  "network": "{}|null",
  "network_description": "{}|null",
  "host_network": "bool",
  "host_pid": "bool",
  "host_ipc": "bool",
  "host_dbus": "bool",
  "privileged": ["NET_ADMIN", "SYS_ADMIN"],
  "apparmor": "disable|default|profile",
  "devices": ["/dev/xy"],
  "udev": "bool",
  "auto_uart": "bool",
  "icon": "bool",
  "logo": "bool",
  "changelog": "bool",
  "documentation": "bool",
  "hassio_api": "bool",
  "hassio_role": "default|homeassistant|manager|admin",
  "homeassistant_api": "bool",
  "auth_api": "bool",
  "full_access": "bool",
  "protected": "bool",
  "rating": "1-6",
  "stdin": "bool",
  "webui": "null|http(s)://[HOST]:port/xy/zx",
  "gpio": "bool",
  "kernel_modules": "bool",
  "devicetree": "bool",
  "docker_api": "bool",
  "audio": "bool",
  "audio_input": "null|0,0",
  "audio_output": "null|0,0",
  "services_role": "['service:access']",
  "discovery": "['service']",
  "ip_address": "ip address",
  "ingress": "bool",
  "ingress_entry": "null|/api/hassio_ingress/slug",
  "ingress_url": "null|/api/hassio_ingress/slug/entry.html",
  "ingress_port": "null|int",
  "ingress_panel": "null|bool"
}
```

- GET `/addons/{addon}/icon`

- GET `/addons/{addon}/logo`

- GET `/addons/{addon}/changelog`

- GET `/addons/{addon}/documentation`

- POST `/addons/{addon}/options`

```json
{
  "boot": "auto|manual",
  "auto_update": "bool",
  "network": {
    "CONTAINER": "port|[ip, port]"
  },
  "options": {},
  "audio_output": "null|0,0",
  "audio_input": "null|0,0",
  "ingress_panel": "bool"
}
```

To reset custom network/audio/options, set them to `null`.

- POST `/addons/{addon}/security`

This endpoint is not callable by the add-on itself.

```json
{
  "protected": "bool"
}
```

- POST `/addons/{addon}/start`

- POST `/addons/{addon}/stop`

- POST `/addons/{addon}/install`

- POST `/addons/{addon}/uninstall`

- POST `/addons/{addon}/update`

- GET `/addons/{addon}/logs`

Output is the raw Docker log.

- POST `/addons/{addon}/restart`

- POST `/addons/{addon}/rebuild`

Only supported for locally built add-ons.

- POST `/addons/{addon}/stdin`

Write data to the add-on's stdin.

- GET `/addons/{addon}/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

### Ingress

- POST `/ingress/session`

Create a new session for access to the ingress service.

```json
{
  "session": "token"
}
```

- GET `/ingress/panels`

Return a list of enabled panels.

```json
{
  "panels": {
    "addon_slug": {
      "enable": "boolean",
      "icon": "mdi:...",
      "title": "title",
      "admin": "boolean"
    }
  }
}
```

- VIEW `/ingress/{token}`

Ingress WebUI for this add-on. The add-on needs to support Home Assistant auth!
Needs the ingress session as a cookie.
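A short sketch of that session flow: create a session via the API, then present it as a cookie when opening the ingress path. The `ingress_session` cookie name, the `http://hassio` host, and the ingress token value are assumptions here, not stated by this document:

```python
import json
import os
import urllib.request

token = os.environ["HASSIO_TOKEN"]

# 1. Create a new ingress session.
request = urllib.request.Request(
    "http://hassio/ingress/session",
    data=b"{}",
    headers={"X-HASSIO-KEY": token},
)
with urllib.request.urlopen(request) as response:
    session = json.load(response)["data"]["session"]

# 2. Attach the session cookie when loading an ingress view.
view = urllib.request.Request(
    "http://hassio/ingress/SOME_INGRESS_TOKEN",  # placeholder ingress token
    headers={"Cookie": f"ingress_session={session}"},
)
print(view.full_url, "prepared with session cookie")
```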

### Discovery

- GET `/discovery`

```json
{
  "discovery": [
    {
      "addon": "slug",
      "service": "name",
      "uuid": "uuid",
      "config": {}
    }
  ]
}
```

- GET `/discovery/{UUID}`

```json
{
  "addon": "slug",
  "service": "name",
  "uuid": "uuid",
  "config": {}
}
```

- POST `/discovery`

```json
{
  "service": "name",
  "config": {}
}
```

return:

```json
{
  "uuid": "uuid"
}
```

- DEL `/discovery/{UUID}`

### Services

- GET `/services`

```json
{
  "services": [
    {
      "slug": "name",
      "available": "bool",
      "providers": "list"
    }
  ]
}
```

#### MQTT

- GET `/services/mqtt`

```json
{
  "addon": "name",
  "host": "xy",
  "port": "8883",
  "ssl": "bool",
  "username": "optional",
  "password": "optional",
  "protocol": "3.1.1"
}
```

- POST `/services/mqtt`

```json
{
  "host": "xy",
  "port": "8883",
  "ssl": "bool|optional",
  "username": "optional",
  "password": "optional",
  "protocol": "3.1.1"
}
```

- DEL `/services/mqtt`

#### MySQL

- GET `/services/mysql`

```json
{
  "addon": "name",
  "host": "xy",
  "port": "8883",
  "username": "optional",
  "password": "optional"
}
```

- POST `/services/mysql`

```json
{
  "host": "xy",
  "port": "8883",
  "username": "optional",
  "password": "optional"
}
```

- DEL `/services/mysql`

### Misc

- GET `/info`

```json
{
  "supervisor": "version",
  "homeassistant": "version",
  "hassos": "null|version",
  "hostname": "name",
  "machine": "type",
  "arch": "arch",
  "supported_arch": ["arch1", "arch2"],
  "channel": "stable|beta|dev",
  "logging": "debug|info|warning|error|critical",
  "timezone": "Europe/Zurich"
}
```

### DNS

- GET `/dns/info`

```json
{
  "host": "ip-address",
  "version": "1",
  "latest_version": "2",
  "servers": ["dns://8.8.8.8"],
  "locals": ["dns://xy"]
}
```

- POST `/dns/options`

```json
{
  "servers": ["dns://8.8.8.8"]
}
```

- POST `/dns/update`

```json
{
  "version": "VERSION"
}
```

- POST `/dns/restart`

- POST `/dns/reset`

- GET `/dns/logs`

- GET `/dns/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

### Auth / SSO API

You can use the Home Assistant user system. We handle this auth system on the
Supervisor.

You can call POST `/auth`.

We support:

- JSON: `{ "user|name": "...", "password": "..." }`
- application/x-www-form-urlencoded: `user|name=...&password=...`
- BasicAuth

* POST `/auth/reset`

```json
{
  "username": "xy",
  "password": "new-password"
}
```
Dockerfile (41 lines changed)
@@ -1,37 +1,40 @@
ARG BUILD_FROM
FROM $BUILD_FROM

ENV \
    S6_SERVICES_GRACETIME=10000 \
    SUPERVISOR_API=http://localhost

# Install base
RUN apk add --no-cache \
    openssl \
    libffi \
    musl \
    git \
    socat \
    glib \
    eudev \
    eudev-libs
RUN \
    apk add --no-cache \
        eudev \
        eudev-libs \
        git \
        glib \
        libffi \
        libpulse \
        musl \
        openssl

ARG BUILD_ARCH
WORKDIR /usr/src

# Install requirements
COPY requirements.txt .
RUN export MAKEFLAGS="-j$(nproc)" \
RUN \
    export MAKEFLAGS="-j$(nproc)" \
    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
        "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
        -r ./requirements.txt \
    && rm -f requirements.txt

# Install HassIO
COPY . hassio
RUN pip3 install --no-cache-dir -e ./hassio \
    && python3 -m compileall ./hassio/hassio
# Install Home Assistant Supervisor
COPY . supervisor
RUN \
    pip3 install --no-cache-dir -e ./supervisor \
    && python3 -m compileall ./supervisor/supervisor

# Initialize udev daemon, handle CMD
COPY entry.sh /bin/
ENTRYPOINT ["/bin/entry.sh"]

WORKDIR /
CMD [ "python3", "-m", "hassio" ]
COPY rootfs /
LICENSE (4 lines changed)
@@ -178,7 +178,7 @@
   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
@@ -186,7 +186,7 @@
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2017 Pascal Vizeli
   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
@@ -1,3 +1,3 @@
include LICENSE.md
graft hassio
graft supervisor
recursive-exclude * *.py[co]
README.md (40 lines changed)
@@ -1,30 +1,32 @@
[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)

# Hass.io
# Home Assistant Supervisor

## First private cloud solution for home automation

Hass.io is a Docker-based system for managing your Home Assistant installation
and related applications. The system is controlled via Home Assistant which
communicates with the Supervisor. The Supervisor provides an API to manage the
installation. This includes changing network settings or installing
and updating software.

Home Assistant (former Hass.io) is a container-based system for managing your
Home Assistant Core installation and related applications. The system is
controlled via Home Assistant which communicates with the Supervisor. The
Supervisor provides an API to manage the installation. This includes changing
network settings or installing and updating software.

## Installation

Installation instructions can be found at <https://home-assistant.io/hassio>.
Installation instructions can be found at https://home-assistant.io/getting-started.

## Development

The development of the supervisor is a bit tricky. Not difficult but tricky.
For small changes and bugfixes you can just follow this, but for significant changes open an RFC first.
Development instructions can be found [here][development].

- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-builder
- Go into a HassOS device or VM and pull your supervisor.
- Set developer mode with the CLI: `hassio supervisor options --channel=dev`
- Tag it as `homeassistant/xy-hassio-supervisor:latest`
- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
- Test your changes

## Release

For small bugfixes or improvements, make a PR. For significant changes, open an RFC first.
Releases are done in 3 stages (channels) with this structure:

1. Pull requests are merged to the `main` branch.
2. A new build is pushed to the `dev` stage.
3. Releases are published.
4. A new build is pushed to the `beta` stage.
5. The [`stable.json`][stable] file is updated.
6. The build that was pushed to `beta` will now be pushed to `stable`.

[development]: https://developers.home-assistant.io/docs/supervisor/development
[stable]: https://github.com/home-assistant/version/blob/master/stable.json
@@ -1,154 +0,0 @@
# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - master
      - dev
  tags:
    include:
      - "*"
    exclude:
      - untagged*
pr:
  - dev
variables:
  - name: basePythonTag
    value: "3.7-alpine3.11"
  - name: versionHadolint
    value: "v1.16.3"
  - name: versionBuilder
    value: "6.9"
  - name: versionWheels
    value: "1.6-3.7-alpine3.11"
  - group: docker
  - group: wheels

stages:
  - stage: "Test"
    jobs:
      - job: "Tox"
        pool:
          vmImage: "ubuntu-latest"
        steps:
          - task: UsePythonVersion@0
            displayName: "Use Python 3.7"
            inputs:
              versionSpec: "3.7"
          - script: pip install tox
            displayName: "Install Tox"
          - script: tox
            displayName: "Run Tox"
      - job: "JQ"
        pool:
          vmImage: "ubuntu-latest"
        steps:
          - script: sudo apt-get install -y jq
            displayName: "Install JQ"
          - bash: |
              shopt -s globstar
              cat **/*.json | jq '.'
            displayName: "Run JQ"
      - job: "Hadolint"
        pool:
          vmImage: "ubuntu-latest"
        steps:
          - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
            displayName: "Install Hadolint"
          - script: |
              sudo docker run --rm -i \
                -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
                hadolint/hadolint:$(versionHadolint) < Dockerfile
            displayName: "Run Hadolint"

  - stage: "Wheels"
    jobs:
      - job: "Wheels"
        condition: eq(variables['Build.SourceBranchName'], 'dev')
        timeoutInMinutes: 360
        pool:
          vmImage: "ubuntu-latest"
        strategy:
          maxParallel: 5
          matrix:
            amd64:
              buildArch: "amd64"
            i386:
              buildArch: "i386"
            armhf:
              buildArch: "armhf"
            armv7:
              buildArch: "armv7"
            aarch64:
              buildArch: "aarch64"
        steps:
          - script: |
              sudo apt-get update
              sudo apt-get install -y --no-install-recommends \
                qemu-user-static \
                binfmt-support \
                curl

              sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
              sudo update-binfmts --enable qemu-arm
              sudo update-binfmts --enable qemu-aarch64
            displayName: "Initial cross build"
          - script: |
              mkdir -p .ssh
              echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
              ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
              chmod 600 .ssh/*
            displayName: "Install ssh key"
          - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
            displayName: "Install wheels builder"
          - script: |
              sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
                homeassistant/$(buildArch)-wheels:$(versionWheels) \
                --apk "build-base;libffi-dev;openssl-dev" \
                --index $(wheelsIndex) \
                --requirement requirements.txt \
                --upload rsync \
                --remote wheels@$(wheelsHost):/opt/wheels
            displayName: "Run wheels build"

  - stage: "Deploy"
    jobs:
      - job: "VersionValidate"
        condition: or(startsWith(variables['Build.SourceBranch'], 'refs/tags'), eq(variables['Build.SourceBranchName'], 'dev'))
        pool:
          vmImage: "ubuntu-latest"
        steps:
          - task: UsePythonVersion@0
            displayName: "Use Python 3.7"
            inputs:
              versionSpec: "3.7"
          - script: |
              setup_version="$(python setup.py -V)"
              branch_version="$(Build.SourceBranchName)"

              if [ "${branch_version}" == "dev" ]; then
                exit 0
              elif [ "${setup_version}" != "${branch_version}" ]; then
                echo "Version of tag ${branch_version} don't match with ${setup_version}!"
                exit 1
              fi
            displayName: "Check version of branch/tag"
      - job: "Release"
        dependsOn:
          - "VersionValidate"
        pool:
          vmImage: "ubuntu-latest"
        steps:
          - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
            displayName: "Docker hub login"
          - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
            displayName: "Install Builder"
          - script: |
              sudo docker run --rm --privileged \
                -v ~/.docker:/root/.docker \
                -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
                homeassistant/amd64-builder:$(versionBuilder) \
                --supervisor $(basePythonTag) --version $(Build.SourceBranchName) \
                --all -t /data --docker-hub homeassistant
            displayName: "Build Release"
build.json (new file, 13 lines)
@@ -0,0 +1,13 @@
{
  "image": "homeassistant/{arch}-hassio-supervisor",
  "build_from": {
    "aarch64": "homeassistant/aarch64-base-python:3.8-alpine3.12",
    "armhf": "homeassistant/armhf-base-python:3.8-alpine3.12",
    "armv7": "homeassistant/armv7-base-python:3.8-alpine3.12",
    "amd64": "homeassistant/amd64-base-python:3.8-alpine3.12",
    "i386": "homeassistant/i386-base-python:3.8-alpine3.12"
  },
  "labels": {
    "io.hass.type": "supervisor"
  }
}
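`build.json` maps each architecture to a base image, and the `{arch}` placeholder in `image` is filled in per build. A tiny sketch of that substitution (the dict mirrors two entries of the file above):

```python
from typing import Tuple

BUILD_JSON = {
    "image": "homeassistant/{arch}-hassio-supervisor",
    "build_from": {
        "aarch64": "homeassistant/aarch64-base-python:3.8-alpine3.12",
        "amd64": "homeassistant/amd64-base-python:3.8-alpine3.12",
    },
}


def resolve(arch: str) -> Tuple[str, str]:
    """Return (target image, base image) for one architecture."""
    image = BUILD_JSON["image"].format(arch=arch)
    base = BUILD_JSON["build_from"][arch]
    return image, base


print(resolve("amd64"))
```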
codecov.yaml (new file, 11 lines)
@@ -0,0 +1,11 @@
codecov:
  branch: dev
coverage:
  status:
    project:
      default:
        target: 40
        threshold: 0.09
comment: false
github_checks:
  annotations: false
entry.sh (deleted, 13 lines)
@@ -1,13 +0,0 @@
#!/bin/bash
set -e

udevd --daemon
udevadm trigger

if CMD="$(command -v "$1")"; then
  shift
  exec "$CMD" "$@"
else
  echo "Command not found: $1"
  exit 1
fi
@@ -1 +0,0 @@
"""Init file for Hass.io."""
@@ -1,63 +0,0 @@
"""Main file for Hass.io."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
import sys

from hassio import bootstrap

_LOGGER: logging.Logger = logging.getLogger(__name__)


def initialize_event_loop():
    """Attempt to use uvloop."""
    try:
        # pylint: disable=import-outside-toplevel
        import uvloop

        uvloop.install()
    except ImportError:
        pass

    return asyncio.get_event_loop()


# pylint: disable=invalid-name
if __name__ == "__main__":
    bootstrap.initialize_logging()

    # Init async event loop
    loop = initialize_event_loop()

    # Check if all information are available to setup Hass.io
    if not bootstrap.check_environment():
        sys.exit(1)

    # init executor pool
    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
    loop.set_default_executor(executor)

    _LOGGER.info("Initialize Hass.io setup")
    coresys = loop.run_until_complete(bootstrap.initialize_coresys())
    loop.run_until_complete(coresys.core.connect())

    bootstrap.supervisor_debugger(coresys)
    bootstrap.migrate_system_env(coresys)

    _LOGGER.info("Setup HassIO")
    loop.run_until_complete(coresys.core.setup())

    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)

    try:
        _LOGGER.info("Run Hass.io")
        loop.run_forever()
    finally:
        _LOGGER.info("Stopping Hass.io")
        loop.run_until_complete(coresys.core.stop())
        executor.shutdown(wait=False)
        loop.close()

    _LOGGER.info("Close Hass.io")
    sys.exit(0)
@@ -1,568 +0,0 @@
"""Validate add-ons options schema."""
import logging
import re
import secrets
from typing import Any, Dict, List
import uuid

import voluptuous as vol

from ..const import (
    ARCH_ALL,
    ATTR_ACCESS_TOKEN,
    ATTR_ADVANCED,
    ATTR_APPARMOR,
    ATTR_ARCH,
    ATTR_ARGS,
    ATTR_AUDIO,
    ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT,
    ATTR_AUTH_API,
    ATTR_AUTO_UART,
    ATTR_AUTO_UPDATE,
    ATTR_BOOT,
    ATTR_BUILD_FROM,
    ATTR_DESCRIPTON,
    ATTR_DEVICES,
    ATTR_DEVICETREE,
    ATTR_DISCOVERY,
    ATTR_DOCKER_API,
    ATTR_ENVIRONMENT,
    ATTR_FULL_ACCESS,
    ATTR_GPIO,
    ATTR_HASSIO_API,
    ATTR_HASSIO_ROLE,
    ATTR_HOMEASSISTANT,
    ATTR_HOMEASSISTANT_API,
    ATTR_HOST_DBUS,
    ATTR_HOST_IPC,
    ATTR_HOST_NETWORK,
    ATTR_HOST_PID,
    ATTR_IMAGE,
    ATTR_INGRESS,
    ATTR_INGRESS_ENTRY,
    ATTR_INGRESS_PANEL,
    ATTR_INGRESS_PORT,
    ATTR_INGRESS_TOKEN,
    ATTR_KERNEL_MODULES,
    ATTR_LEGACY,
    ATTR_LOCATON,
    ATTR_MACHINE,
    ATTR_MAP,
    ATTR_NAME,
    ATTR_NETWORK,
    ATTR_OPTIONS,
    ATTR_PANEL_ADMIN,
    ATTR_PANEL_ICON,
    ATTR_PANEL_TITLE,
    ATTR_PORTS,
    ATTR_PORTS_DESCRIPTION,
    ATTR_PRIVILEGED,
    ATTR_PROTECTED,
    ATTR_REPOSITORY,
    ATTR_SCHEMA,
    ATTR_SERVICES,
    ATTR_SLUG,
    ATTR_SNAPSHOT_EXCLUDE,
    ATTR_SQUASH,
    ATTR_STARTUP,
    ATTR_STATE,
    ATTR_STDIN,
    ATTR_SYSTEM,
    ATTR_TIMEOUT,
    ATTR_TMPFS,
    ATTR_UDEV,
    ATTR_URL,
    ATTR_USER,
    ATTR_UUID,
    ATTR_VERSION,
    ATTR_WEBUI,
    BOOT_AUTO,
    BOOT_MANUAL,
    PRIVILEGED_ALL,
    ROLE_ALL,
    ROLE_DEFAULT,
    STARTUP_ALL,
    STARTUP_APPLICATION,
    STARTUP_SERVICES,
    STATE_STARTED,
    STATE_STOPPED,
)
from ..coresys import CoreSys
from ..discovery.validate import valid_discovery_service
from ..validate import (
    DOCKER_PORTS,
    DOCKER_PORTS_DESCRIPTION,
    alsa_device,
    network_port,
    token,
    uuid_match,
)

_LOGGER: logging.Logger = logging.getLogger(__name__)


RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")

V_STR = "str"
V_INT = "int"
V_FLOAT = "float"
V_BOOL = "bool"
V_PASSWORD = "password"
V_EMAIL = "email"
V_URL = "url"
V_PORT = "port"
V_MATCH = "match"
V_LIST = "list"

RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|bool|email|url|port"
    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
    r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r"|list\((?P<list>.+)\)"
    r")\??$"
)
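# The pattern above accepts a bare type name ("bool", "email", "url",
# "port"), a type with optional min/max arguments such as "int(0,100)",
# "str(,32)", "password(8,)" or "float(0.1,)", a custom regex via
# "match(...)", a nested element type via "list(str)", and a trailing
# "?" that marks the option itself as optional.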
|
||||
_SCHEMA_LENGTH_PARTS = (
|
||||
"i_min",
|
||||
"i_max",
|
||||
"f_min",
|
||||
"f_max",
|
||||
"s_min",
|
||||
"s_max",
|
||||
"p_min",
|
||||
"p_max",
|
||||
)
|
||||
|
||||
RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
|
||||
RE_DOCKER_IMAGE_BUILD = re.compile(
|
||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
|
||||
)
|
||||
|
||||
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
|
||||
|
||||
|
||||
MACHINE_ALL = [
|
||||
"intel-nuc",
|
||||
"odroid-c2",
|
||||
"odroid-n2",
|
||||
"odroid-xu",
|
||||
"qemuarm-64",
|
||||
"qemuarm",
|
||||
"qemux86-64",
|
||||
"qemux86",
|
||||
"raspberrypi",
|
||||
"raspberrypi2",
|
||||
"raspberrypi3-64",
|
||||
"raspberrypi3",
|
||||
"raspberrypi4-64",
|
||||
"raspberrypi4",
|
||||
"tinker",
|
||||
]
|
||||
|
||||
|
def _simple_startup(value):
    """Simple startup schema."""
    if value == "before":
        return STARTUP_SERVICES
    if value == "after":
        return STARTUP_APPLICATION
    return value


# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema(
    {
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
        vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
        vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
        vol.Optional(ATTR_URL): vol.Url(),
        vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.In(STARTUP_ALL)),
        vol.Required(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
        vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
        vol.Optional(ATTR_PORTS): DOCKER_PORTS,
        vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
        vol.Optional(ATTR_WEBUI): vol.Match(
            r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"
        ),
        vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
        vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
            network_port, vol.Equal(0)
        ),
        vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
        vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
        vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
        vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
        vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
        vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
        vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
        vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
        vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
        vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
        vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
        vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
        vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
        vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
        vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
        vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
        vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [vol.Coerce(str)],
        vol.Required(ATTR_OPTIONS): dict,
        vol.Required(ATTR_SCHEMA): vol.Any(
            vol.Schema(
                {
                    vol.Coerce(str): vol.Any(
                        SCHEMA_ELEMENT,
                        [
                            vol.Any(
                                SCHEMA_ELEMENT,
                                {
                                    vol.Coerce(str): vol.Any(
                                        SCHEMA_ELEMENT, [SCHEMA_ELEMENT]
                                    )
                                },
                            )
                        ],
                        vol.Schema(
                            {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
                        ),
                    )
                }
            ),
            False,
        ),
        vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE),
        vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
            vol.Coerce(int), vol.Range(min=10, max=120)
        ),
    },
    extra=vol.REMOVE_EXTRA,
)
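For orientation, a minimal sketch of an add-on manifest that would pass SCHEMA_ADDON_CONFIG. The key names assume the usual ATTR_* constant values ("name", "version", and so on); all concrete values here are invented for illustration:

# Hypothetical add-on manifest; only the required keys plus options/schema
# are shown. Values are illustrative, not taken from any real add-on.
example_config = {
    "name": "Example Add-on",
    "version": "1.0.0",
    "slug": "example_addon",
    "description": "Demonstrates the manifest schema.",
    "arch": ["amd64", "armv7"],
    "startup": "application",  # "before"/"after" are mapped by _simple_startup
    "boot": "auto",
    "options": {"message": "hello"},
    "schema": {"message": "str"},
}

validated = SCHEMA_ADDON_CONFIG(example_config)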

# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
            {vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
        ),
        vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
        vol.Optional(ATTR_ARGS, default=dict): vol.Schema(
            {vol.Coerce(str): vol.Coerce(str)}
        ),
    },
    extra=vol.REMOVE_EXTRA,
)


# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema(
    {
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
        vol.Optional(ATTR_ACCESS_TOKEN): token,
        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
            str
        ),
        vol.Optional(ATTR_OPTIONS, default=dict): dict,
        vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
        vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
        vol.Optional(ATTR_AUDIO_OUTPUT): alsa_device,
        vol.Optional(ATTR_AUDIO_INPUT): alsa_device,
        vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
        vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
    },
    extra=vol.REMOVE_EXTRA,
)


SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
    {
        vol.Required(ATTR_LOCATON): vol.Coerce(str),
        vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
    }
)


SCHEMA_ADDONS_FILE = vol.Schema(
    {
        vol.Optional(ATTR_USER, default=dict): {vol.Coerce(str): SCHEMA_ADDON_USER},
        vol.Optional(ATTR_SYSTEM, default=dict): {vol.Coerce(str): SCHEMA_ADDON_SYSTEM},
    }
)


SCHEMA_ADDON_SNAPSHOT = vol.Schema(
    {
        vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
        vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
        vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
    },
    extra=vol.REMOVE_EXTRA,
)

def validate_options(coresys: CoreSys, raw_schema: Dict[str, Any]):
    """Validate schema."""

    def validate(struct):
        """Create schema validator for add-ons options."""
        options = {}

        # read options
        for key, value in struct.items():
            # Ignore unknown options / remove from list
            if key not in raw_schema:
                _LOGGER.warning("Unknown option %s", key)
                continue

            typ = raw_schema[key]
            try:
                if isinstance(typ, list):
                    # nested value list
                    options[key] = _nested_validate_list(coresys, typ[0], value, key)
                elif isinstance(typ, dict):
                    # nested value dict
                    options[key] = _nested_validate_dict(coresys, typ, value, key)
                else:
                    # normal value
                    options[key] = _single_validate(coresys, typ, value, key)
            except (IndexError, KeyError):
                raise vol.Invalid(f"Type error for {key}") from None

        _check_missing_options(raw_schema, options, "root")
        return options

    return validate
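validate_options is a factory: it closes over an add-on's raw schema block and returns a voluptuous-compatible callable. A minimal usage sketch under the assumption that coresys is an initialized CoreSys (it is needed because !secret lookups go through coresys.secrets); the schema and option values are invented:

raw_schema = {"ssl": "bool", "port": "port", "password": "password?"}
validator = validate_options(coresys, raw_schema)

options = validator({"ssl": True, "port": 8123})
# -> {"ssl": True, "port": 8123}
# "password" may be absent: keys whose type string ends in "?" are optional.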

# pylint: disable=no-value-for-parameter
# pylint: disable=inconsistent-return-statements
def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
    """Validate a single element."""
    # A required option must not be None
    if value is None:
        raise vol.Invalid(f"Missing required option '{key}'")

    # Lookup secret
    if str(value).startswith("!secret "):
        secret: str = value.partition(" ")[2]
        value = coresys.secrets.get(secret)
        if value is None:
            raise vol.Invalid(f"Unknown secret {secret}")

    # parse extend data from type
    match = RE_SCHEMA_ELEMENT.match(typ)

    # prepare range
    range_args = {}
    for group_name in _SCHEMA_LENGTH_PARTS:
        group_value = match.group(group_name)
        if group_value:
            range_args[group_name[2:]] = float(group_value)

    if typ.startswith(V_STR) or typ.startswith(V_PASSWORD):
        return vol.All(str(value), vol.Range(**range_args))(value)
    elif typ.startswith(V_INT):
        return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
    elif typ.startswith(V_FLOAT):
        return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
    elif typ.startswith(V_BOOL):
        return vol.Boolean()(value)
    elif typ.startswith(V_EMAIL):
        return vol.Email()(value)
    elif typ.startswith(V_URL):
        return vol.Url()(value)
    elif typ.startswith(V_PORT):
        return network_port(value)
    elif typ.startswith(V_MATCH):
        return vol.Match(match.group("match"))(str(value))
    elif typ.startswith(V_LIST):
        return vol.In(match.group("list").split("|"))(str(value))

    raise vol.Invalid(f"Fatal error for {key} type {typ}")

def _nested_validate_list(coresys, typ, data_list, key):
    """Validate nested list items."""
    options = []

    for element in data_list:
        # Nested?
        if isinstance(typ, dict):
            c_options = _nested_validate_dict(coresys, typ, element, key)
            options.append(c_options)
        else:
            options.append(_single_validate(coresys, typ, element, key))

    return options


def _nested_validate_dict(coresys, typ, data_dict, key):
    """Validate nested dict items."""
    options = {}

    for c_key, c_value in data_dict.items():
        # Ignore unknown options / remove from list
        if c_key not in typ:
            _LOGGER.warning("Unknown option %s", c_key)
            continue

        # Nested?
        if isinstance(typ[c_key], list):
            options[c_key] = _nested_validate_list(
                coresys, typ[c_key][0], c_value, c_key
            )
        else:
            options[c_key] = _single_validate(coresys, typ[c_key], c_value, c_key)

    _check_missing_options(typ, options, key)
    return options


def _check_missing_options(origin, exists, root):
    """Check that all required options exist."""
    missing = set(origin) - set(exists)
    for miss_opt in missing:
        if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
            continue
        raise vol.Invalid(f"Missing option {miss_opt} in {root}")

def schema_ui_options(raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
    """Generate UI schema."""
    ui_schema = []

    # read options
    for key, value in raw_schema.items():
        if isinstance(value, list):
            # nested value list
            _nested_ui_list(ui_schema, value, key)
        elif isinstance(value, dict):
            # nested value dict
            _nested_ui_dict(ui_schema, value, key)
        else:
            # normal value
            _single_ui_option(ui_schema, value, key)

    return ui_schema


def _single_ui_option(
    ui_schema: List[Dict[str, Any]], value: str, key: str, multiple: bool = False
) -> None:
    """Generate a single UI option node."""
    ui_node = {"name": key}

    # If multiple
    if multiple:
        ui_node["multiple"] = True

    # Parse extend data from type
    match = RE_SCHEMA_ELEMENT.match(value)

    # Prepare range
    for group_name in _SCHEMA_LENGTH_PARTS:
        group_value = match.group(group_name)
        if not group_value:
            continue
        if group_name[2:] == "min":
            ui_node["lengthMin"] = float(group_value)
        elif group_name[2:] == "max":
            ui_node["lengthMax"] = float(group_value)

    # Required or optional
    if value.endswith("?"):
        ui_node["optional"] = True
    else:
        ui_node["required"] = True

    # Data types
    if value.startswith(V_STR):
        ui_node["type"] = "string"
    elif value.startswith(V_PASSWORD):
        ui_node["type"] = "string"
        ui_node["format"] = "password"
    elif value.startswith(V_INT):
        ui_node["type"] = "integer"
    elif value.startswith(V_FLOAT):
        ui_node["type"] = "float"
    elif value.startswith(V_BOOL):
        ui_node["type"] = "boolean"
    elif value.startswith(V_EMAIL):
        ui_node["type"] = "string"
        ui_node["format"] = "email"
    elif value.startswith(V_URL):
        ui_node["type"] = "string"
        ui_node["format"] = "url"
    elif value.startswith(V_PORT):
        ui_node["type"] = "integer"
    elif value.startswith(V_MATCH):
        ui_node["type"] = "string"
    elif value.startswith(V_LIST):
        ui_node["type"] = "select"
        ui_node["options"] = match.group("list").split("|")

    ui_schema.append(ui_node)


def _nested_ui_list(
    ui_schema: List[Dict[str, Any]], option_list: List[Any], key: str
) -> None:
    """UI nested list items."""
    try:
        element = option_list[0]
    except IndexError:
        _LOGGER.error("Invalid schema %s", key)
        return

    if isinstance(element, dict):
        _nested_ui_dict(ui_schema, element, key, multiple=True)
    else:
        _single_ui_option(ui_schema, element, key, multiple=True)


def _nested_ui_dict(
    ui_schema: List[Dict[str, Any]],
    option_dict: Dict[str, Any],
    key: str,
    multiple: bool = False,
) -> None:
    """UI nested dict items."""
    ui_node = {"name": key, "type": "schema", "optional": True, "multiple": multiple}

    nested_schema = []
    for c_key, c_value in option_dict.items():
        # Nested?
        if isinstance(c_value, list):
            _nested_ui_list(nested_schema, c_value, c_key)
        else:
            _single_ui_option(nested_schema, c_value, c_key)

    ui_node["schema"] = nested_schema
    ui_schema.append(ui_node)
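For reference, a sketch of what schema_ui_options yields for a small raw schema; the node shapes follow directly from the code above, with dict ordering shown for illustration only:

schema_ui_options({"ssl": "bool", "port": "port", "password": "password?"})
# -> [
#     {"name": "ssl", "required": True, "type": "boolean"},
#     {"name": "port", "required": True, "type": "integer"},
#     {"name": "password", "optional": True, "type": "string", "format": "password"},
# ]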
@@ -1,51 +0,0 @@
"""Init file for Hass.io hardware RESTful API."""
import asyncio
import logging
from typing import Any, Dict

from aiohttp import web

from .utils import api_process
from ..const import (
    ATTR_SERIAL,
    ATTR_DISK,
    ATTR_GPIO,
    ATTR_AUDIO,
    ATTR_INPUT,
    ATTR_OUTPUT,
)
from ..coresys import CoreSysAttributes

_LOGGER: logging.Logger = logging.getLogger(__name__)


class APIHardware(CoreSysAttributes):
    """Handle RESTful API for hardware functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Show hardware info."""
        return {
            ATTR_SERIAL: list(
                self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
            ),
            ATTR_INPUT: list(self.sys_hardware.input_devices),
            ATTR_DISK: list(self.sys_hardware.disk_devices),
            ATTR_GPIO: list(self.sys_hardware.gpio_devices),
            ATTR_AUDIO: self.sys_hardware.audio_devices,
        }

    @api_process
    async def audio(self, request: web.Request) -> Dict[str, Any]:
        """Show ALSA audio devices."""
        return {
            ATTR_AUDIO: {
                ATTR_INPUT: self.sys_host.alsa.input_devices,
                ATTR_OUTPUT: self.sys_host.alsa.output_devices,
            }
        }

    @api_process
    def trigger(self, request: web.Request) -> None:
        """Trigger a udev device reload."""
        return asyncio.shield(self.sys_hardware.udev_trigger())
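A sketch of the payload the info handler builds, assuming the ATTR_* constants map to the key names shown; device paths are invented, and api_process may additionally wrap this dict in a result envelope before it reaches the client:

# Hypothetical hardware info payload; actual device lists depend on the host.
{
    "serial": ["/dev/ttyUSB0", "/dev/serial/by-id/usb-XYZ"],
    "input": ["/dev/input/event0"],
    "disk": ["/dev/sda"],
    "gpio": ["gpiochip0"],
    "audio": {"0": {"name": "bcm2835 ALSA"}},
}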
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,20 +0,0 @@
/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,38 +0,0 @@
/**
@license
Copyright 2018 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"sources":["webpack:///./hassio/src/ingress-view/hassio-ingress-view.ts"],"names":["customElement","HassioIngressView","property","this","_addon","html","_templateObject2","name","ingress_url","_templateObject","changedProps","_get","_getPrototypeOf","prototype","call","has","addon","route","path","substr","oldRoute","get","oldAddon","undefined","_fetchData","addonSlug","_ref","_ref2","regeneratorRuntime","async","_context","prev","next","awrap","Promise","all","fetchHassioAddonInfo","hass","Error","createHassioSession","sent","_slicedToArray","ingress","t0","console","error","alert","message","history","back","stop","css","_templateObject3","LitElement"],"mappings":"2/RAmBCA,YAAc,0CACTC,smBACHC,kEACAA,mEACAA,4EAED,WACE,OAAKC,KAAKC,OAMHC,YAAPC,IAC0BH,KAAKC,OAAOG,KACpBJ,KAAKC,OAAOI,aAPrBH,YAAPI,0CAYJ,SAAkBC,GAGhB,GAFAC,EAAAC,EApBEX,EAoBFY,WAAA,eAAAV,MAAAW,KAAAX,KAAmBO,GAEdA,EAAaK,IAAI,SAAtB,CAIA,IAAMC,EAAQb,KAAKc,MAAMC,KAAKC,OAAO,GAE/BC,EAAWV,EAAaW,IAAI,SAC5BC,EAAWF,EAAWA,EAASF,KAAKC,OAAO,QAAKI,EAElDP,GAASA,IAAUM,GACrBnB,KAAKqB,WAAWR,4CAIpB,SAAyBS,GAAzB,IAAAC,EAAAC,EAAAX,EAAA,OAAAY,mBAAAC,MAAA,SAAAC,GAAA,cAAAA,EAAAC,KAAAD,EAAAE,MAAA,cAAAF,EAAAC,KAAA,EAAAD,EAAAE,KAAA,EAAAJ,mBAAAK,MAE0BC,QAAQC,IAAI,CAChCC,YAAqBjC,KAAKkC,KAAMZ,GAAhC,MAAiD,WAC/C,MAAM,IAAIa,MAAM,iCAElBC,YAAoBpC,KAAKkC,MAAzB,MAAqC,WACnC,MAAM,IAAIC,MAAM,4CAPxB,UAAAZ,EAAAI,EAAAU,KAAAb,EAAAc,EAAAf,EAAA,IAEWV,EAFXW,EAAA,IAWee,QAXf,CAAAZ,EAAAE,KAAA,cAYY,IAAIM,MAAM,wCAZtB,OAeInC,KAAKC,OAASY,EAflBc,EAAAE,KAAA,iBAAAF,EAAAC,KAAA,GAAAD,EAAAa,GAAAb,EAAA,SAkBIc,QAAQC,MAARf,EAAAa,IACAG,MAAMhB,EAAAa,GAAII,SAAW,mCACrBC,QAAQC,OApBZ,yBAAAnB,EAAAoB,SAAA,KAAA/C,KAAA,qDAwBA,WACE,OAAOgD,YAAPC,UA7D4BC","file":"chunk.900c5d3fab8b6ebdcbc6.js","sourcesContent":["import {\n LitElement,\n customElement,\n property,\n TemplateResult,\n html,\n PropertyValues,\n CSSResult,\n css,\n} from \"lit-element\";\nimport { HomeAssistant, Route } from \"../../../src/types\";\nimport { createHassioSession } from \"../../../src/data/hassio/supervisor\";\nimport {\n HassioAddonDetails,\n fetchHassioAddonInfo,\n} from \"../../../src/data/hassio/addon\";\nimport \"../../../src/layouts/hass-loading-screen\";\nimport \"../../../src/layouts/hass-subpage\";\n\n@customElement(\"hassio-ingress-view\")\nclass HassioIngressView extends LitElement {\n @property() public hass!: HomeAssistant;\n @property() public route!: Route;\n @property() private _addon?: HassioAddonDetails;\n\n protected render(): TemplateResult {\n if (!this._addon) {\n return html`\n <hass-loading-screen></hass-loading-screen>\n `;\n }\n\n return html`\n <hass-subpage .header=${this._addon.name} hassio>\n <iframe src=${this._addon.ingress_url}></iframe>\n </hass-subpage>\n `;\n }\n\n protected updated(changedProps: PropertyValues) {\n super.firstUpdated(changedProps);\n\n if (!changedProps.has(\"route\")) {\n return;\n }\n\n const addon = this.route.path.substr(1);\n\n const oldRoute = changedProps.get(\"route\") as this[\"route\"] | undefined;\n const oldAddon = oldRoute ? 
oldRoute.path.substr(1) : undefined;\n\n if (addon && addon !== oldAddon) {\n this._fetchData(addon);\n }\n }\n\n private async _fetchData(addonSlug: string) {\n try {\n const [addon] = await Promise.all([\n fetchHassioAddonInfo(this.hass, addonSlug).catch(() => {\n throw new Error(\"Failed to fetch add-on info\");\n }),\n createHassioSession(this.hass).catch(() => {\n throw new Error(\"Failed to create an ingress session\");\n }),\n ]);\n\n if (!addon.ingress) {\n throw new Error(\"This add-on does not support ingress\");\n }\n\n this._addon = addon;\n } catch (err) {\n // tslint:disable-next-line\n console.error(err);\n alert(err.message || \"Unknown error starting ingress.\");\n history.back();\n }\n }\n\n static get styles(): CSSResult {\n return css`\n iframe {\n display: block;\n width: 100%;\n height: 100%;\n border: 0;\n }\n paper-icon-button {\n color: var(--text-primary-color);\n }\n `;\n }\n}\n\ndeclare global {\n interface HTMLElementTagNameMap {\n \"hassio-ingress-view\": HassioIngressView;\n }\n}\n"],"sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],c=0,f=[];c<a.length;c++)o=a[c],Object.prototype.hasOwnProperty.call(r,o)&&r[o]&&f.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(u&&u(n);f.length;)f.shift()()}var t={},r={3:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,c=document.createElement("script");c.charset="utf-8",c.timeout=120,o.nc&&c.setAttribute("nonce",o.nc),c.src=function(e){return o.p+"chunk."+{0:"50202a3f8d4670c9454d",1:"9cea224f33b375867edd",2:"c0a46a38d689ab648885",4:"b21a4609308c9b8ef180",5:"a1b6b616fc89c412f5b6",6:"900c5d3fab8b6ebdcbc6",7:"a4f9950b101883805252",8:"884d6e32c83f99e41040",9:"84aaaba4c4734f1c2e21",10:"12902324b918e12549ba"}[e]+".js"}(e);var u=new Error;i=function(n){c.onerror=c.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src;u.message="Loading chunk "+e+" failed.\n("+o+": "+a+")",u.name="ChunkLoadError",u.type=o,u.request=a,t[1](u)}r[e]=void 0}};var f=setTimeout(function(){i({type:"timeout",target:c})},12e4);c.onerror=c.onload=i,document.head.appendChild(c)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=self.webpackJsonp=self.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var c=0;c<a.length;c++)n(a[c]);var u=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(5)]).then(t.bind(null,2)),Promise.all([t.e(0),t.e(10),t.e(7)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
//# sourceMappingURL=entrypoint.582baa2f.js.map
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],c=0,f=[];c<a.length;c++)o=a[c],Object.prototype.hasOwnProperty.call(r,o)&&r[o]&&f.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(u&&u(n);f.length;)f.shift()()}var t={},r={3:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,c=document.createElement("script");c.charset="utf-8",c.timeout=120,o.nc&&c.setAttribute("nonce",o.nc),c.src=function(e){return o.p+"chunk."+{0:"50202a3f8d4670c9454d",1:"9cea224f33b375867edd",2:"c0a46a38d689ab648885",4:"b21a4609308c9b8ef180",5:"a1b6b616fc89c412f5b6",6:"900c5d3fab8b6ebdcbc6",7:"a4f9950b101883805252",8:"884d6e32c83f99e41040",9:"84aaaba4c4734f1c2e21",10:"12902324b918e12549ba"}[e]+".js"}(e);var u=new Error;i=function(n){c.onerror=c.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src;u.message="Loading chunk "+e+" failed.\n("+o+": "+a+")",u.name="ChunkLoadError",u.type=o,u.request=a,t[1](u)}r[e]=void 0}};var f=setTimeout(function(){i({type:"timeout",target:c})},12e4);c.onerror=c.onload=i,document.head.appendChild(c)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=self.webpackJsonp=self.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var c=0;c<a.length;c++)n(a[c]);var u=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(5)]).then(t.bind(null,2)),Promise.all([t.e(0),t.e(10),t.e(7)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
//# sourceMappingURL=entrypoint.js.map
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,38 +0,0 @@
<!doctype html>
<html>
<head>
  <meta charset="utf-8">
  <title>Hass.io</title>
  <meta name='viewport' content='width=device-width, user-scalable=no'>
  <style>
    body {
      height: 100vh;
      margin: 0;
      padding: 0;
    }
  </style>
  <script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
</head>
<body>
  <hassio-app></hassio-app>
  <script>
    function addScript(src) {
      var e = document.createElement('script');
      e.src = src;
      document.write(e.outerHTML);
    }
    var webComponentsSupported = (
      'customElements' in window &&
      'import' in document.createElement('link') &&
      'content' in document.createElement('template'));
    if (!webComponentsSupported) {
      addScript('/static/webcomponents-lite.js');
    }
  </script>
  <!--
    Disabled while we make Home Assistant able to serve the right files.
    <script src="./app.js"></script>
  -->
  <link rel='import' href='./hassio-app.html'>
</body>
</html>
Binary file not shown.
@@ -1,31 +0,0 @@
{
    "vendors~hassio-icons~hassio-main.js": "/api/hassio/app/chunk.50202a3f8d4670c9454d.js",
    "vendors~hassio-icons~hassio-main.js.map": "/api/hassio/app/chunk.50202a3f8d4670c9454d.js.map",
    "dialog-hassio-markdown.js": "/api/hassio/app/chunk.9cea224f33b375867edd.js",
    "dialog-hassio-markdown.js.map": "/api/hassio/app/chunk.9cea224f33b375867edd.js.map",
    "dialog-hassio-snapshot.js": "/api/hassio/app/chunk.c0a46a38d689ab648885.js",
    "dialog-hassio-snapshot.js.map": "/api/hassio/app/chunk.c0a46a38d689ab648885.js.map",
    "entrypoint.js": "/api/hassio/app/entrypoint.js",
    "entrypoint.js.map": "/api/hassio/app/entrypoint.js.map",
    "hassio-addon-view.js": "/api/hassio/app/chunk.b21a4609308c9b8ef180.js",
    "hassio-addon-view.js.map": "/api/hassio/app/chunk.b21a4609308c9b8ef180.js.map",
    "hassio-icons.js": "/api/hassio/app/chunk.a1b6b616fc89c412f5b6.js",
    "hassio-icons.js.map": "/api/hassio/app/chunk.a1b6b616fc89c412f5b6.js.map",
    "hassio-ingress-view.js": "/api/hassio/app/chunk.900c5d3fab8b6ebdcbc6.js",
    "hassio-ingress-view.js.map": "/api/hassio/app/chunk.900c5d3fab8b6ebdcbc6.js.map",
    "hassio-main.js": "/api/hassio/app/chunk.a4f9950b101883805252.js",
    "hassio-main.js.map": "/api/hassio/app/chunk.a4f9950b101883805252.js.map",
    "mdi-icons.js": "/api/hassio/app/chunk.884d6e32c83f99e41040.js",
    "mdi-icons.js.map": "/api/hassio/app/chunk.884d6e32c83f99e41040.js.map",
    "vendors~hassio-addon-view.js": "/api/hassio/app/chunk.84aaaba4c4734f1c2e21.js",
    "vendors~hassio-addon-view.js.map": "/api/hassio/app/chunk.84aaaba4c4734f1c2e21.js.map",
    "vendors~hassio-main.js": "/api/hassio/app/chunk.12902324b918e12549ba.js",
    "vendors~hassio-main.js.map": "/api/hassio/app/chunk.12902324b918e12549ba.js.map",
    "201359fd5a526afe13ef.worker.js": "/api/hassio/app/201359fd5a526afe13ef.worker.js",
    "201359fd5a526afe13ef.worker.js.map": "/api/hassio/app/201359fd5a526afe13ef.worker.js.map",
    "chunk.12902324b918e12549ba.js.LICENSE": "/api/hassio/app/chunk.12902324b918e12549ba.js.LICENSE",
    "chunk.50202a3f8d4670c9454d.js.LICENSE": "/api/hassio/app/chunk.50202a3f8d4670c9454d.js.LICENSE",
    "chunk.84aaaba4c4734f1c2e21.js.LICENSE": "/api/hassio/app/chunk.84aaaba4c4734f1c2e21.js.LICENSE",
    "chunk.9cea224f33b375867edd.js.LICENSE": "/api/hassio/app/chunk.9cea224f33b375867edd.js.LICENSE",
    "chunk.c0a46a38d689ab648885.js.LICENSE": "/api/hassio/app/chunk.c0a46a38d689ab648885.js.LICENSE"
}
@@ -1,247 +0,0 @@
"""Bootstrap Hass.io."""
import logging
import os
from pathlib import Path
import shutil
import signal

from colorlog import ColoredFormatter

from .addons import AddonManager
from .api import RestAPI
from .arch import CpuArch
from .auth import Auth
from .const import CHANNEL_DEV, SOCKET_DOCKER
from .core import HassIO
from .coresys import CoreSys
from .dbus import DBusManager
from .discovery import Discovery
from .dns import CoreDNS
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .ingress import Ingress
from .services import ServiceManager
from .snapshots import SnapshotManager
from .store import StoreManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater
from .secrets import SecretsManager
from .utils.dt import fetch_timezone

_LOGGER: logging.Logger = logging.getLogger(__name__)

ENV_SHARE = "SUPERVISOR_SHARE"
ENV_NAME = "SUPERVISOR_NAME"
ENV_REPO = "HOMEASSISTANT_REPOSITORY"

MACHINE_ID = Path("/etc/machine-id")


async def initialize_coresys():
    """Initialize HassIO coresys/objects."""
    coresys = CoreSys()

    # Initialize core objects
    coresys.core = HassIO(coresys)
    coresys.dns = CoreDNS(coresys)
    coresys.arch = CpuArch(coresys)
    coresys.auth = Auth(coresys)
    coresys.updater = Updater(coresys)
    coresys.api = RestAPI(coresys)
    coresys.supervisor = Supervisor(coresys)
    coresys.homeassistant = HomeAssistant(coresys)
    coresys.addons = AddonManager(coresys)
    coresys.snapshots = SnapshotManager(coresys)
    coresys.host = HostManager(coresys)
    coresys.ingress = Ingress(coresys)
    coresys.tasks = Tasks(coresys)
    coresys.services = ServiceManager(coresys)
    coresys.store = StoreManager(coresys)
    coresys.discovery = Discovery(coresys)
    coresys.dbus = DBusManager(coresys)
    coresys.hassos = HassOS(coresys)
    coresys.secrets = SecretsManager(coresys)

    # bootstrap config
    initialize_system_data(coresys)

    # Set Machine/Host ID
    if MACHINE_ID.exists():
        coresys.machine_id = MACHINE_ID.read_text().strip()

    # Init TimeZone
    if coresys.config.timezone == "UTC":
        coresys.config.timezone = await fetch_timezone(coresys.websession)

    return coresys


def initialize_system_data(coresys: CoreSys):
    """Set up the default configuration and create folders."""
    config = coresys.config

    # Home Assistant configuration folder
    if not config.path_homeassistant.is_dir():
        _LOGGER.info(
            "Create Home Assistant configuration folder %s", config.path_homeassistant
        )
        config.path_homeassistant.mkdir()

    # hassio ssl folder
    if not config.path_ssl.is_dir():
        _LOGGER.info("Create Hass.io SSL/TLS folder %s", config.path_ssl)
        config.path_ssl.mkdir()

    # hassio addon data folder
    if not config.path_addons_data.is_dir():
        _LOGGER.info("Create Hass.io Add-on data folder %s", config.path_addons_data)
        config.path_addons_data.mkdir(parents=True)

    if not config.path_addons_local.is_dir():
        _LOGGER.info(
            "Create Hass.io Add-on local repository folder %s", config.path_addons_local
        )
        config.path_addons_local.mkdir(parents=True)

    if not config.path_addons_git.is_dir():
        _LOGGER.info(
            "Create Hass.io Add-on git repositories folder %s", config.path_addons_git
        )
        config.path_addons_git.mkdir(parents=True)

    # hassio tmp folder
    if not config.path_tmp.is_dir():
        _LOGGER.info("Create Hass.io temp folder %s", config.path_tmp)
        config.path_tmp.mkdir(parents=True)

    # hassio backup folder
    if not config.path_backup.is_dir():
        _LOGGER.info("Create Hass.io backup folder %s", config.path_backup)
        config.path_backup.mkdir()

    # share folder
    if not config.path_share.is_dir():
        _LOGGER.info("Create Hass.io share folder %s", config.path_share)
        config.path_share.mkdir()

    # apparmor folder
    if not config.path_apparmor.is_dir():
        _LOGGER.info("Create Hass.io Apparmor folder %s", config.path_apparmor)
        config.path_apparmor.mkdir()

    # dns folder
    if not config.path_dns.is_dir():
        _LOGGER.info("Create Hass.io DNS folder %s", config.path_dns)
        config.path_dns.mkdir()

    # Update log level
    coresys.config.modify_log_level()

    # Check if ENV is in development mode
    if bool(os.environ.get("SUPERVISOR_DEV", 0)):
        _LOGGER.warning("SUPERVISOR_DEV is set")
        coresys.updater.channel = CHANNEL_DEV
        coresys.config.logging = "debug"
        coresys.config.debug = True


def migrate_system_env(coresys: CoreSys):
    """Clean up leftovers after an update."""
    config = coresys.config

    # hass.io 0.37 -> 0.38
    old_build = Path(config.path_hassio, "addons/build")
    if old_build.is_dir():
        try:
            old_build.rmdir()
        except OSError:
            _LOGGER.warning("Can't cleanup old Add-on build directory")


def initialize_logging():
    """Set up logging."""
    logging.basicConfig(level=logging.INFO)
    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    colorfmt = f"%(log_color)s{fmt}%(reset)s"
    datefmt = "%y-%m-%d %H:%M:%S"

    # suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    logging.getLogger().handlers[0].setFormatter(
        ColoredFormatter(
            colorfmt,
            datefmt=datefmt,
            reset=True,
            log_colors={
                "DEBUG": "cyan",
                "INFO": "green",
                "WARNING": "yellow",
                "ERROR": "red",
                "CRITICAL": "red",
            },
        )
    )


def check_environment():
    """Check that all required environment variables and binaries exist."""
    # check environment variables
    for key in (ENV_SHARE, ENV_NAME, ENV_REPO):
        try:
            os.environ[key]
        except KeyError:
            _LOGGER.fatal("Can't find %s in env!", key)
            return False

    # check docker socket
    if not SOCKET_DOCKER.is_socket():
        _LOGGER.fatal("Can't find Docker socket!")
        return False

    # check socat exec
    if not shutil.which("socat"):
        _LOGGER.fatal("Can't find socat!")
        return False

    # check gdbus exec
    if not shutil.which("gdbus"):
        _LOGGER.fatal("Can't find gdbus!")
        return False

    return True


def reg_signal(loop):
    """Register SIGTERM, SIGHUP and SIGINT to stop the event loop."""
    try:
        loop.add_signal_handler(signal.SIGTERM, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGTERM")

    try:
        loop.add_signal_handler(signal.SIGHUP, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGHUP")

    try:
        loop.add_signal_handler(signal.SIGINT, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGINT")


def supervisor_debugger(coresys: CoreSys) -> None:
    """Set up the ptvsd debugger if needed."""
    if not coresys.config.debug:
        return
    # pylint: disable=import-outside-toplevel
    import ptvsd

    _LOGGER.info("Initialize Hass.io debugger")

    ptvsd.enable_attach(address=("0.0.0.0", 33333), redirect_output=True)
    if coresys.config.debug_block:
        _LOGGER.info("Wait until debugger is attached")
        ptvsd.wait_for_attach()
242  hassio/config.py
@@ -1,242 +0,0 @@
"""Bootstrap Hass.io."""
from datetime import datetime
import logging
import os
from pathlib import Path, PurePath

from .const import (
    ATTR_ADDONS_CUSTOM_LIST,
    ATTR_DEBUG,
    ATTR_DEBUG_BLOCK,
    ATTR_LAST_BOOT,
    ATTR_LOGGING,
    ATTR_TIMEZONE,
    ATTR_WAIT_BOOT,
    FILE_HASSIO_CONFIG,
    HASSIO_DATA,
)
from .utils.dt import parse_datetime
from .utils.json import JsonConfig
from .validate import SCHEMA_HASSIO_CONFIG

_LOGGER: logging.Logger = logging.getLogger(__name__)

HOMEASSISTANT_CONFIG = PurePath("homeassistant")

HASSIO_SSL = PurePath("ssl")

ADDONS_CORE = PurePath("addons/core")
ADDONS_LOCAL = PurePath("addons/local")
ADDONS_GIT = PurePath("addons/git")
ADDONS_DATA = PurePath("addons/data")

BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share")
TMP_DATA = PurePath("tmp")
APPARMOR_DATA = PurePath("apparmor")
DNS_DATA = PurePath("dns")

DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()


class CoreConfig(JsonConfig):
    """Hold all core config data."""

    def __init__(self):
        """Initialize config object."""
        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)

    @property
    def timezone(self):
        """Return system timezone."""
        return self._data[ATTR_TIMEZONE]

    @timezone.setter
    def timezone(self, value):
        """Set system timezone."""
        self._data[ATTR_TIMEZONE] = value

    @property
    def wait_boot(self) -> int:
        """Return wait time for auto boot stages."""
        return self._data[ATTR_WAIT_BOOT]

    @wait_boot.setter
    def wait_boot(self, value: int):
        """Set wait boot time."""
        self._data[ATTR_WAIT_BOOT] = value

    @property
    def debug(self) -> bool:
        """Return True if ptvsd is enabled."""
        return self._data[ATTR_DEBUG]

    @debug.setter
    def debug(self, value: bool):
        """Set debug mode."""
        self._data[ATTR_DEBUG] = value

    @property
    def debug_block(self) -> bool:
        """Return True if ptvsd should wait for a debugger to attach."""
        return self._data[ATTR_DEBUG_BLOCK]

    @debug_block.setter
    def debug_block(self, value: bool):
        """Set debug wait mode."""
        self._data[ATTR_DEBUG_BLOCK] = value

    @property
    def logging(self) -> str:
        """Return log level of system."""
        return self._data[ATTR_LOGGING]

    @logging.setter
    def logging(self, value: str):
        """Set system log level."""
        self._data[ATTR_LOGGING] = value
        self.modify_log_level()

    def modify_log_level(self) -> None:
        """Change log level."""
        lvl = getattr(logging, self.logging.upper())
        logging.getLogger("hassio").setLevel(lvl)

    @property
    def last_boot(self):
        """Return last boot datetime."""
        boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)

        boot_time = parse_datetime(boot_str)
        if not boot_time:
            return datetime.utcfromtimestamp(1)
        return boot_time

    @last_boot.setter
    def last_boot(self, value):
        """Set last boot datetime."""
        self._data[ATTR_LAST_BOOT] = value.isoformat()

    @property
    def path_hassio(self):
        """Return Hass.io data path."""
        return HASSIO_DATA

    @property
    def path_extern_hassio(self):
        """Return Hass.io data path external for Docker."""
        return PurePath(os.environ["SUPERVISOR_SHARE"])

    @property
    def path_extern_homeassistant(self):
        """Return config path external for Docker."""
        return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))

    @property
    def path_homeassistant(self):
        """Return config path inside supervisor."""
        return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)

    @property
    def path_extern_ssl(self):
        """Return SSL path external for Docker."""
        return str(PurePath(self.path_extern_hassio, HASSIO_SSL))

    @property
    def path_ssl(self):
        """Return SSL path inside supervisor."""
        return Path(HASSIO_DATA, HASSIO_SSL)

    @property
    def path_addons_core(self):
        """Return git path for core Add-ons."""
        return Path(HASSIO_DATA, ADDONS_CORE)

    @property
    def path_addons_git(self):
        """Return path for Git Add-ons."""
        return Path(HASSIO_DATA, ADDONS_GIT)

    @property
    def path_addons_local(self):
        """Return path for custom Add-ons."""
        return Path(HASSIO_DATA, ADDONS_LOCAL)

    @property
    def path_extern_addons_local(self):
        """Return path for custom Add-ons external for Docker."""
        return PurePath(self.path_extern_hassio, ADDONS_LOCAL)

    @property
    def path_addons_data(self):
        """Return root Add-on data folder."""
        return Path(HASSIO_DATA, ADDONS_DATA)

    @property
    def path_extern_addons_data(self):
        """Return root add-on data folder external for Docker."""
        return PurePath(self.path_extern_hassio, ADDONS_DATA)

    @property
    def path_tmp(self):
        """Return Hass.io temp folder."""
        return Path(HASSIO_DATA, TMP_DATA)

    @property
    def path_extern_tmp(self):
        """Return Hass.io temp folder for Docker."""
        return PurePath(self.path_extern_hassio, TMP_DATA)

    @property
    def path_backup(self):
        """Return root backup data folder."""
        return Path(HASSIO_DATA, BACKUP_DATA)

    @property
    def path_extern_backup(self):
        """Return root backup data folder external for Docker."""
        return PurePath(self.path_extern_hassio, BACKUP_DATA)

    @property
    def path_share(self):
        """Return root share data folder."""
        return Path(HASSIO_DATA, SHARE_DATA)

    @property
    def path_apparmor(self):
        """Return root Apparmor profile folder."""
        return Path(HASSIO_DATA, APPARMOR_DATA)

    @property
    def path_extern_share(self):
        """Return root share data folder external for Docker."""
        return PurePath(self.path_extern_hassio, SHARE_DATA)

    @property
    def path_extern_dns(self):
        """Return dns path external for Docker."""
        return str(PurePath(self.path_extern_hassio, DNS_DATA))

    @property
    def path_dns(self):
        """Return dns path inside supervisor."""
        return Path(HASSIO_DATA, DNS_DATA)

    @property
    def addons_repositories(self):
        """Return list of custom Add-on repositories."""
        return self._data[ATTR_ADDONS_CUSTOM_LIST]

    def add_addon_repository(self, repo):
        """Add a custom repository to the list."""
        if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)

    def drop_addon_repository(self, repo):
        """Remove a custom repository from the list."""
        if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
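CoreConfig keeps every folder twice: path_* is the location inside the Supervisor container (rooted at HASSIO_DATA), while path_extern_* is the same folder as the Docker host sees it (rooted at SUPERVISOR_SHARE), which is what bind mounts need. A sketch of the pairing, with purely hypothetical root values:

# Assuming HASSIO_DATA = /data and SUPERVISOR_SHARE = /usr/share/hassio:
config.path_backup         # Path("/data/backup") - usable inside the container
config.path_extern_backup  # PurePath("/usr/share/hassio/backup") - for Docker mounts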
196  hassio/core.py
@@ -1,196 +0,0 @@
"""Main file for Hass.io."""
from contextlib import suppress
import asyncio
import logging

import async_timeout

from .coresys import CoreSysAttributes
from .const import (
    STARTUP_SYSTEM,
    STARTUP_SERVICES,
    STARTUP_APPLICATION,
    STARTUP_INITIALIZE,
)
from .exceptions import HassioError, HomeAssistantError, SupervisorUpdateError

_LOGGER: logging.Logger = logging.getLogger(__name__)


class HassIO(CoreSysAttributes):
    """Main object of Hass.io."""

    def __init__(self, coresys):
        """Initialize Hass.io object."""
        self.coresys = coresys

    async def connect(self):
        """Connect Supervisor container."""
        await self.sys_supervisor.load()

    async def setup(self):
        """Set up HassIO orchestration."""
        # Load DBus
        await self.sys_dbus.load()

        # Load Host
        await self.sys_host.load()

        # Load CoreDNS
        await self.sys_dns.load()

        # Load Home Assistant
        await self.sys_homeassistant.load()

        # Load CPU/Arch
        await self.sys_arch.load()

        # Load HassOS
        await self.sys_hassos.load()

        # Load Stores
        await self.sys_store.load()

        # Load Add-ons
        await self.sys_addons.load()

        # rest api views
        await self.sys_api.load()

        # load last available update data
        await self.sys_updater.load()

        # load last available snapshot data
        await self.sys_snapshots.load()

        # load services
        await self.sys_services.load()

        # Load discovery
        await self.sys_discovery.load()

        # Load ingress
        await self.sys_ingress.load()

        # Load secrets
        await self.sys_secrets.load()

    async def start(self):
        """Start Hass.io orchestration."""
        await self.sys_api.start()

        # Mark booted partition as healthy
        if self.sys_hassos.available:
            await self.sys_hassos.mark_healthy()

        # On release channel, try to update the Supervisor itself
        if self.sys_supervisor.need_update:
            try:
                if self.sys_dev:
                    _LOGGER.warning("Ignore Hass.io updates on dev!")
                else:
                    await self.sys_supervisor.update()
            except SupervisorUpdateError:
                _LOGGER.fatal(
                    "Can't update supervisor! This will break some Add-ons or affect "
                    "future versions of Home Assistant!"
                )

        # Start add-ons marked as initialize
        await self.sys_addons.boot(STARTUP_INITIALIZE)

        try:
            # Home Assistant is already running; the Supervisor was only rebooted
            if self.sys_hardware.last_boot == self.sys_config.last_boot:
                _LOGGER.info("Hass.io reboot detected")
                return

            # reset registered services / discovery
            self.sys_services.reset()

            # start add-ons marked as system
            await self.sys_addons.boot(STARTUP_SYSTEM)

            # start add-ons marked as services
            await self.sys_addons.boot(STARTUP_SERVICES)

            # run Home Assistant
            if self.sys_homeassistant.boot:
                with suppress(HomeAssistantError):
                    await self.sys_homeassistant.start()

            # start add-ons marked as application
            await self.sys_addons.boot(STARTUP_APPLICATION)

            # store new last boot
            self._update_last_boot()

        finally:
            # Add core tasks into scheduler
            await self.sys_tasks.load()

            # If on the landingpage, run the upgrade in the background
            if self.sys_homeassistant.version == "landingpage":
                self.sys_create_task(self.sys_homeassistant.install())

            _LOGGER.info("Hass.io is up and running")

    async def stop(self):
        """Stop a running orchestration."""
        # don't process scheduler anymore
        self.sys_scheduler.suspend = True

        # store new last boot / prevent time adjustments
        self._update_last_boot()

        # process async stop tasks
        try:
            with async_timeout.timeout(10):
                await asyncio.wait(
                    [
                        self.sys_api.stop(),
                        self.sys_websession.close(),
                        self.sys_websession_ssl.close(),
                        self.sys_ingress.unload(),
                        self.sys_dns.unload(),
                    ]
                )
        except asyncio.TimeoutError:
            _LOGGER.warning("Force Shutdown!")

        _LOGGER.info("Hass.io is down")

    async def shutdown(self):
        """Shutdown all running containers in correct order."""
        await self.sys_addons.shutdown(STARTUP_APPLICATION)

        # Close Home Assistant
        with suppress(HassioError):
            await self.sys_homeassistant.stop()

        await self.sys_addons.shutdown(STARTUP_SERVICES)
        await self.sys_addons.shutdown(STARTUP_SYSTEM)
        await self.sys_addons.shutdown(STARTUP_INITIALIZE)

    def _update_last_boot(self):
        """Update last boot time."""
        self.sys_config.last_boot = self.sys_hardware.last_boot
        self.sys_config.save_data()

    async def repair(self):
        """Repair system integrity."""
        _LOGGER.info("Start repairing of Hass.io Environment")
        await self.sys_run_in_executor(self.sys_docker.repair)

        # Restore core functionality
        await self.sys_dns.repair()
        await self.sys_addons.repair()
        await self.sys_homeassistant.repair()

        # Fix HassOS specific
        if self.sys_hassos.available:
            await self.sys_hassos.repair_cli()

        # Tag version for latest
        await self.sys_supervisor.repair()
        _LOGGER.info("Finished repairing of Hass.io Environment")
@@ -1,17 +0,0 @@
pcm.!default {
    type asym
    capture.pcm "mic"
    playback.pcm "speaker"
}
pcm.mic {
    type plug
    slave {
        pcm "hw:$input"
    }
}
pcm.speaker {
    type plug
    slave {
        pcm "hw:$output"
    }
}
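The $input/$output markers in this ALSA template are presumably substituted with card,device pairs (like the "0,0" entries in the device database below) before the file is handed to a container. A minimal sketch of that substitution, assuming plain string templating; the real consumer of this template lives elsewhere in the Supervisor and may use a different mechanism:

from pathlib import Path
from string import Template

# Hypothetical rendering step with invented device indices.
asound_tmpl = Template(Path("asound.tmpl").read_text())
rendered = asound_tmpl.safe_substitute(input="0,1", output="0,0")
# pcm "hw:$output" becomes pcm "hw:0,0"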
@@ -1,18 +0,0 @@
{
    "raspberrypi3": {
        "bcm2835 - bcm2835 ALSA": {
            "0,0": "Raspberry Jack",
            "0,1": "Raspberry HDMI"
        },
        "output": "0,0",
        "input": null
    },
    "raspberrypi2": {
        "output": "0,0",
        "input": null
    },
    "raspberrypi": {
        "output": "0,0",
        "input": null
    }
}
@@ -1,15 +0,0 @@
.:53 {
    log
    errors
    hosts /config/hosts {
        fallthrough
    }
    template ANY AAAA local.hass.io hassio {
        rcode NOERROR
    }
    forward . $servers {
        except local.hass.io
        policy sequential
        health_check 10s
    }
}
@@ -1,2 +0,0 @@
$supervisor hassio supervisor.local.hass.io hassio.local.hass.io
$homeassistant homeassistant homeassistant.local.hass.io home-assistant.local.hass.io
@@ -1,57 +0,0 @@
"""D-Bus interface objects."""
import logging

from .systemd import Systemd
from .hostname import Hostname
from .rauc import Rauc
from .nmi_dns import NMIDnsManager
from ..coresys import CoreSysAttributes, CoreSys
from ..exceptions import DBusNotConnectedError

_LOGGER: logging.Logger = logging.getLogger(__name__)


class DBusManager(CoreSysAttributes):
    """A DBus Interface handler."""

    def __init__(self, coresys: CoreSys) -> None:
        """Initialize D-Bus interface."""
        self.coresys: CoreSys = coresys

        self._systemd: Systemd = Systemd()
        self._hostname: Hostname = Hostname()
        self._rauc: Rauc = Rauc()
        self._nmi_dns: NMIDnsManager = NMIDnsManager()

    @property
    def systemd(self) -> Systemd:
        """Return the systemd interface."""
        return self._systemd

    @property
    def hostname(self) -> Hostname:
        """Return the hostname interface."""
        return self._hostname

    @property
    def rauc(self) -> Rauc:
        """Return the rauc interface."""
        return self._rauc

    @property
    def nmi_dns(self) -> NMIDnsManager:
        """Return NetworkManager DNS interface."""
        return self._nmi_dns

    async def load(self) -> None:
        """Connect interfaces to D-Bus."""
        try:
            await self.systemd.connect()
            await self.hostname.connect()
            await self.rauc.connect()
            await self.nmi_dns.connect()
        except DBusNotConnectedError:
            _LOGGER.error(
                "No D-Bus support from host; all host control is disabled!"
            )
@@ -1,19 +0,0 @@
"""Interface class for D-Bus wrappers."""
from typing import Optional

from ..utils.gdbus import DBus


class DBusInterface:
    """Handle D-Bus interface for hostname/system."""

    dbus: Optional[DBus] = None

    @property
    def is_connected(self):
        """Return True if connected to D-Bus."""
        return self.dbus is not None

    async def connect(self):
        """Connect to D-Bus."""
        raise NotImplementedError()
@@ -1,11 +0,0 @@
"""Discovery service for Home Panel."""
import voluptuous as vol

from hassio.validate import network_port

from ..const import ATTR_HOST, ATTR_PORT


SCHEMA = vol.Schema(
    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): network_port}
)
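Each discovery service ships a small schema like this; a sketch of validating a Home Panel announcement (host and port invented for illustration):

# Hypothetical payload an add-on would send for Home Panel discovery.
payload = SCHEMA({"host": "172.30.32.1", "port": 8234})
# -> {"host": "172.30.32.1", "port": 8234}
# An out-of-range port is rejected by network_port with vol.Invalid.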
@@ -1,187 +0,0 @@
"""Init file for Hass.io Docker object."""
from contextlib import suppress
from ipaddress import IPv4Address
import logging
from typing import Any, Dict, Optional

import attr
import docker

from ..const import SOCKET_DOCKER, DNS_SUFFIX
from ..exceptions import DockerAPIError
from .network import DockerNetwork

_LOGGER: logging.Logger = logging.getLogger(__name__)


@attr.s(frozen=True)
class CommandReturn:
    """Return object from command run."""

    exit_code: int = attr.ib()
    output: bytes = attr.ib()


class DockerAPI:
    """Docker Hass.io wrapper.

    This class is not AsyncIO safe!
    """

    def __init__(self):
        """Initialize Docker base wrapper."""
        self.docker: docker.DockerClient = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version="auto", timeout=900
        )
        self.network: DockerNetwork = DockerNetwork(self.docker)

    @property
    def images(self) -> docker.models.images.ImageCollection:
        """Return API images."""
        return self.docker.images

    @property
    def containers(self) -> docker.models.containers.ContainerCollection:
        """Return API containers."""
        return self.docker.containers

    @property
    def api(self) -> docker.APIClient:
        """Return the low-level API client."""
        return self.docker.api

    def run(
        self,
        image: str,
        version: str = "latest",
        dns: bool = True,
        ipv4: Optional[IPv4Address] = None,
        **kwargs: Dict[str, Any],
    ) -> docker.models.containers.Container:
        """Create a Docker container and run it.

        Must be run inside an executor.
        """
        name: str = kwargs.get("name")
        network_mode: str = kwargs.get("network_mode")
        hostname: str = kwargs.get("hostname")

        # Setup DNS
        if dns:
            kwargs["dns"] = [str(self.network.dns)]
            kwargs["dns_search"] = [DNS_SUFFIX]
            kwargs["domainname"] = DNS_SUFFIX

        # Setup network
        if not network_mode:
            kwargs["network"] = None

        # Create container
        try:
            container = self.docker.containers.create(
                f"{image}:{version}", use_config_proxy=False, **kwargs
            )
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't create container from %s: %s", name, err)
            raise DockerAPIError() from None

        # Attach network
        if not network_mode:
            alias = [hostname] if hostname else None
            try:
                self.network.attach_container(container, alias=alias, ipv4=ipv4)
            except DockerAPIError:
                _LOGGER.warning("Can't attach %s to hassio-net!", name)
            else:
                with suppress(DockerAPIError):
                    self.network.detach_default_bridge(container)

        # Run container
        try:
            container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s: %s", name, err)
            raise DockerAPIError() from None

        # Update metadata
        with suppress(docker.errors.DockerException):
            container.reload()

        return container
||||
def run_command(
|
||||
self,
|
||||
image: str,
|
||||
version: str = "latest",
|
||||
command: Optional[str] = None,
|
||||
**kwargs: Dict[str, Any],
|
||||
) -> CommandReturn:
|
||||
"""Create a temporary container and run command.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
stdout = kwargs.get("stdout", True)
|
||||
stderr = kwargs.get("stderr", True)
|
||||
|
||||
_LOGGER.info("Run command '%s' on %s", command, image)
|
||||
try:
|
||||
container = self.docker.containers.run(
|
||||
f"{image}:{version}",
|
||||
command=command,
|
||||
network=self.network.name,
|
||||
use_config_proxy=False,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# wait until command is done
|
||||
result = container.wait()
|
||||
output = container.logs(stdout=stdout, stderr=stderr)
|
||||
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.error("Can't execute command: %s", err)
|
||||
raise DockerAPIError() from None
|
||||
|
||||
finally:
|
||||
# cleanup container
|
||||
with suppress(docker.errors.DockerException):
|
||||
container.remove(force=True)
|
||||
|
||||
return CommandReturn(result.get("StatusCode"), output)
|
||||
|
||||
def repair(self) -> None:
|
||||
"""Repair local docker overlayfs2 issues."""
|
||||
|
||||
_LOGGER.info("Prune stale containers")
|
||||
try:
|
||||
output = self.docker.api.prune_containers()
|
||||
_LOGGER.debug("Containers prune: %s", output)
|
||||
except docker.errors.APIError as err:
|
||||
_LOGGER.warning("Error for containers prune: %s", err)
|
||||
|
||||
_LOGGER.info("Prune stale images")
|
||||
try:
|
||||
output = self.docker.api.prune_images(filters={"dangling": False})
|
||||
_LOGGER.debug("Images prune: %s", output)
|
||||
except docker.errors.APIError as err:
|
||||
_LOGGER.warning("Error for images prune: %s", err)
|
||||
|
||||
_LOGGER.info("Prune stale builds")
|
||||
try:
|
||||
output = self.docker.api.prune_builds()
|
||||
_LOGGER.debug("Builds prune: %s", output)
|
||||
except docker.errors.APIError as err:
|
||||
_LOGGER.warning("Error for builds prune: %s", err)
|
||||
|
||||
_LOGGER.info("Prune stale volumes")
|
||||
try:
|
||||
output = self.docker.api.prune_builds()
|
||||
_LOGGER.debug("Volumes prune: %s", output)
|
||||
except docker.errors.APIError as err:
|
||||
_LOGGER.warning("Error for volumes prune: %s", err)
|
||||
|
||||
_LOGGER.info("Prune stale networks")
|
||||
try:
|
||||
output = self.docker.api.prune_networks()
|
||||
_LOGGER.debug("Networks prune: %s", output)
|
||||
except docker.errors.APIError as err:
|
||||
_LOGGER.warning("Error for networks prune: %s", err)
|
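The run_command() flow — create, wait, collect logs, always remove — maps directly onto the Docker SDK. A standalone sketch of the same lifecycle against a local Docker daemon (the image name and command are arbitrary examples, not values from the diff):

from contextlib import suppress

import docker

client = docker.from_env()

container = client.containers.run("alpine:latest", command="echo hello", detach=True)
try:
    result = container.wait()                        # e.g. {"StatusCode": 0}
    output = container.logs(stdout=True, stderr=True)
finally:
    # Always clean up the temporary container, mirroring run_command().
    with suppress(docker.errors.DockerException):
        container.remove(force=True)

print(result["StatusCode"], output.decode())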
@@ -1,38 +0,0 @@
"""HassOS Cli docker object."""
import logging

import docker

from ..coresys import CoreSysAttributes
from .interface import DockerInterface

_LOGGER: logging.Logger = logging.getLogger(__name__)


class DockerHassOSCli(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for HassOS Cli."""

    @property
    def image(self):
        """Return name of HassOS CLI image."""
        return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"

    def _stop(self, remove_container=True):
        """No stop needed for the CLI image."""
        return True

    def _attach(self, tag: str):
        """Attach to running Docker container.

        Need run inside executor.
        """
        try:
            image = self.sys_docker.images.get(f"{self.image}:{tag}")

        except docker.errors.DockerException:
            _LOGGER.warning("Can't find HassOS CLI image %s", self.image)

        else:
            self._meta = image.attrs
            _LOGGER.info(
                "Found HassOS CLI %s with version %s", self.image, self.version
            )
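Unlike the other wrappers, _attach here only resolves image metadata; there is no long-running container to manage. The lookup-then-record pattern, sketched standalone with the Docker SDK (the tag is an arbitrary example):

import docker

client = docker.from_env()
meta = None
try:
    image = client.images.get("alpine:latest")  # arbitrary example tag
except docker.errors.DockerException:
    print("image not found")
else:
    meta = image.attrs  # record metadata, as _attach() stores into self._meta
    print("found", image.tags)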
@@ -1,441 +0,0 @@
"""Interface class for Hass.io Docker object."""
import asyncio
from contextlib import suppress
import logging
from typing import Any, Awaitable, Dict, List, Optional

import docker

from . import CommandReturn
from ..const import LABEL_ARCH, LABEL_VERSION
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import DockerAPIError
from ..utils import process_lock
from .stats import DockerStats

_LOGGER: logging.Logger = logging.getLogger(__name__)


class DockerInterface(CoreSysAttributes):
    """Docker Hass.io interface."""

    def __init__(self, coresys: CoreSys):
        """Initialize Docker base wrapper."""
        self.coresys: CoreSys = coresys
        self._meta: Optional[Dict[str, Any]] = None
        self.lock: asyncio.Lock = asyncio.Lock(loop=coresys.loop)

    @property
    def timeout(self) -> int:
        """Return timeout for Docker actions."""
        return 30

    @property
    def name(self) -> Optional[str]:
        """Return name of Docker container."""
        return None

    @property
    def meta_config(self) -> Dict[str, Any]:
        """Return meta data of configuration for container/image."""
        if not self._meta:
            return {}
        return self._meta.get("Config", {})

    @property
    def meta_host(self) -> Dict[str, Any]:
        """Return meta data of configuration for host."""
        if not self._meta:
            return {}
        return self._meta.get("HostConfig", {})

    @property
    def meta_labels(self) -> Dict[str, str]:
        """Return meta data of labels for container/image."""
        return self.meta_config.get("Labels") or {}

    @property
    def image(self) -> Optional[str]:
        """Return name of Docker image."""
        try:
            return self.meta_config["Image"].partition(":")[0]
        except KeyError:
            return None

    @property
    def version(self) -> Optional[str]:
        """Return version of Docker image."""
        return self.meta_labels.get(LABEL_VERSION)

    @property
    def arch(self) -> Optional[str]:
        """Return arch of Docker image."""
        return self.meta_labels.get(LABEL_ARCH)

    @property
    def in_progress(self) -> bool:
        """Return True if a task is in progress."""
        return self.lock.locked()

    @process_lock
    def install(self, tag: str, image: Optional[str] = None, latest: bool = False):
        """Pull docker image."""
        return self.sys_run_in_executor(self._install, tag, image, latest)

    def _install(
        self, tag: str, image: Optional[str] = None, latest: bool = False
    ) -> None:
        """Pull Docker image.

        Need run inside executor.
        """
        image = image or self.image

        _LOGGER.info("Pull image %s tag %s.", image, tag)
        try:
            docker_image = self.sys_docker.images.pull(f"{image}:{tag}")
            if latest:
                _LOGGER.info("Tag image %s with version %s as latest", image, tag)
                docker_image.tag(image, tag="latest")
        except docker.errors.APIError as err:
            _LOGGER.error("Can't install %s:%s -> %s.", image, tag, err)
            raise DockerAPIError() from None
        else:
            self._meta = docker_image.attrs

    def exists(self) -> Awaitable[bool]:
        """Return True if Docker image exists in local repository."""
        return self.sys_run_in_executor(self._exists)

    def _exists(self) -> bool:
        """Return True if Docker image exists in local repository.

        Need run inside executor.
        """
        with suppress(docker.errors.DockerException):
            self.sys_docker.images.get(f"{self.image}:{self.version}")
            return True
        return False

    def is_running(self) -> Awaitable[bool]:
        """Return True if Docker is running.

        Return a Future.
        """
        return self.sys_run_in_executor(self._is_running)

    def _is_running(self) -> bool:
        """Return True if Docker is running.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        # container is not running
        if docker_container.status != "running":
            return False

        return True

    @process_lock
    def attach(self, tag: str):
        """Attach to running Docker container."""
        return self.sys_run_in_executor(self._attach, tag)

    def _attach(self, tag: str) -> None:
        """Attach to running docker container.

        Need run inside executor.
        """
        with suppress(docker.errors.DockerException):
            self._meta = self.sys_docker.containers.get(self.name).attrs

        with suppress(docker.errors.DockerException):
            if not self._meta and self.image:
                self._meta = self.sys_docker.images.get(f"{self.image}:{tag}").attrs

        # Successful?
        if not self._meta:
            raise DockerAPIError()
        _LOGGER.info("Attach to %s with version %s", self.image, self.version)

    @process_lock
    def run(self) -> Awaitable[None]:
        """Run Docker image."""
        return self.sys_run_in_executor(self._run)

    def _run(self) -> None:
        """Run Docker image.

        Need run inside executor.
        """
        raise NotImplementedError()

    @process_lock
    def stop(self, remove_container=True) -> Awaitable[None]:
        """Stop/remove Docker container."""
        return self.sys_run_in_executor(self._stop, remove_container)

    def _stop(self, remove_container=True) -> None:
        """Stop/remove Docker container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            raise DockerAPIError() from None

        if docker_container.status == "running":
            _LOGGER.info("Stop %s application", self.name)
            with suppress(docker.errors.DockerException):
                docker_container.stop(timeout=self.timeout)

        if remove_container:
            with suppress(docker.errors.DockerException):
                _LOGGER.info("Clean %s application", self.name)
                docker_container.remove(force=True)

    @process_lock
    def start(self) -> Awaitable[None]:
        """Start Docker container."""
        return self.sys_run_in_executor(self._start)

    def _start(self) -> None:
        """Start docker container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            raise DockerAPIError() from None

        _LOGGER.info("Start %s", self.image)
        try:
            docker_container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s: %s", self.image, err)
            raise DockerAPIError() from None

    @process_lock
    def remove(self) -> Awaitable[None]:
        """Remove Docker images."""
        return self.sys_run_in_executor(self._remove)

    def _remove(self) -> None:
        """Remove docker images.

        Need run inside executor.
        """
        # Cleanup container
        with suppress(DockerAPIError):
            self._stop()

        _LOGGER.info("Remove image %s with latest and %s", self.image, self.version)

        try:
            with suppress(docker.errors.ImageNotFound):
                self.sys_docker.images.remove(image=f"{self.image}:latest", force=True)

            with suppress(docker.errors.ImageNotFound):
                self.sys_docker.images.remove(
                    image=f"{self.image}:{self.version}", force=True
                )

        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't remove image %s: %s", self.image, err)
            raise DockerAPIError() from None

        self._meta = None

    @process_lock
    def update(
        self, tag: str, image: Optional[str] = None, latest: bool = False
    ) -> Awaitable[None]:
        """Update a Docker image."""
        return self.sys_run_in_executor(self._update, tag, image, latest)

    def _update(
        self, tag: str, image: Optional[str] = None, latest: bool = False
    ) -> None:
        """Update a docker image.

        Need run inside executor.
        """
        image = image or self.image

        _LOGGER.info(
            "Update image %s:%s to %s:%s", self.image, self.version, image, tag
        )

        # Update docker image
        self._install(tag, image=image, latest=latest)

        # Stop container & cleanup
        with suppress(DockerAPIError):
            self._stop()

    def logs(self) -> Awaitable[bytes]:
        """Return Docker logs of container.

        Return a Future.
        """
        return self.sys_run_in_executor(self._logs)

    def _logs(self) -> bytes:
        """Return Docker logs of container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return b""

        try:
            return docker_container.logs(tail=100, stdout=True, stderr=True)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't grep logs from %s: %s", self.image, err)
        # Keep the bytes contract even when the log read fails.
        return b""

    @process_lock
    def cleanup(self) -> Awaitable[None]:
        """Check if old version exists and cleanup."""
        return self.sys_run_in_executor(self._cleanup)

    def _cleanup(self) -> None:
        """Check if old version exists and cleanup.

        Need run inside executor.
        """
        try:
            origin = self.sys_docker.images.get(f"{self.image}:{self.version}")
        except docker.errors.DockerException:
            _LOGGER.warning("Can't find %s for cleanup", self.image)
            raise DockerAPIError() from None

        for image in self.sys_docker.images.list(name=self.image):
            if origin.id == image.id:
                continue

            with suppress(docker.errors.DockerException):
                _LOGGER.info("Cleanup images: %s", image.tags)
                self.sys_docker.images.remove(image.id, force=True)

    @process_lock
    def restart(self) -> Awaitable[None]:
        """Restart docker container."""
        return self.sys_loop.run_in_executor(None, self._restart)

    def _restart(self) -> None:
        """Restart docker container.

        Need run inside executor.
        """
        try:
            container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            raise DockerAPIError() from None

        _LOGGER.info("Restart %s", self.image)
        try:
            container.restart(timeout=self.timeout)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't restart %s: %s", self.image, err)
            raise DockerAPIError() from None

    @process_lock
    def execute_command(self, command: str) -> Awaitable[CommandReturn]:
        """Create a temporary container and run command."""
        return self.sys_run_in_executor(self._execute_command, command)

    def _execute_command(self, command: str) -> CommandReturn:
        """Create a temporary container and run command.

        Need run inside executor.
        """
        raise NotImplementedError()

    def stats(self) -> Awaitable[DockerStats]:
        """Read and return stats from container."""
        return self.sys_run_in_executor(self._stats)

    def _stats(self) -> DockerStats:
        """Read and return stats from container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            raise DockerAPIError() from None

        try:
            stats = docker_container.stats(stream=False)
            return DockerStats(stats)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't read stats from %s: %s", self.name, err)
            raise DockerAPIError() from None

    def is_fails(self) -> Awaitable[bool]:
        """Return True if Docker is in a failed state.

        Return a Future.
        """
        return self.sys_run_in_executor(self._is_fails)

    def _is_fails(self) -> bool:
        """Return True if Docker is in a failed state.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        # container is not running
        if docker_container.status != "exited":
            return False

        # Check return value
        if int(docker_container.attrs["State"]["ExitCode"]) != 0:
            return True

        return False

    def get_latest_version(self, key: Any = int) -> Awaitable[str]:
        """Return latest version of local Home Assistant image."""
        return self.sys_run_in_executor(self._get_latest_version, key)

    def _get_latest_version(self, key: Any = int) -> str:
        """Return latest version of local Home Assistant image.

        Need run inside executor.
        """
        available_version: List[str] = []
        try:
            for image in self.sys_docker.images.list(self.image):
                for tag in image.tags:
                    version = tag.partition(":")[2]
                    try:
                        key(version)
                    except (AttributeError, ValueError):
                        continue
                    available_version.append(version)

            if not available_version:
                raise ValueError()

        except (docker.errors.DockerException, ValueError):
            _LOGGER.debug("No version found for %s", self.image)
            raise DockerAPIError()
        else:
            _LOGGER.debug("Found HA versions: %s", available_version)

        # Sort version and return latest version
        available_version.sort(key=key, reverse=True)
        return available_version[0]
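Every mutating method above is wrapped by @process_lock and exposed as an awaitable that runs the blocking _method in an executor, so in_progress reduces to lock.locked(). A sketch of how such a decorator can serialize actions through the instance's asyncio.Lock (this is an assumption about hassio.utils.process_lock, not its verbatim source):

import asyncio
from functools import wraps


def process_lock(method):
    """Serialize calls on any object exposing an asyncio.Lock as `self.lock`."""

    @wraps(method)
    async def wrapper(self, *args, **kwargs):
        async with self.lock:
            return await method(self, *args, **kwargs)

    return wrapper


class Worker:
    def __init__(self) -> None:
        self.lock = asyncio.Lock()

    @process_lock
    async def install(self, tag: str) -> str:
        await asyncio.sleep(0.1)  # stand-in for a blocking Docker pull
        return f"installed:{tag}"


async def main() -> None:
    worker = Worker()
    # The second call waits for the first to release the lock;
    # an in_progress-style check reduces to worker.lock.locked().
    print(await asyncio.gather(worker.install("197"), worker.install("198")))


asyncio.run(main())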
@@ -1,77 +0,0 @@
"""Init file for Hass.io Docker object."""
from ipaddress import IPv4Address
import logging
import os
from typing import Awaitable

import docker

from ..coresys import CoreSysAttributes
from ..exceptions import DockerAPIError
from .interface import DockerInterface

_LOGGER: logging.Logger = logging.getLogger(__name__)


class DockerSupervisor(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for Supervisor."""

    @property
    def name(self) -> str:
        """Return name of Docker container."""
        return os.environ["SUPERVISOR_NAME"]

    @property
    def ip_address(self) -> IPv4Address:
        """Return IP address of this container."""
        return self.sys_docker.network.supervisor

    @property
    def privileged(self) -> bool:
        """Return True if the container runs privileged."""
        return self.meta_host.get("Privileged", False)

    def _attach(self, tag: str) -> None:
        """Attach to running docker container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            raise DockerAPIError() from None

        self._meta = docker_container.attrs
        _LOGGER.info(
            "Attach to Supervisor %s with version %s",
            self.image,
            self.sys_supervisor.version,
        )

        # If already attached
        if docker_container in self.sys_docker.network.containers:
            return

        # Attach to network
        _LOGGER.info("Connect Supervisor to Hass.io Network")
        self.sys_docker.network.attach_container(
            docker_container, alias=["hassio"], ipv4=self.sys_docker.network.supervisor
        )

    def retag(self) -> Awaitable[None]:
        """Retag latest image to version."""
        return self.sys_run_in_executor(self._retag)

    def _retag(self) -> None:
        """Retag latest image to version.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)

            docker_container.image.tag(self.image, tag=self.version)
            docker_container.image.tag(self.image, tag="latest")
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't retag supervisor version: %s", err)
            raise DockerAPIError() from None
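Retagging with the Docker SDK is a pure metadata operation: Image.tag() points an additional name at the existing layers without copying data. A standalone sketch of the same move (the repository and version strings are examples, not values from the diff):

import docker

client = docker.from_env()
image = client.images.pull("alpine:latest")  # any locally available image works

# Point both a version tag and "latest" at the same image ID.
image.tag("example/supervisor", tag="197")
image.tag("example/supervisor", tag="latest")

print(client.images.get("example/supervisor:197").id == image.id)  # True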
Some files were not shown because too many files have changed in this diff.