Mirror of https://github.com/home-assistant/supervisor.git
Synced 2025-08-18 13:39:21 +00:00
Compare commits
757 Commits
*(757 commits listed by SHA only, from `78e6a46318` through `490ec0d462`; the avatar, author, and date columns are empty in this mirror snapshot.)*
@@ -1,7 +1,12 @@
-FROM python:3.7
+FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8
 
 WORKDIR /workspaces
 
+# Set Docker daemon config
+RUN \
+    mkdir -p /etc/docker \
+    && echo '{"storage-driver": "vfs"}' > /etc/docker/daemon.json
+
 # Install Node/Yarn for Frontent
 RUN apt-get update && apt-get install -y --no-install-recommends \
     curl \
@@ -13,7 +18,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     && apt-get update && apt-get install -y --no-install-recommends \
     nodejs \
     yarn \
-    && curl -o - https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
+    && curl -o - https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash \
     && rm -rf /var/lib/apt/lists/*
 ENV NVM_DIR /root/.nvm
 
@@ -43,9 +48,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
 
 # Install Python dependencies from requirements.txt if it exists
 COPY requirements.txt requirements_tests.txt ./
-RUN pip3 install -r requirements.txt -r requirements_tests.txt \
+RUN pip3 install -U setuptools pip \
+    && pip3 install -r requirements.txt -r requirements_tests.txt \
     && pip3 install tox \
     && rm -f requirements.txt requirements_tests.txt
-
-# Set the default shell to bash instead of sh
-ENV SHELL /bin/bash
@@ -1,24 +1,33 @@
 // See https://aka.ms/vscode-remote/devcontainer.json for format details.
 {
   "name": "Supervisor dev",
   "context": "..",
   "dockerFile": "Dockerfile",
   "appPort": "9123:8123",
+  "postCreateCommand": "pre-commit install",
   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
+  "containerEnv": {"NVM_DIR":"/usr/local/share/nvm"},
   "extensions": [
     "ms-python.python",
+    "ms-python.vscode-pylance",
     "visualstudioexptteam.vscodeintellicode",
     "esbenp.prettier-vscode"
   ],
   "settings": {
-    "python.pythonPath": "/usr/local/bin/python",
-    "python.linting.pylintEnabled": true,
-    "python.linting.enabled": true,
-    "python.formatting.provider": "black",
-    "python.formatting.blackArgs": ["--target-version", "py37"],
     "terminal.integrated.shell.linux": "/bin/bash",
     "editor.formatOnPaste": false,
     "editor.formatOnSave": true,
     "editor.formatOnType": true,
-    "files.trimTrailingWhitespace": true
+    "files.trimTrailingWhitespace": true,
+    "python.pythonPath": "/usr/local/bin/python3",
+    "python.linting.pylintEnabled": true,
+    "python.linting.enabled": true,
+    "python.formatting.provider": "black",
+    "python.formatting.blackArgs": ["--target-version", "py38"],
+    "python.formatting.blackPath": "/usr/local/bin/black",
+    "python.linting.banditPath": "/usr/local/bin/bandit",
+    "python.linting.flake8Path": "/usr/local/bin/flake8",
+    "python.linting.mypyPath": "/usr/local/bin/mypy",
+    "python.linting.pylintPath": "/usr/local/bin/pylint",
+    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
   }
 }
29 .github/ISSUE_TEMPLATE.md vendored
@@ -1,29 +0,0 @@
<!-- READ THIS FIRST:
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
- Do not report issues for components here, plaese refer to https://github.com/home-assistant/home-assistant/issues
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
- If you have a problem with a Add-on, make a issue on there repository.
-->

**Home Assistant release with the issue:**
<!--
- Frontend -> Developer tools -> Info
- Or use this command: hass --version
-->

**Operating environment (HassOS/Generic):**
<!--
Please provide details about your environment.
-->

**Supervisor logs:**
<!--
- Frontend -> Hass.io -> System
- Or use this command: hassio su logs
-->


**Description of problem:**

69 .github/ISSUE_TEMPLATE/BUG_REPORT.md vendored Normal file
@@ -0,0 +1,69 @@
---
name: Report a bug with the Supervisor on a supported System
about: Report an issue related to the Home Assistant Supervisor.
labels: bug
---

<!-- READ THIS FIRST:
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
- If you have a problem with an add-on, make an issue in it's repository.
-->

<!--
Important: You can only fill a bug repport for an supported system! If you run an unsupported installation. This report would be closed without comment.
-->

### Describe the issue

<!-- Provide as many details as possible. -->

### Steps to reproduce

<!-- What do you do to encounter the issue. -->

1. ...
2. ...
3. ...

### Enviroment details

<!-- You can find these details in the system tab of the supervisor panel, or by using the `ha` CLI. -->

- **Operating System:**: xxx
- **Supervisor version:**: xxx
- **Home Assistant version**: xxx

### Supervisor logs

<details>
<summary>Supervisor logs</summary>
<!--
- Frontend -> Supervisor -> System
- Or use this command: ha supervisor logs
- Logs are more than just errors, even if you don't think it's important, it is.
-->

```
Paste supervisor logs here

```

</details>

### System Information

<details>
<summary>System Information</summary>
<!--
- Use this command: ha info
-->

```
Paste system info here

```

</details>
25 .github/ISSUE_TEMPLATE/config.yml vendored Normal file
@@ -0,0 +1,25 @@
blank_issues_enabled: false
contact_links:
  - name: Report a bug/issues with an unsupported Supervisor
    url: https://community.home-assistant.io
    about: The Community guide can help or was updated to solve your issue

  - name: Report a bug for the Supervisor panel
    url: https://github.com/home-assistant/frontend/issues
    about: The Supervisor panel is a part of the Home Assistant frontend

  - name: Report incorrect or missing information on our developer documentation
    url: https://github.com/home-assistant/developers.home-assistant.io/issues
    about: Our documentation has its own issue tracker. Please report issues with the website there.

  - name: Request a feature for the Supervisor
    url: https://community.home-assistant.io/c/feature-requests
    about: Request an new feature for the Supervisor.

  - name: I have a question or need support
    url: https://www.home-assistant.io/help
    about: We use GitHub for tracking bugs, check our website for resources on getting help.

  - name: I'm unsure where to go?
    url: https://www.home-assistant.io/join-chat
    about: If you are unsure where to go, then joining our chat is recommended; Just ask!
80 .github/ISSUE_TEMPLATE/z_bug_report_form.yml vendored Normal file
@@ -0,0 +1,80 @@
name: Bug Report Form
about: Report an issue related to the Home Assistant Supervisor.
labels: bug
title: ""
issue_body: true
inputs:
  - type: description
    attributes:
      value: |
        This issue form is for reporting bugs with **supported** setups only!

        If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].
        [fr]: https://community.home-assistant.io/c/feature-requests
  - type: input
    attributes:
      label: What is the version of the Supervisor used?
      required: true
      placeholder: supervisor-
      description: >
        Can be found in the Supervisor panel -> System tab. Starts with
        `supervisor-....`.
  - type: dropdown
    attributes:
      label: What type of installation are you running?
      required: true
      description: >
        If you don't know, you can find it in: Configuration panel -> Info.
      choices:
        - Home Assistant OS
        - Home Assistant Supervised
  - type: dropdown
    attributes:
      label: Which operating system are you running on?
      required: true
      choices:
        - Home Assistant Operating System
        - Debian
        - Other (e.g., Raspbian/Raspberry Pi OS/Fedora)
  - type: input
    attributes:
      label: What is the version of your installed operating system?
      required: true
      placeholder: 5.10
      description: Can be found in the Supervisor panel -> System tab.
  - type: input
    attributes:
      label: What version of Home Assistant Core is installed?
      required: true
      placeholder: core-
      description: >
        Can be found in the Supervisor panel -> System tab. Starts with
        `core-....`.
  - type: textarea
    attributes:
      label: Describe the issue you are experiencing
      required: true
      description: Provide a clear and concise description of what the bug is.
  - type: textarea
    attributes:
      label: Steps to reproduce the issue
      required: true
      description: |
        Please tell us exactly how to reproduce your issue.
        Provide clear and concise step by step instructions and add code snippets if needed.
      value: |
        1.
        2.
        3.
        ...
  - type: textarea
    attributes:
      label: Anything in the Supervisor logs that might be useful for us?
      required: false
      description: >
        The Supervisor logs can be found in the Supervisor panel -> System tab.
  - type: description
    attributes:
      value: |
        If you have any additional information for us, use the field below.
        Please note, you can attach screenshots or screen recordings here.
68 .github/PULL_REQUEST_TEMPLATE.md vendored Normal file
@@ -0,0 +1,68 @@
<!--
  You are amazing! Thanks for contributing to our project!
  Please, DO NOT DELETE ANY TEXT from this template! (unless instructed).
-->

## Proposed change

<!--
  Describe the big picture of your changes here to communicate to the
  maintainers why we should accept this pull request. If it fixes a bug
  or resolves a feature request, be sure to link to that issue in the
  additional information section.
-->

## Type of change

<!--
  What type of change does your PR introduce to Home Assistant?
  NOTE: Please, check only 1! box!
  If your PR requires multiple boxes to be checked, you'll most likely need to
  split it into multiple PRs. This makes things easier and faster to code review.
-->

- [ ] Dependency upgrade
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (which adds functionality to the supervisor)
- [ ] Breaking change (fix/feature causing existing functionality to break)
- [ ] Code quality improvements to existing code or addition of tests

## Additional information

<!--
  Details are important, and help maintainers processing your PR.
  Please be sure to fill out additional details, if applicable.
-->

- This PR fixes or closes issue: fixes #
- This PR is related to issue:
- Link to documentation pull request:

## Checklist

<!--
  Put an `x` in the boxes that apply. You can also fill these out after
  creating the PR. If you're unsure about any of them, don't hesitate to ask.
  We're here to help! This is simply a reminder of what we are going to look
  for before merging your code.
-->

- [ ] The code change is tested and works locally.
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
- [ ] Tests have been added to verify that the new code works.

If API endpoints of add-on configuration are added/changed:

- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]

<!--
  Thank you for contributing <3

  Below, some useful links you could explore:
-->

[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[docs-repository]: https://github.com/home-assistant/developers.home-assistant
14 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,14 @@
version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
27 .github/lock.yml vendored
@@ -1,27 +0,0 @@
# Configuration for Lock Threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 1

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: 2020-01-01

# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels: []

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
only: pulls

# Optionally, specify configuration settings just for `issues` or `pulls`
issues:
  daysUntilLock: 30
47 .github/release-drafter.yml vendored
@@ -1,4 +1,49 @@
+change-template: "- #$NUMBER $TITLE @$AUTHOR"
+sort-direction: ascending
+
+categories:
+  - title: ":boom: Breaking Changes"
+    label: "breaking-change"
+
+  - title: ":wrench: Build"
+    label: "build"
+
+  - title: ":boar: Chore"
+    label: "chore"
+
+  - title: ":sparkles: New Features"
+    label: "new-feature"
+
+  - title: ":zap: Performance"
+    label: "performance"
+
+  - title: ":recycle: Refactor"
+    label: "refactor"
+
+  - title: ":green_heart: CI"
+    label: "ci"
+
+  - title: ":bug: Bug Fixes"
+    label: "bugfix"
+
+  - title: ":white_check_mark: Test"
+    label: "test"
+
+  - title: ":arrow_up: Dependency Updates"
+    label: "dependencies"
+
+include-labels:
+  - "breaking-change"
+  - "build"
+  - "chore"
+  - "performance"
+  - "refactor"
+  - "new-feature"
+  - "bugfix"
+  - "dependencies"
+  - "test"
+  - "ci"
 template: |
   ## What's Changed
 
   $CHANGES
17 .github/stale.yml vendored
@@ -1,17 +0,0 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
# Label to use when marking an issue as stale
staleLabel: wontfix
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
215 .github/workflows/builder.yml vendored Normal file
@@ -0,0 +1,215 @@
name: Build supervisor

on:
  workflow_dispatch:
    inputs:
      channel:
        description: "Channel"
        required: true
        default: "dev"
      version:
        description: "Version"
        required: true
      publish:
        description: "Publish"
        required: true
        default: "false"
      stable:
        description: "Stable"
        required: true
        default: "false"
  pull_request:
    branches: ["main"]
  release:
    types: ["published"]
  push:
    branches: ["main"]
    paths:
      - "rootfs/**"
      - "supervisor/**"
      - build.json
      - Dockerfile
      - requirements.txt
      - setup.py

env:
  BUILD_NAME: supervisor
  BUILD_TYPE: supervisor
  WHEELS_TAG: 3.8-alpine3.12

jobs:
  init:
    name: Initialize build
    runs-on: ubuntu-latest
    outputs:
      architectures: ${{ steps.info.outputs.architectures }}
      version: ${{ steps.version.outputs.version }}
      channel: ${{ steps.version.outputs.channel }}
      publish: ${{ steps.version.outputs.publish }}
      requirements: ${{ steps.requirements.outputs.changed }}
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Get information
        id: info
        uses: home-assistant/actions/helpers/info@master

      - name: Get version
        id: version
        uses: home-assistant/actions/helpers/version@master
        with:
          type: ${{ env.BUILD_TYPE }}

      - name: Get changed files
        id: changed_files
        if: steps.version.outputs.publish == 'false'
        uses: jitterbit/get-changed-files@v1

      - name: Check if requirements files changed
        id: requirements
        run: |
          if [[ "${{ steps.changed_files.outputs.all }}" =~ requirements.txt ]]; then
            echo "::set-output name=changed::true"
          fi

  build:
    name: Build ${{ matrix.arch }} supervisor
    needs: init
    runs-on: ubuntu-latest
    strategy:
      matrix:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Build wheels
        if: needs.init.outputs.requirements == 'true'
        uses: home-assistant/wheels@master
        with:
          tag: ${{ env.WHEELS_TAG }}
          arch: ${{ matrix.arch }}
          wheels-host: ${{ secrets.WHEELS_HOST }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          wheels-user: wheels
          apk: "build-base;libffi-dev;openssl-dev"
          skip-binary: aiohttp
          requirements: "requirements.txt"

      - name: Set version
        if: needs.init.outputs.publish == 'true'
        uses: home-assistant/actions/helpers/version@master
        with:
          type: ${{ env.BUILD_TYPE }}

      - name: Login to DockerHub
        if: needs.init.outputs.publish == 'true'
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set build arguments
        if: needs.init.outputs.publish == 'false'
        run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

      - name: Build supervisor
        uses: home-assistant/builder@2021.01.1
        with:
          args: |
            $BUILD_ARGS \
            --${{ matrix.arch }} \
            --target /data \
            --generic ${{ needs.init.outputs.version }}

  version:
    name: Update version
    needs: ["init", "run_supervisor"]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
        if: needs.init.outputs.publish == 'true'
        uses: actions/checkout@v2

      - name: Initialize git
        if: needs.init.outputs.publish == 'true'
        uses: home-assistant/actions/helpers/git-init@master
        with:
          name: ${{ secrets.GIT_NAME }}
          email: ${{ secrets.GIT_EMAIL }}
          token: ${{ secrets.GIT_TOKEN }}

      - name: Update version file
        if: needs.init.outputs.publish == 'true'
        uses: home-assistant/actions/helpers/version-push@master
        with:
          key: ${{ env.BUILD_NAME }}
          version: ${{ needs.init.outputs.version }}
          channel: ${{ needs.init.outputs.channel }}

  run_supervisor:
    runs-on: ubuntu-latest
    name: Run the Supervisor
    needs: ["build"]
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v2

      - name: Build the Supervisor
        uses: home-assistant/builder@2021.01.1
        with:
          args: |
            --test \
            --amd64 \
            --target /data \
            --generic runner

      - name: Create the Supervisor
        run: |
          mkdir -p /tmp/supervisor/data
          docker create --name hassio_supervisor \
            --privileged \
            --security-opt seccomp=unconfined \
            --security-opt apparmor:unconfined \
            -v /run/docker.sock:/run/docker.sock \
            -v /run/dbus:/run/dbus \
            -v /tmp/supervisor/data:/data \
            -v /etc/machine-id:/etc/machine-id:ro \
            -e SUPERVISOR_SHARE="/tmp/supervisor/data" \
            -e SUPERVISOR_NAME=hassio_supervisor \
            -e SUPERVISOR_DEV=1 \
            -e SUPERVISOR_MACHINE="qemux86-64" \
            homeassistant/amd64-hassio-supervisor:runner

      - name: Start the Supervisor
        run: docker start hassio_supervisor

      - name: Wait for Supervisor to come up
        run: |
          SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
          ping="error"
          while [ "$ping" != "ok" ]; do
            ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r .result)
            sleep 5
          done

      - name: Check the Supervisor
        run: |
          echo "Checking supervisor info"
          test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r .result)
          if [ "$test" != "ok" ];then
            docker logs hassio_supervisor
            exit 1
          fi

          echo "Checking supervisor network info"
          test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r .result)
          if [ "$test" != "ok" ];then
            docker logs hassio_supervisor
            exit 1
          fi
19 .github/workflows/check_pr_labels.yml vendored Normal file
@@ -0,0 +1,19 @@
name: Check PR

on:
  pull_request:
    branches: ["main"]
    types: [labeled, unlabeled, synchronize]

jobs:
  init:
    name: Check labels
    runs-on: ubuntu-latest
    steps:
      - name: Check labels
        run: |
          labels=$(jq -r '.pull_request.labels[] | .name' ${{github.event_path }})
          echo "$labels"
          if [ "$labels" == "cla-signed" ]; then
            exit 1
          fi
431 .github/workflows/ci.yaml vendored Normal file
@@ -0,0 +1,431 @@
name: CI

# yamllint disable-line rule:truthy
on:
  push:
    branches:
      - main
  pull_request: ~

env:
  DEFAULT_PYTHON: 3.8
  PRE_COMMIT_HOME: ~/.cache/pre-commit

jobs:
  # Separate job to pre-populate the base dependency cache
  # This prevent upcoming jobs to do the same individually
  prepare:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.8]
    name: Prepare Python ${{ matrix.python-version }} dependencies
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v2.2.1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
          restore-keys: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-
      - name: Create Python virtual environment
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          python -m venv venv
          . venv/bin/activate
          pip install -U pip setuptools
          pip install -r requirements.txt -r requirements_tests.txt
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pre-commit-
      - name: Install pre-commit dependencies
        if: steps.cache-precommit.outputs.cache-hit != 'true'
        run: |
          . venv/bin/activate
          pre-commit install-hooks

  lint-black:
    name: Check black
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run black
        run: |
          . venv/bin/activate
          black --target-version py38 --check supervisor tests setup.py

  lint-dockerfile:
    name: Check Dockerfile
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
      - name: Check Dockerfile
        uses: docker://hadolint/hadolint:v1.18.0
        with:
          args: hadolint Dockerfile

  lint-executable-shebangs:
    name: Check executables
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register check executables problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
      - name: Run executables check
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files

  lint-flake8:
    name: Check flake8
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register flake8 problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/flake8.json"
      - name: Run flake8
        run: |
          . venv/bin/activate
          flake8 supervisor tests

  lint-isort:
    name: Check isort
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run isort
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure

  lint-json:
    name: Check JSON
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register check-json problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-json.json"
      - name: Run check-json
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-json --all-files

  lint-pylint:
    name: Check pylint
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register pylint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/pylint.json"
      - name: Run pylint
        run: |
          . venv/bin/activate
          pylint supervisor tests

  lint-pyupgrade:
    name: Check pyupgrade
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run pyupgrade
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure

  pytest:
    runs-on: ubuntu-latest
    needs: prepare
    strategy:
      matrix:
        python-version: [3.8]
    name: Run tests Python ${{ matrix.python-version }}
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Install additional system dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends libpulse0 libudev1
      - name: Register Python problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/python.json"
      - name: Install Pytest Annotation plugin
        run: |
          . venv/bin/activate
          # Ideally this should be part of our dependencies
          # However this plugin is fairly new and doesn't run correctly
          # on a non-GitHub environment.
          pip install pytest-github-actions-annotate-failures
      - name: Run pytest
        run: |
          . venv/bin/activate
          pytest \
            -qq \
            --timeout=10 \
            --durations=10 \
            --cov supervisor \
            -o console_output_style=count \
            tests
      - name: Upload coverage artifact
        uses: actions/upload-artifact@v2.2.2
        with:
          name: coverage-${{ matrix.python-version }}
          path: .coverage

  coverage:
    name: Process test coverage
    runs-on: ubuntu-latest
    needs: pytest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.2.1
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Download all coverage artifacts
        uses: actions/download-artifact@v2
      - name: Combine coverage results
        run: |
          . venv/bin/activate
          coverage combine coverage*/.coverage*
          coverage report
          coverage xml
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v1.2.1
.github/workflows/lock.yml (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@

name: Lock

# yamllint disable-line rule:truthy
on:
  schedule:
    - cron: "0 0 * * *"

jobs:
  lock:
    runs-on: ubuntu-latest
    steps:
      - uses: dessant/lock-threads@v2.0.3
        with:
          github-token: ${{ github.token }}
          issue-lock-inactive-days: "30"
          issue-exclude-created-before: "2020-10-01T00:00:00Z"
          issue-lock-reason: ""
          pr-lock-inactive-days: "1"
          pr-exclude-created-before: "2020-11-01T00:00:00Z"
          pr-lock-reason: ""
.github/workflows/matchers/check-executables-have-shebangs.json (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@

{
  "problemMatcher": [
    {
      "owner": "check-executables-have-shebangs",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+)$",
          "file": 1,
          "message": 2
        }
      ]
    }
  ]
}
.github/workflows/matchers/check-json.json (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@

{
  "problemMatcher": [
    {
      "owner": "check-json",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
          "file": 1,
          "message": 2,
          "line": 3,
          "column": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/flake8.json (vendored, new file, 30 lines)
@@ -0,0 +1,30 @@

{
  "problemMatcher": [
    {
      "owner": "flake8-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    },
    {
      "owner": "flake8-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/hadolint.json (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@

{
  "problemMatcher": [
    {
      "owner": "hadolint",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
          "file": 1,
          "line": 2,
          "message": 3,
          "code": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/pylint.json (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@

{
  "problemMatcher": [
    {
      "owner": "pylint-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    },
    {
      "owner": "pylint-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    }
  ]
}
.github/workflows/matchers/python.json (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@

{
  "problemMatcher": [
    {
      "owner": "python",
      "pattern": [
        {
          "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
          "file": 1,
          "line": 2
        },
        {
          "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
          "message": 2
        }
      ]
    }
  ]
}
.github/workflows/release-drafter.yml (vendored, new file, 44 lines)
@@ -0,0 +1,44 @@

name: Release Drafter

on:
  push:
    branches:
      - main

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    name: Release Drafter
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Find Next Version
        id: version
        run: |
          # Versions are calendar-based (YYYY.MM.N): bump N within the
          # current month, otherwise start the new month at 0.
          declare -i newpost
          latest=$(git describe --tags $(git rev-list --tags --max-count=1))
          latestpre=$(echo "$latest" | awk '{split($0,a,"."); print a[1] "." a[2]}')
          datepre=$(date --utc '+%Y.%m')

          if [[ "$latestpre" == "$datepre" ]]; then
            latestpost=$(echo "$latest" | awk '{split($0,a,"."); print a[3]}')
            newpost=$latestpost+1
          else
            newpost=0
          fi

          echo Current version: $latest
          echo New target version: $datepre.$newpost
          echo "::set-output name=version::$datepre.$newpost"

      - name: Run Release Drafter
        uses: release-drafter/release-drafter@v5
        with:
          tag: ${{ steps.version.outputs.version }}
          name: ${{ steps.version.outputs.version }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/sentry.yaml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@

name: Sentry Release

# yamllint disable-line rule:truthy
on:
  release:
    types: [published, prereleased]

jobs:
  createSentryRelease:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Sentry Release
        uses: getsentry/action-release@v1.1
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
          SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
        with:
          environment: production
.github/workflows/stale.yml
vendored
Normal file
39
.github/workflows/stale.yml
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
name: Stale
|
||||
|
||||
# yamllint disable-line rule:truthy
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 * * * *"
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/stale@v3.0.15
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
days-before-stale: 60
|
||||
days-before-close: 7
|
||||
stale-issue-label: "stale"
|
||||
exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"
|
||||
stale-issue-message: >
|
||||
There hasn't been any activity on this issue recently. Due to the
|
||||
high number of incoming GitHub notifications, we have to clean some
|
||||
of the old issues, as many of them have already been resolved with
|
||||
the latest updates.
|
||||
|
||||
Please make sure to update to the latest version and check if that
|
||||
solves the issue. Let us know if that works for you by
|
||||
adding a comment 👍
|
||||
|
||||
This issue has now been marked as stale and will be closed if no
|
||||
further activity occurs. Thank you for your contributions.
|
||||
|
||||
stale-pr-label: "stale"
|
||||
exempt-pr-labels: "no-stale,pinned,rfc,security"
|
||||
stale-pr-message: >
|
||||
There hasn't been any activity on this pull request recently. This
|
||||
pull request has been automatically marked as stale because of that
|
||||
and will be closed if no further activity occurs within 7 days.
|
||||
|
||||
Thank you for your contributions.
|
.gitignore (vendored, 5 lines changed)
@@ -95,3 +95,8 @@ ENV/

.vscode/*
!.vscode/cSpell.json
!.vscode/tasks.json
!.vscode/launch.json

# mypy
/.mypy_cache/*
/.dmypy.json
.pre-commit-config.yaml (new file, 34 lines)
@@ -0,0 +1,34 @@

repos:
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
      - id: black
        args:
          - --safe
          - --quiet
          - --target-version
          - py38
        files: ^((supervisor|tests)/.+)?[^/]+\.py$
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.3
    hooks:
      - id: flake8
        additional_dependencies:
          - flake8-docstrings==1.5.0
          - pydocstyle==5.0.2
        files: ^(supervisor|script|tests)/.+\.py$
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.1.0
    hooks:
      - id: check-executables-have-shebangs
        stages: [manual]
      - id: check-json
  - repo: https://github.com/pre-commit/mirrors-isort
    rev: v4.3.21
    hooks:
      - id: isort
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.6.2
    hooks:
      - id: pyupgrade
        args: [--py37-plus]
.vscode/launch.json (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@

{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Supervisor remote debug",
      "type": "python",
      "request": "attach",
      "port": 33333,
      "host": "172.30.32.2",
      "pathMappings": [
        {
          "localRoot": "${workspaceFolder}",
          "remoteRoot": "/usr/src/supervisor"
        }
      ]
    }
  ]
}
.vscode/tasks.json (vendored, 194 lines changed)
@@ -1,92 +1,104 @@

{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Run Testenv",
      "type": "shell",
      "command": "./scripts/test_env.sh",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Run Testenv CLI",
      "type": "shell",
      "command": "docker run --rm -ti -v /etc/machine-id:/etc/machine-id --network=hassio --add-host hassio:172.30.32.2 homeassistant/amd64-hassio-cli:dev",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Update UI",
      "type": "shell",
      "command": "./scripts/update-frontend.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pytest",
      "type": "shell",
      "command": "pytest --timeout=10 tests",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Flake8",
      "type": "shell",
      "command": "flake8 hassio tests",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pylint",
      "type": "shell",
      "command": "pylint hassio",
      "dependsOn": [
        "Install all Requirements"
      ],
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    }
  ]
}

{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Run Supervisor",
      "type": "shell",
      "command": "./scripts/run-supervisor.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Build Supervisor",
      "type": "shell",
      "command": "./scripts/build-supervisor.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Run Supervisor CLI",
      "type": "shell",
      "command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Update Supervisor Panel",
      "type": "shell",
      "command": "./scripts/update-frontend.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pytest",
      "type": "shell",
      "command": "pytest --timeout=10 tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Flake8",
      "type": "shell",
      "command": "flake8 supervisor tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pylint",
      "type": "shell",
      "command": "pylint supervisor",
      "dependsOn": ["Install all Requirements"],
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    }
  ]
}
API.md
969
API.md
@@ -1,969 +0,0 @@
|
# Supervisor

## Supervisor RESTful API

Interface for Home Assistant to control things from the Supervisor.

On error / Code 400:

```json
{
  "result": "error",
  "message": ""
}
```

On success / Code 200:

```json
{
  "result": "ok",
  "data": {}
}
```

To access the API you need to use an authorization header with a `Bearer` token.
The token is available to add-ons and Home Assistant via the `SUPERVISOR_TOKEN` environment variable.
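As an illustration, a minimal sketch of an authenticated call (assuming the API is reachable at `http://supervisor`, the usual hostname from inside an add-on container; adjust the host if calling from elsewhere):

```bash
# Minimal sketch: query Supervisor info from inside an add-on container.
# Assumes the host http://supervisor and the SUPERVISOR_TOKEN environment
# variable described above.
curl -sSL \
  -H "Authorization: Bearer ${SUPERVISOR_TOKEN}" \
  http://supervisor/supervisor/info
```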
### Supervisor

- GET `/supervisor/ping`

This API call doesn't need a token.

- GET `/supervisor/info`

The add-ons listed in `addons` are only the installed ones.

```json
{
  "version": "INSTALL_VERSION",
  "last_version": "LAST_VERSION",
  "arch": "armhf|aarch64|i386|amd64",
  "channel": "stable|beta|dev",
  "timezone": "TIMEZONE",
  "logging": "debug|info|warning|error|critical",
  "ip_address": "ip address",
  "wait_boot": "int",
  "debug": "bool",
  "debug_block": "bool",
  "addons": [
    {
      "name": "xy bla",
      "slug": "xy",
      "description": "description",
      "repository": "12345678|null",
      "version": "LATEST_VERSION",
      "installed": "INSTALL_VERSION",
      "icon": "bool",
      "logo": "bool",
      "state": "started|stopped"
    }
  ],
  "addons_repositories": ["REPO_URL"]
}
```

- POST `/supervisor/update`

Optional:

```json
{
  "version": "VERSION"
}
```

- POST `/supervisor/options`

```json
{
  "channel": "stable|beta|dev",
  "timezone": "TIMEZONE",
  "wait_boot": "int",
  "debug": "bool",
  "debug_block": "bool",
  "logging": "debug|info|warning|error|critical",
  "addons_repositories": ["REPO_URL"]
}
```

- POST `/supervisor/reload`

Reload add-ons/version.

- GET `/supervisor/logs`

Output is the raw Docker log.

- GET `/supervisor/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

- GET `/supervisor/repair`

Repair an OverlayFS issue and restore lost images.

### Snapshot

- GET `/snapshots`

```json
{
  "snapshots": [
    {
      "slug": "SLUG",
      "date": "ISO",
      "name": "Custom name",
      "type": "full|partial",
      "protected": "bool"
    }
  ]
}
```

- POST `/snapshots/reload`

- POST `/snapshots/new/upload`

return:

```json
{
  "slug": ""
}
```

- POST `/snapshots/new/full`

```json
{
  "name": "Optional",
  "password": "Optional"
}
```

return:

```json
{
  "slug": ""
}
```
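For example, creating a password-protected full snapshot could look like this sketch (same assumed host and token as above; the payload uses the optional fields documented here):

```bash
# Sketch: create a full snapshot; the slug of the new snapshot comes back
# in the standard {"result": "ok", "data": {"slug": "..."}} envelope
# documented at the top of this file.
curl -sSL -X POST \
  -H "Authorization: Bearer ${SUPERVISOR_TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{"name": "nightly", "password": "secret"}' \
  http://supervisor/snapshots/new/full
```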
- POST `/snapshots/new/partial`

```json
{
  "name": "Optional",
  "addons": ["ADDON_SLUG"],
  "folders": ["FOLDER_NAME"],
  "password": "Optional"
}
```

return:

```json
{
  "slug": ""
}
```

- GET `/snapshots/{slug}/info`

```json
{
  "slug": "SNAPSHOT ID",
  "type": "full|partial",
  "name": "custom snapshot name / description",
  "date": "ISO",
  "size": "SIZE_IN_MB",
  "protected": "bool",
  "homeassistant": "version",
  "addons": [
    {
      "slug": "ADDON_SLUG",
      "name": "NAME",
      "version": "INSTALLED_VERSION",
      "size": "SIZE_IN_MB"
    }
  ],
  "repositories": ["URL"],
  "folders": ["NAME"]
}
```

- POST `/snapshots/{slug}/remove`

- GET `/snapshots/{slug}/download`

- POST `/snapshots/{slug}/restore/full`

```json
{
  "password": "Optional"
}
```

- POST `/snapshots/{slug}/restore/partial`

```json
{
  "homeassistant": "bool",
  "addons": ["ADDON_SLUG"],
  "folders": ["FOLDER_NAME"],
  "password": "Optional"
}
```

### Host

- POST `/host/reload`

- POST `/host/shutdown`

- POST `/host/reboot`

- GET `/host/info`

```json
{
  "hostname": "hostname|null",
  "features": ["shutdown", "reboot", "hostname", "services", "hassos"],
  "operating_system": "HassOS XY|Ubuntu 16.4|null",
  "kernel": "4.15.7|null",
  "chassis": "specific|null",
  "deployment": "stable|beta|dev|null",
  "cpe": "xy|null"
}
```

- POST `/host/options`

```json
{
  "hostname": ""
}
```

#### Services

- GET `/host/services`

```json
{
  "services": [
    {
      "name": "xy.service",
      "description": "XY ...",
      "state": "active|"
    }
  ]
}
```

- POST `/host/service/{unit}/stop`

- POST `/host/service/{unit}/start`

- POST `/host/service/{unit}/reload`

### HassOS

- GET `/os/info`

```json
{
  "version": "2.3",
  "version_cli": "7",
  "version_latest": "2.4",
  "version_cli_latest": "8",
  "board": "ova|rpi",
  "boot": "rauc boot slot"
}
```

- POST `/os/update`

```json
{
  "version": "optional"
}
```

- POST `/os/update/cli`

```json
{
  "version": "optional"
}
```

- POST `/os/config/sync`

Load host configs from a USB stick.

### Hardware

- GET `/hardware/info`

```json
{
  "serial": ["/dev/xy"],
  "input": ["Input device name"],
  "disk": ["/dev/sdax"],
  "gpio": ["gpiochip0", "gpiochip100"],
  "audio": {
    "CARD_ID": {
      "name": "xy",
      "type": "microphone",
      "devices": [
        {
          "chan_id": "channel ID",
          "chan_type": "type of device"
        }
      ]
    }
  }
}
```

- GET `/hardware/audio`

```json
{
  "audio": {
    "input": {
      "0,0": "Mic"
    },
    "output": {
      "1,0": "Jack",
      "1,1": "HDMI"
    }
  }
}
```

- POST `/hardware/trigger`

Trigger a udev reload.

### Home Assistant

- GET `/core/info`

```json
{
  "version": "INSTALL_VERSION",
  "last_version": "LAST_VERSION",
  "arch": "arch",
  "machine": "Image machine type",
  "ip_address": "ip address",
  "image": "str",
  "custom": "bool -> if custom image",
  "boot": "bool",
  "port": 8123,
  "ssl": "bool",
  "watchdog": "bool",
  "wait_boot": 600
}
```

- POST `/core/update`

Optional:

```json
{
  "version": "VERSION"
}
```

- GET `/core/logs`

Output is the raw Docker log.

- POST `/core/restart`
- POST `/core/check`
- POST `/core/start`
- POST `/core/stop`
- POST `/core/rebuild`

- POST `/core/options`

```json
{
  "image": "Optional|null",
  "last_version": "Optional for custom image|null",
  "port": "port for access core",
  "ssl": "bool",
  "refresh_token": "",
  "watchdog": "bool",
  "wait_boot": 600
}
```

Setting `image` or `last_version` to `null` resets the option.
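A sketch of such a reset call (same assumed host and token as the earlier examples):

```bash
# Sketch: reset a custom Core image back to the default by sending null
# for both options, per the note above.
curl -sSL -X POST \
  -H "Authorization: Bearer ${SUPERVISOR_TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{"image": null, "last_version": null}' \
  http://supervisor/core/options
```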
- POST/GET `/core/api`

Proxy to the Home Assistant Core instance.

- GET `/core/websocket`

Proxy to the Home Assistant Core websocket.

- GET `/core/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

### RESTful for API add-ons

If an add-on wants to call itself, it can use `/addons/self/...`.

- GET `/addons`

Get all available add-ons.

```json
{
  "addons": [
    {
      "name": "xy bla",
      "slug": "xy",
      "description": "description",
      "advanced": "bool",
      "stage": "stable|experimental|deprecated",
      "repository": "core|local|REP_ID",
      "version": "LAST_VERSION",
      "installed": "none|INSTALL_VERSION",
      "detached": "bool",
      "available": "bool",
      "build": "bool",
      "url": "null|url",
      "icon": "bool",
      "logo": "bool"
    }
  ],
  "repositories": [
    {
      "slug": "12345678",
      "name": "Repository Name|unknown",
      "source": "URL_OF_REPOSITORY",
      "url": "WEBSITE|REPOSITORY",
      "maintainer": "BLA BLU <fla@dld.ch>|unknown"
    }
  ]
}
```

- POST `/addons/reload`
- GET `/addons/{addon}/info`

```json
{
  "name": "xy bla",
  "slug": "xdssd_xybla",
  "hostname": "xdssd-xybla",
  "dns": [],
  "description": "description",
  "long_description": "null|markdown",
  "auto_update": "bool",
  "url": "null|url of addon",
  "detached": "bool",
  "available": "bool",
  "advanced": "bool",
  "stage": "stable|experimental|deprecated",
  "arch": ["armhf", "aarch64", "i386", "amd64"],
  "machine": "[raspberrypi2, tinker]",
  "homeassistant": "null|min Home Assistant Core version",
  "repository": "12345678|null",
  "version": "null|VERSION_INSTALLED",
  "last_version": "LAST_VERSION",
  "state": "none|started|stopped",
  "boot": "auto|manual",
  "build": "bool",
  "options": "{}",
  "schema": "{}|null",
  "network": "{}|null",
  "network_description": "{}|null",
  "host_network": "bool",
  "host_pid": "bool",
  "host_ipc": "bool",
  "host_dbus": "bool",
  "privileged": ["NET_ADMIN", "SYS_ADMIN"],
  "apparmor": "disable|default|profile",
  "devices": ["/dev/xy"],
  "udev": "bool",
  "auto_uart": "bool",
  "icon": "bool",
  "logo": "bool",
  "changelog": "bool",
  "documentation": "bool",
  "hassio_api": "bool",
  "hassio_role": "default|homeassistant|manager|admin",
  "homeassistant_api": "bool",
  "auth_api": "bool",
  "full_access": "bool",
  "protected": "bool",
  "rating": "1-6",
  "stdin": "bool",
  "webui": "null|http(s)://[HOST]:port/xy/zx",
  "gpio": "bool",
  "kernel_modules": "bool",
  "devicetree": "bool",
  "docker_api": "bool",
  "video": "bool",
  "audio": "bool",
  "audio_input": "null|0,0",
  "audio_output": "null|0,0",
  "services_role": "['service:access']",
  "discovery": "['service']",
  "ip_address": "ip address",
  "ingress": "bool",
  "ingress_entry": "null|/api/hassio_ingress/slug",
  "ingress_url": "null|/api/hassio_ingress/slug/entry.html",
  "ingress_port": "null|int",
  "ingress_panel": "null|bool"
}
```

- GET `/addons/{addon}/icon`

- GET `/addons/{addon}/logo`

- GET `/addons/{addon}/changelog`

- GET `/addons/{addon}/documentation`

- POST `/addons/{addon}/options`

```json
{
  "boot": "auto|manual",
  "auto_update": "bool",
  "network": {
    "CONTAINER": "port|[ip, port]"
  },
  "options": {},
  "audio_output": "null|0,0",
  "audio_input": "null|0,0",
  "ingress_panel": "bool"
}
```

To reset custom network/audio/options, set them to `null`.

- POST `/addons/{addon}/security`

This endpoint cannot be called by the add-on itself.

```json
{
  "protected": "bool"
}
```

- POST `/addons/{addon}/start`

- POST `/addons/{addon}/stop`

- POST `/addons/{addon}/install`

- POST `/addons/{addon}/uninstall`

- POST `/addons/{addon}/update`

- GET `/addons/{addon}/logs`

Output is the raw Docker log.

- POST `/addons/{addon}/restart`

- POST `/addons/{addon}/rebuild`

Only supported for locally built add-ons.

- POST `/addons/{addon}/stdin`

Write data to the add-on's stdin.

- GET `/addons/{addon}/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

### Ingress

- POST `/ingress/session`

Create a new session for access to the ingress service.

```json
{
  "session": "token"
}
```
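A sketch of the typical flow; the cookie name `ingress_session` and the response parsing are assumptions here, not something this file documents:

```bash
# Sketch: create an ingress session, then send the token back as a cookie.
# The cookie name "ingress_session" and the jq-based extraction are
# assumptions for illustration; INGRESS_TOKEN is a placeholder.
session=$(curl -sSL -X POST \
  -H "Authorization: Bearer ${SUPERVISOR_TOKEN}" \
  http://supervisor/ingress/session | jq -r '.data.session')

curl -sSL --cookie "ingress_session=${session}" \
  "http://supervisor/ingress/${INGRESS_TOKEN}/"
```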
- GET `/ingress/panels`

Return a list of enabled panels.

```json
{
  "panels": {
    "addon_slug": {
      "enable": "boolean",
      "icon": "mdi:...",
      "title": "title",
      "admin": "boolean"
    }
  }
}
```

- VIEW `/ingress/{token}`

Ingress WebUI for this add-on. The add-on needs to support Home Assistant auth!
Requires the ingress session as a cookie.

### Discovery

- GET `/discovery`

```json
{
  "discovery": [
    {
      "addon": "slug",
      "service": "name",
      "uuid": "uuid",
      "config": {}
    }
  ]
}
```

- GET `/discovery/{UUID}`

```json
{
  "addon": "slug",
  "service": "name",
  "uuid": "uuid",
  "config": {}
}
```

- POST `/discovery`

```json
{
  "service": "name",
  "config": {}
}
```

return:

```json
{
  "uuid": "uuid"
}
```

- DEL `/discovery/{UUID}`

### Services

- GET `/services`

```json
{
  "services": [
    {
      "slug": "name",
      "available": "bool",
      "providers": "list"
    }
  ]
}
```

#### MQTT

- GET `/services/mqtt`

```json
{
  "addon": "name",
  "host": "xy",
  "port": "8883",
  "ssl": "bool",
  "username": "optional",
  "password": "optional",
  "protocol": "3.1.1"
}
```

- POST `/services/mqtt`

```json
{
  "host": "xy",
  "port": "8883",
  "ssl": "bool|optional",
  "username": "optional",
  "password": "optional",
  "protocol": "3.1.1"
}
```

- DEL `/services/mqtt`
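As an illustration, an add-on that provides an MQTT broker might register it with POST `/services/mqtt` like this sketch (host, port, and credentials are placeholders):

```bash
# Sketch: publish MQTT connection details to the service registry.
# All values are placeholders; the add-on needs the appropriate services
# role for this call to be accepted.
curl -sSL -X POST \
  -H "Authorization: Bearer ${SUPERVISOR_TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{"host": "addon-mqtt", "port": "1883", "username": "user", "password": "pass", "protocol": "3.1.1"}' \
  http://supervisor/services/mqtt
```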
#### MySQL

- GET `/services/mysql`

```json
{
  "addon": "name",
  "host": "xy",
  "port": "8883",
  "username": "optional",
  "password": "optional"
}
```

- POST `/services/mysql`

```json
{
  "host": "xy",
  "port": "8883",
  "username": "optional",
  "password": "optional"
}
```

- DEL `/services/mysql`

### Misc

- GET `/info`

```json
{
  "supervisor": "version",
  "homeassistant": "version",
  "hassos": "null|version",
  "hostname": "name",
  "machine": "type",
  "arch": "arch",
  "supported_arch": ["arch1", "arch2"],
  "channel": "stable|beta|dev",
  "logging": "debug|info|warning|error|critical",
  "timezone": "Europe/Zurich"
}
```

### DNS

- GET `/dns/info`

```json
{
  "host": "ip-address",
  "version": "1",
  "latest_version": "2",
  "servers": ["dns://8.8.8.8"],
  "locals": ["dns://xy"]
}
```

- POST `/dns/options`

```json
{
  "servers": ["dns://8.8.8.8"]
}
```

- POST `/dns/update`

```json
{
  "version": "VERSION"
}
```

- POST `/dns/restart`

- POST `/dns/reset`

- GET `/dns/logs`

- GET `/dns/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

### Audio

- GET `/audio/info`

```json
{
  "host": "ip-address",
  "version": "1",
  "latest_version": "2",
  "audio": {
    "input": [
      {
        "name": "...",
        "description": "...",
        "volume": 0.3,
        "default": false
      }
    ],
    "output": [
      {
        "name": "...",
        "description": "...",
        "volume": 0.3,
        "default": false
      }
    ]
  }
}
```

- POST `/audio/update`

```json
{
  "version": "VERSION"
}
```

- POST `/audio/restart`

- POST `/audio/reload`

- GET `/audio/logs`

- POST `/audio/volume/input`

```json
{
  "name": "...",
  "volume": 0.5
}
```

- POST `/audio/volume/output`

```json
{
  "name": "...",
  "volume": 0.5
}
```

- POST `/audio/default/input`

```json
{
  "name": "..."
}
```

- POST `/audio/default/output`

```json
{
  "name": "..."
}
```

- GET `/audio/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

### Auth / SSO API

You can use the Home Assistant user system. The Supervisor handles this auth
system.

You can call POST `/auth`.

We support (see the sketch below):

- JSON `{ "user|name": "...", "password": "..." }`
- application/x-www-form-urlencoded `user|name=...&password=...`
- BasicAuth
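For example, the BasicAuth variant might look like this sketch (whether the add-on's Bearer token is required alongside the user credentials is an assumption here):

```bash
# Sketch: validate Home Assistant user credentials via the Supervisor.
# Uses the BasicAuth variant; the JSON and form-urlencoded bodies listed
# above work the same way.
curl -sSL -X POST \
  -H "Authorization: Bearer ${SUPERVISOR_TOKEN}" \
  -u "username:password" \
  http://supervisor/auth
```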
- POST `/auth/reset`

```json
{
  "username": "xy",
  "password": "new-password"
}
```
Dockerfile (30 lines changed)
@@ -1,24 +1,29 @@

ARG BUILD_FROM
FROM $BUILD_FROM

ENV \
    S6_SERVICES_GRACETIME=10000 \
    SUPERVISOR_API=http://localhost

# Install base
RUN apk add --no-cache \
    eudev \
    eudev-libs \
    git \
    glib \
    libffi \
    libpulse \
    musl \
    openssl \
    socat
RUN \
    apk add --no-cache \
        eudev \
        eudev-libs \
        git \
        glib \
        libffi \
        libpulse \
        musl \
        openssl

ARG BUILD_ARCH
WORKDIR /usr/src

# Install requirements
COPY requirements.txt .
RUN export MAKEFLAGS="-j$(nproc)" \
RUN \
    export MAKEFLAGS="-j$(nproc)" \
    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
        "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
        -r ./requirements.txt \

@@ -26,7 +31,8 @@ RUN export MAKEFLAGS="-j$(nproc)" \

# Install Home Assistant Supervisor
COPY . supervisor
RUN pip3 install --no-cache-dir -e ./supervisor \
RUN \
    pip3 install --no-cache-dir -e ./supervisor \
    && python3 -m compileall ./supervisor/supervisor
LICENSE (4 lines changed)
@@ -178,7 +178,7 @@

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a

@@ -186,7 +186,7 @@

      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2017 Pascal Vizeli
   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
README.md (36 lines changed)
@@ -1,28 +1,32 @@

[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)

# Home Assistant Supervisor

## First private cloud solution for home automation

Hass.io is a Docker-based system for managing your Home Assistant installation
and related applications. The system is controlled via Home Assistant which
communicates with the Supervisor. The Supervisor provides an API to manage the
installation. This includes changing network settings or installing
and updating software.

Home Assistant (formerly Hass.io) is a container-based system for managing your
Home Assistant Core installation and related applications. The system is
controlled via Home Assistant which communicates with the Supervisor. The
Supervisor provides an API to manage the installation. This includes changing
network settings or installing and updating software.

## Installation

Installation instructions can be found at <https://home-assistant.io/hassio>.
Installation instructions can be found at https://home-assistant.io/getting-started.

## Development

The development of the Supervisor is a bit tricky. Not difficult, but tricky.
For small changes and bugfixes you can just follow this, but for significant changes open an RFC first.
Development instructions can be found [here][development].

- You can use the builder to build your Supervisor: https://github.com/home-assistant/hassio-builder
- Go into a HassOS device or VM and pull your Supervisor.
- Set developer mode with the CLI: `hassio supervisor options --channel=dev`
- Tag it as `homeassistant/xy-hassio-supervisor:latest`
- Restart the service: `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
- Test your changes

## Release

For small bugfixes or improvements, make a PR. For significant changes, open an RFC first.
Releases are done in 3 stages (channels) with this structure:

1. Pull requests are merged to the `main` branch.
2. A new build is pushed to the `dev` stage.
3. Releases are published.
4. A new build is pushed to the `beta` stage.
5. The [`stable.json`][stable] file is updated.
6. The build that was pushed to `beta` will now be pushed to `stable`.

[development]: https://developers.home-assistant.io/docs/supervisor/development
[stable]: https://github.com/home-assistant/version/blob/master/stable.json
@@ -1,52 +0,0 @@

# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - master
      - dev
pr:
  - dev
variables:
  - name: versionHadolint
    value: "v1.16.3"

jobs:
  - job: "Tox"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: |
          sudo apt-get update
          sudo apt-get install -y libpulse0 libudev1
        displayName: "Install Host library"
      - task: UsePythonVersion@0
        displayName: "Use Python 3.7"
        inputs:
          versionSpec: "3.7"
      - script: pip install tox
        displayName: "Install Tox"
      - script: tox
        displayName: "Run Tox"
  - job: "JQ"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: sudo apt-get install -y jq
        displayName: "Install JQ"
      - bash: |
          shopt -s globstar
          cat **/*.json | jq '.'
        displayName: "Run JQ"
  - job: "Hadolint"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
        displayName: "Install Hadolint"
      - script: |
          sudo docker run --rm -i \
            -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
            hadolint/hadolint:$(versionHadolint) < Dockerfile
        displayName: "Run Hadolint"
@@ -1,53 +0,0 @@

# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - dev
  tags:
    include:
      - "*"
pr: none
variables:
  - name: versionBuilder
    value: "7.0"
  - group: docker

jobs:
  - job: "VersionValidate"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - task: UsePythonVersion@0
        displayName: "Use Python 3.7"
        inputs:
          versionSpec: "3.7"
      - script: |
          setup_version="$(python setup.py -V)"
          branch_version="$(Build.SourceBranchName)"

          if [ "${branch_version}" == "dev" ]; then
            exit 0
          elif [ "${setup_version}" != "${branch_version}" ]; then
            echo "Version of tag ${branch_version} doesn't match ${setup_version}!"
            exit 1
          fi
        displayName: "Check version of branch/tag"
  - job: "Release"
    dependsOn:
      - "VersionValidate"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
        displayName: "Docker hub login"
      - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
        displayName: "Install Builder"
      - script: |
          sudo docker run --rm --privileged \
            -v ~/.docker:/root/.docker \
            -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
            homeassistant/amd64-builder:$(versionBuilder) \
            --generic $(Build.SourceBranchName) --all -t /data
        displayName: "Build Release"
@@ -1,60 +0,0 @@

# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - dev
pr: none
variables:
  - name: versionWheels
    value: "1.6-3.7-alpine3.11"
  - group: wheels

jobs:
  - job: "Wheels"
    timeoutInMinutes: 360
    pool:
      vmImage: "ubuntu-latest"
    strategy:
      maxParallel: 5
      matrix:
        amd64:
          buildArch: "amd64"
        i386:
          buildArch: "i386"
        armhf:
          buildArch: "armhf"
        armv7:
          buildArch: "armv7"
        aarch64:
          buildArch: "aarch64"
    steps:
      - script: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends \
            qemu-user-static \
            binfmt-support \
            curl

          sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
          sudo update-binfmts --enable qemu-arm
          sudo update-binfmts --enable qemu-aarch64
        displayName: "Initial cross build"
      - script: |
          mkdir -p .ssh
          echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
          ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
          chmod 600 .ssh/*
        displayName: "Install ssh key"
      - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
        displayName: "Install wheels builder"
      - script: |
          sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
            homeassistant/$(buildArch)-wheels:$(versionWheels) \
            --apk "build-base;libffi-dev;openssl-dev" \
            --index $(wheelsIndex) \
            --requirement requirements.txt \
            --upload rsync \
            --remote wheels@$(wheelsHost):/opt/wheels
        displayName: "Run wheels build"
build.json (10 lines changed)
@@ -1,11 +1,11 @@

{
  "image": "homeassistant/{arch}-hassio-supervisor",
  "build_from": {
    "aarch64": "homeassistant/aarch64-base-python:3.7-alpine3.11",
    "armhf": "homeassistant/armhf-base-python:3.7-alpine3.11",
    "armv7": "homeassistant/armv7-base-python:3.7-alpine3.11",
    "amd64": "homeassistant/amd64-base-python:3.7-alpine3.11",
    "i386": "homeassistant/i386-base-python:3.7-alpine3.11"
    "aarch64": "homeassistant/aarch64-base-python:3.8-alpine3.12",
    "armhf": "homeassistant/armhf-base-python:3.8-alpine3.12",
    "armv7": "homeassistant/armv7-base-python:3.8-alpine3.12",
    "amd64": "homeassistant/amd64-base-python:3.8-alpine3.12",
    "i386": "homeassistant/i386-base-python:3.8-alpine3.12"
  },
  "labels": {
    "io.hass.type": "supervisor"
codecov.yaml (new file, 11 lines)
@@ -0,0 +1,11 @@

codecov:
  branch: dev
coverage:
  status:
    project:
      default:
        target: 40
        threshold: 0.09
comment: false
github_checks:
  annotations: false
Submodule home-assistant-polymer updated: 8518f774d4...e06642e892
pylintrc (19 lines changed)
@@ -1,27 +1,30 @@

[MASTER]
reports=no
jobs=2

good-names=id,i,j,k,ex,Run,_,fp,T

# Reasons disabled:
# format - handled by black
# locally-disabled - it spams too much
# duplicate-code - unavoidable
# cyclic-import - doesn't test if both import on load
# abstract-class-little-used - prevents from setting right foundation
# abstract-class-not-used - is flaky, should not show up but does
# unused-argument - generic callbacks and setup methods create a lot of warnings
# global-statement - used for the on-demand requirement installation
# redefined-variable-type - this is Python, we're duck typing!
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# abstract-method - with intro of async there are always methods missing

disable=
    format,
    abstract-class-little-used,
    abstract-class-not-used,
    abstract-method,
    cyclic-import,
    duplicate-code,
    global-statement,
    locally-disabled,
    no-else-return,
    no-self-use,
    not-context-manager,
    redefined-variable-type,
    too-few-public-methods,

@@ -34,14 +37,6 @@ disable=

    too-many-return-statements,
    too-many-statements,
    unused-argument,
    line-too-long,
    bad-continuation,
    too-few-public-methods,
    no-self-use,
    not-async-context-manager,
    too-many-locals,
    too-many-branches,
    no-else-return

[EXCEPTIONS]
overgeneral-exceptions=Exception
@@ -1,18 +1,20 @@

aiohttp==3.6.1
aiohttp==3.7.3
async_timeout==3.0.1
attrs==19.3.0
cchardet==2.1.5
colorlog==4.1.0
atomicwrites==1.4.0
attrs==20.3.0
awesomeversion==21.2.0
brotli==1.0.9
cchardet==2.1.7
colorlog==4.7.2
cpe==1.2.1
cryptography==2.8
docker==4.2.0
gitpython==3.1.0
jinja2==2.11.1
packaging==20.1
ptvsd==4.3.2
pulsectl==20.2.2
pytz==2019.3
cryptography==3.3.1
debugpy==1.2.1
docker==4.4.1
gitpython==3.1.12
jinja2==2.11.3
pulsectl==20.5.1
pytz==2021.1
pyudev==0.22.0
ruamel.yaml==0.15.100
uvloop==0.14.0
voluptuous==0.11.7
sentry-sdk==0.19.5
voluptuous==0.12.1
@@ -1,6 +1,14 @@

flake8==3.7.9
pylint==2.4.4
pytest==5.3.5
pytest-timeout==1.3.4
black==20.8b1
codecov==2.1.11
coverage==5.4
flake8-docstrings==1.5.0
flake8==3.8.4
pre-commit==2.10.0
pydocstyle==5.1.1
pylint==2.6.0
pytest-aiohttp==0.3.0
black==19.10b0
pytest-asyncio==0.12.0 # NB!: Versions over 0.12.0 break pytest-aiohttp (https://github.com/aio-libs/pytest-aiohttp/issues/16)
pytest-cov==2.11.1
pytest-timeout==1.4.2
pytest==6.2.2
pyupgrade==2.9.0
@@ -2,8 +2,19 @@

# ==============================================================================
# Start udev service
# ==============================================================================

if bashio::fs.directory_exists /run/udev && ! bashio::fs.file_exists /run/.old_udev; then
    bashio::log.info "Using udev information from host"
    bashio::exit.ok
fi

bashio::log.info "Setup udev backend inside container"
udevd --daemon

bashio::log.info "Update udev informations"
udevadm trigger
udevadm settle
bashio::log.info "Update udev information"
touch /run/.old_udev
if udevadm trigger; then
    udevadm settle || true
else
    bashio::log.warning "Triggering of udev rules failed!"
fi
@@ -26,7 +26,7 @@ autospawn = no

; daemon-binary = /usr/bin/pulseaudio
; extra-arguments = --log-target=syslog

; cookie-file =
cookie-file = /run/pulse-cookie

; enable-shm = yes
; shm-size-bytes = 0 # setting this 0 will use the system-default, usually 64 MiB
@@ -1,5 +1,8 @@

#!/usr/bin/execlineb -S0
#!/usr/bin/execlineb -S1
# ==============================================================================
# Take down the S6 supervision tree when Supervisor fails
# ==============================================================================
s6-svscanctl -t /var/run/s6/services
if { s6-test ${1} -ne 100 }
if { s6-test ${1} -ne 256 }

redirfd -w 2 /dev/null s6-svscanctl -t /var/run/s6/services
@@ -1,5 +1,7 @@

#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start Service service
# Start Supervisor service
# ==============================================================================
exec python3 -m supervisor
export LD_PRELOAD="/usr/local/lib/libjemalloc.so.2"

exec python3 -m supervisor
rootfs/etc/services.d/watchdog/finish (new file, 8 lines)
@@ -0,0 +1,8 @@

#!/usr/bin/execlineb -S1
# ==============================================================================
# Take down the S6 supervision tree when Watchdog fails
# ==============================================================================
if { s6-test ${1} -ne 0 }
if { s6-test ${1} -ne 256 }

s6-svscanctl -t /var/run/s6/services
rootfs/etc/services.d/watchdog/run (new file, 34 lines)
@@ -0,0 +1,34 @@

#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start Watchdog service
# ==============================================================================
declare failed_count=0
declare supervisor_state

bashio::log.info "Starting local supervisor watchdog..."

while [[ failed_count -lt 2 ]];
do
    sleep 300
    supervisor_state="$(cat /run/supervisor)"

    if [[ "${supervisor_state}" = "running" ]]; then

        # Check API
        if bashio::supervisor.ping; then
            failed_count=0
        else
            bashio::log.warning "Possible issue with Supervisor API health"
            ((failed_count++))
        fi

    elif [[ "close stopping" = *"${supervisor_state}"* ]]; then
        bashio::log.warning "Possible issue during shutdown"
        ((failed_count++))
    else
        failed_count=0
    fi

done

bashio::exit.nok "Watchdog detected issue with Supervisor - taking container down!"
scripts/build-supervisor.sh (new executable file, 28 lines)
@@ -0,0 +1,28 @@

#!/bin/bash
source "${BASH_SOURCE[0]%/*}/common.sh"

set -eE

DOCKER_TIMEOUT=30
DOCKER_PID=0

function build_supervisor() {
    docker pull homeassistant/amd64-builder:dev

    docker run --rm \
        --privileged \
        -v /run/docker.sock:/run/docker.sock \
        -v "$(pwd):/data" \
        homeassistant/amd64-builder:dev \
        --generic latest \
        --target /data \
        --test \
        --amd64 \
        --no-cache
}

echo "Build Supervisor"
start_docker
trap "stop_docker" ERR

build_supervisor
scripts/common.sh (new file, 58 lines)
@@ -0,0 +1,58 @@

#!/bin/bash

function start_docker() {
    local starttime
    local endtime

    echo "Starting docker."
    dockerd 2> /dev/null &
    DOCKER_PID=$!

    echo "Waiting for docker to initialize..."
    starttime="$(date +%s)"
    endtime="$(date +%s)"
    until docker info >/dev/null 2>&1; do
        if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
            sleep 1
            endtime=$(date +%s)
        else
            echo "Timeout while waiting for docker to come up"
            exit 1
        fi
    done
    echo "Docker was initialized"
}

function stop_docker() {
    local starttime
    local endtime

    echo "Stopping in container docker..."
    if [ "$DOCKER_PID" -gt 0 ] && kill -0 "$DOCKER_PID" 2> /dev/null; then
        starttime="$(date +%s)"
        endtime="$(date +%s)"

        # Now wait for it to die
        kill "$DOCKER_PID"
        while kill -0 "$DOCKER_PID" 2> /dev/null; do
            if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
                sleep 1
                endtime=$(date +%s)
            else
                echo "Timeout while waiting for container docker to die"
                exit 1
            fi
        done
    else
        echo "Your host might have been left with unreleased resources"
    fi
}

function cleanup_lastboot() {
    if [[ -f /workspaces/test_supervisor/config.json ]]; then
        echo "Cleaning up last boot"
        cp /workspaces/test_supervisor/config.json /tmp/config.json
        jq -rM 'del(.last_boot)' /tmp/config.json > /workspaces/test_supervisor/config.json
        rm /tmp/config.json
    fi
}
scripts/run-supervisor.sh (new executable file, 102 lines)
@@ -0,0 +1,102 @@

#!/bin/bash
source "${BASH_SOURCE[0]%/*}/common.sh"
source "${BASH_SOURCE[0]%/*}/build-supervisor.sh"

set -eE

DOCKER_TIMEOUT=30
DOCKER_PID=0

function cleanup_docker() {
    echo "Cleaning up stopped containers..."
    docker rm $(docker ps -a -q) || true
}

function run_supervisor() {
    mkdir -p /workspaces/test_supervisor

    echo "Start Supervisor"
    docker run --rm --privileged \
        --name hassio_supervisor \
        --security-opt seccomp=unconfined \
        --security-opt apparmor:unconfined \
        -v /run/docker.sock:/run/docker.sock:rw \
        -v /run/dbus:/run/dbus:ro \
        -v /run/udev:/run/udev:ro \
        -v "/workspaces/test_supervisor":/data:rw \
        -v /etc/machine-id:/etc/machine-id:ro \
        -v /workspaces/supervisor:/usr/src/supervisor \
        -e SUPERVISOR_SHARE="/workspaces/test_supervisor" \
        -e SUPERVISOR_NAME=hassio_supervisor \
        -e SUPERVISOR_DEV=1 \
        -e SUPERVISOR_MACHINE="qemux86-64" \
        homeassistant/amd64-hassio-supervisor:latest
}

function init_dbus() {
    if pgrep dbus-daemon; then
        echo "Dbus is running"
        return 0
    fi

    echo "Startup dbus"
    mkdir -p /var/lib/dbus
    cp -f /etc/machine-id /var/lib/dbus/machine-id

    # cleanups
    mkdir -p /run/dbus
    rm -f /run/dbus/pid

    # run
    dbus-daemon --system --print-address
}

function init_udev() {
    if pgrep systemd-udevd; then
        echo "udev is running"
        return 0
    fi

    echo "Startup udev"

    # cleanups
    mkdir -p /run/udev

    # run
    /lib/systemd/systemd-udevd --daemon
    sleep 3
    udevadm trigger && udevadm settle
}

echo "Run Supervisor"

start_docker
trap "stop_docker" ERR

if [ "$( docker container inspect -f '{{.State.Status}}' hassio_supervisor )" == "running" ]; then
    echo "Restarting Supervisor"
    docker rm -f hassio_supervisor
    init_dbus
    init_udev
    cleanup_lastboot
    run_supervisor
    stop_docker
else
    echo "Starting Supervisor"
    docker system prune -f
    build_supervisor
    cleanup_lastboot
    cleanup_docker
    init_dbus
    init_udev
    run_supervisor
    stop_docker
fi
@@ -1,136 +0,0 @@
#!/bin/bash
set -eE

DOCKER_TIMEOUT=30
DOCKER_PID=0


function start_docker() {
    local starttime
    local endtime

    echo "Starting docker."
    dockerd 2> /dev/null &
    DOCKER_PID=$!

    echo "Waiting for docker to initialize..."
    starttime="$(date +%s)"
    endtime="$(date +%s)"
    until docker info >/dev/null 2>&1; do
        if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
            sleep 1
            endtime=$(date +%s)
        else
            echo "Timeout while waiting for docker to come up"
            exit 1
        fi
    done
    echo "Docker was initialized"
}


function stop_docker() {
    local starttime
    local endtime

    echo "Stopping in container docker..."
    if [ "$DOCKER_PID" -gt 0 ] && kill -0 "$DOCKER_PID" 2> /dev/null; then
        starttime="$(date +%s)"
        endtime="$(date +%s)"

        # Now wait for it to die
        kill "$DOCKER_PID"
        while kill -0 "$DOCKER_PID" 2> /dev/null; do
            if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
                sleep 1
                endtime=$(date +%s)
            else
                echo "Timeout while waiting for container docker to die"
                exit 1
            fi
        done
    else
        echo "Your host might have been left with unreleased resources"
    fi
}


function build_supervisor() {
    docker pull homeassistant/amd64-builder:dev

    docker run --rm --privileged \
        -v /run/docker.sock:/run/docker.sock -v "$(pwd):/data" \
        homeassistant/amd64-builder:dev \
        --generic dev -t /data --test --amd64 --no-cache
}


function cleanup_lastboot() {
    if [[ -f /workspaces/test_supervisor/config.json ]]; then
        echo "Cleaning up last boot"
        cp /workspaces/test_supervisor/config.json /tmp/config.json
        jq -rM 'del(.last_boot)' /tmp/config.json > /workspaces/test_supervisor/config.json
        rm /tmp/config.json
    fi
}


function cleanup_docker() {
    echo "Cleaning up stopped containers..."
    docker rm $(docker ps -a -q) || true
}


function install_cli() {
    docker pull homeassistant/amd64-hassio-cli:dev
}


function setup_test_env() {
    mkdir -p /workspaces/test_supervisor

    echo "Start Supervisor"
    docker run --rm --privileged \
        --name hassio_supervisor \
        --security-opt seccomp=unconfined \
        --security-opt apparmor:unconfined \
        -v /run/docker.sock:/run/docker.sock \
        -v /run/dbus:/run/dbus \
        -v "/workspaces/test_supervisor":/data \
        -v /etc/machine-id:/etc/machine-id:ro \
        -e SUPERVISOR_SHARE="/workspaces/test_supervisor" \
        -e SUPERVISOR_NAME=hassio_supervisor \
        -e SUPERVISOR_DEV=1 \
        -e HOMEASSISTANT_REPOSITORY="homeassistant/qemux86-64-homeassistant" \
        homeassistant/amd64-hassio-supervisor:latest
}


function init_dbus() {
    if pgrep dbus-daemon; then
        echo "Dbus is running"
        return 0
    fi

    echo "Startup dbus"
    mkdir -p /var/lib/dbus
    cp -f /etc/machine-id /var/lib/dbus/machine-id

    # run
    mkdir -p /run/dbus
    dbus-daemon --system --print-address
}

echo "Start Test-Env"

start_docker
trap "stop_docker" ERR

build_supervisor
install_cli
cleanup_lastboot
cleanup_docker
init_dbus
setup_test_env
stop_docker
@@ -14,5 +14,5 @@ cd hassio
./script/build_hassio

# Copy frontend
rm -f ../../supervisor/hassio/api/panel/chunk.*
cp -rf build/* ../../supervisor/api/panel/
rm -rf ../../supervisor/api/panel/*
cp -rf build/* ../../supervisor/api/panel/
15	setup.cfg
@@ -11,7 +11,20 @@ default_section = THIRDPARTY
forced_separate = tests
combine_as_imports = true
use_parentheses = true
known_first_party = supervisor,tests

[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
doctests = True
max-line-length = 88
ignore = E501, W503
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# W504 line break after binary operator
ignore =
    E501,
    W503,
    E203,
    D202,
    W504
22	setup.py
@@ -25,19 +25,33 @@ setup(
        "Topic :: Scientific/Engineering :: Atmospheric Science",
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
    ],
    keywords=["docker", "home-assistant", "api"],
    zip_safe=False,
    platforms="any",
    packages=[
        "supervisor",
        "supervisor.docker",
        "supervisor.addons",
        "supervisor.api",
        "supervisor.dbus.network",
        "supervisor.dbus.payloads",
        "supervisor.dbus",
        "supervisor.discovery.services",
        "supervisor.discovery",
        "supervisor.docker",
        "supervisor.homeassistant",
        "supervisor.host",
        "supervisor.jobs",
        "supervisor.misc",
        "supervisor.utils",
        "supervisor.plugins",
        "supervisor.resolution.evaluations",
        "supervisor.resolution",
        "supervisor.services.modules",
        "supervisor.services",
        "supervisor.snapshots",
        "supervisor.store",
        "supervisor.utils",
        "supervisor",
    ],
    include_package_data=True,
)
@@ -2,24 +2,25 @@
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
from pathlib import Path
import sys

from supervisor import bootstrap

_LOGGER: logging.Logger = logging.getLogger(__name__)

CONTAINER_OS_STARTUP_CHECK = Path("/run/os/startup-marker")


def run_os_startup_check_cleanup() -> None:
    """Cleanup OS startup check."""
    if not CONTAINER_OS_STARTUP_CHECK.exists():
        return

def initialize_event_loop():
    """Attempt to use uvloop."""
    try:
        # pylint: disable=import-outside-toplevel
        import uvloop

        uvloop.install()
    except ImportError:
        pass

    return asyncio.get_event_loop()
        CONTAINER_OS_STARTUP_CHECK.unlink()
    except OSError as err:
        _LOGGER.warning("Not able to remove the startup health file: %s", err)


# pylint: disable=invalid-name
@@ -27,37 +28,36 @@ if __name__ == "__main__":
    bootstrap.initialize_logging()

    # Init async event loop
    loop = initialize_event_loop()
    loop = asyncio.get_event_loop()

    # Check if all information are available to setup Supervisor
    if not bootstrap.check_environment():
        sys.exit(1)
    bootstrap.check_environment()

    # init executor pool
    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
    loop.set_default_executor(executor)

    _LOGGER.info("Initialize Supervisor setup")
    _LOGGER.info("Initializing Supervisor setup")
    coresys = loop.run_until_complete(bootstrap.initialize_coresys())
    loop.run_until_complete(coresys.core.connect())

    bootstrap.supervisor_debugger(coresys)
    bootstrap.migrate_system_env(coresys)

    _LOGGER.info("Setup Supervisor")
    # Signal health startup for container
    run_os_startup_check_cleanup()

    _LOGGER.info("Setting up Supervisor")
    loop.run_until_complete(coresys.core.setup())

    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, coresys)

    try:
        _LOGGER.info("Run Supervisor")
        _LOGGER.info("Running Supervisor")
        loop.run_forever()
    finally:
        _LOGGER.info("Stopping Supervisor")
        loop.run_until_complete(coresys.core.stop())
        executor.shutdown(wait=False)
        loop.close()

    _LOGGER.info("Close Supervisor")
    sys.exit(0)
    _LOGGER.info("Closing Supervisor")
    sys.exit(coresys.core.exit_code)
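The startup marker gives the host OS a cheap health handshake: something outside the container creates /run/os/startup-marker before the Supervisor launches, and run_os_startup_check_cleanup() deletes it once bootstrap got far enough. A hypothetical host-side probe built on that contract; the path is the real constant above, but the function and timeout are illustrative only:

from pathlib import Path
import time

CONTAINER_OS_STARTUP_CHECK = Path("/run/os/startup-marker")

def supervisor_came_up(timeout: float = 60.0) -> bool:
    """Hypothetical watcher: the marker disappearing means the
    Supervisor reached run_os_startup_check_cleanup()."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if not CONTAINER_OS_STARTUP_CHECK.exists():
            return True
        time.sleep(1)
    return False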
@@ -5,17 +5,24 @@ import logging
import tarfile
from typing import Dict, List, Optional, Union

from ..const import BOOT_AUTO, STATE_STARTED
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
    AddonConfigurationError,
    AddonsError,
    AddonsJobError,
    AddonsNotSupportedError,
    CoreDNSError,
    DockerAPIError,
    DockerError,
    DockerNotFound,
    HomeAssistantAPIError,
    HostAppArmorError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils import check_exception_chain
from .addon import Addon
from .data import AddonsData

@@ -37,7 +44,7 @@ class AddonManager(CoreSysAttributes):
    @property
    def all(self) -> List[AnyAddon]:
        """Return a list of all add-ons."""
        addons = {**self.store, **self.local}
        addons: Dict[str, AnyAddon] = {**self.store, **self.local}
        return list(addons.values())

    @property
@@ -45,7 +52,7 @@ class AddonManager(CoreSysAttributes):
        """Return a list of all installed add-ons."""
        return list(self.local.values())

    def get(self, addon_slug: str) -> Optional[AnyAddon]:
    def get(self, addon_slug: str, local_only: bool = False) -> Optional[AnyAddon]:
        """Return an add-on from slug.

        Prio:
@@ -54,12 +61,14 @@ class AddonManager(CoreSysAttributes):
        """
        if addon_slug in self.local:
            return self.local[addon_slug]
        return self.store.get(addon_slug)
        if not local_only:
            return self.store.get(addon_slug)
        return None

    def from_token(self, token: str) -> Optional[Addon]:
        """Return an add-on from Supervisor token."""
        for addon in self.installed:
            if token == addon.hassio_token:
            if token == addon.supervisor_token:
                return addon
        return None

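The new local_only flag makes the documented priority explicit: installed add-ons always win, and the store fallback can now be skipped entirely. A short usage sketch; the manager handle and slug are assumed for illustration:

# Installed add-on if present, otherwise the store entry (old behaviour):
addon = coresys.addons.get("core_ssh")

# Only consider installed add-ons; returns None if "core_ssh" is not installed:
addon = coresys.addons.get("core_ssh", local_only=True)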
@@ -78,31 +87,70 @@ class AddonManager(CoreSysAttributes):
        # Sync DNS
        await self.sync_dns()

    async def boot(self, stage: str) -> None:
    async def boot(self, stage: AddonStartup) -> None:
        """Boot add-ons with mode auto."""
        tasks = []
        tasks: List[Addon] = []
        for addon in self.installed:
            if addon.boot != BOOT_AUTO or addon.startup != stage:
            if addon.boot != AddonBoot.AUTO or addon.startup != stage:
                continue
            tasks.append(addon.start())
            tasks.append(addon)

        _LOGGER.info("Phase '%s' start %d add-ons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks)
            await asyncio.sleep(self.sys_config.wait_boot)
        # Evaluate add-ons which need to be started
        _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
        if not tasks:
            return

    async def shutdown(self, stage: str) -> None:
        # Start Add-ons sequential
        # avoid issue on slow IO
        for addon in tasks:
            try:
                await addon.start()
            except AddonsError as err:
                # Check if there is a system/user issue
                if check_exception_chain(
                    err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
                ):
                    addon.boot = AddonBoot.MANUAL
                    addon.save_persist()
            except Exception as err:  # pylint: disable=broad-except
                self.sys_capture_exception(err)
            else:
                continue

            _LOGGER.warning("Can't start Add-on %s", addon.slug)

        await asyncio.sleep(self.sys_config.wait_boot)

    async def shutdown(self, stage: AddonStartup) -> None:
        """Shutdown addons."""
        tasks = []
        tasks: List[Addon] = []
        for addon in self.installed:
            if await addon.state() != STATE_STARTED or addon.startup != stage:
            if addon.state != AddonState.STARTED or addon.startup != stage:
                continue
            tasks.append(addon.stop())
            tasks.append(addon)

        _LOGGER.info("Phase '%s' stop %d add-ons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks)
        # Evaluate add-ons which need to be stopped
        _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
        if not tasks:
            return

        # Stop Add-ons sequential
        # avoid issue on slow IO
        for addon in tasks:
            try:
                await addon.stop()
            except Exception as err:  # pylint: disable=broad-except
                _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
                self.sys_capture_exception(err)
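The triage in the boot loop hinges on check_exception_chain walking the raised error's implicit exception chain, so a DockerNotFound buried two causes deep still flips the add-on to AddonBoot.MANUAL. A plausible shape for that helper, sketched for reference; the real one lives in supervisor.utils and may differ in detail:

def check_exception_chain(err: BaseException, object_type) -> bool:
    """Return True if err, or any exception in its __context__ chain,
    is an instance of object_type (a class or a tuple of classes)."""
    if issubclass(type(err), object_type):
        return True
    if not err.__context__:
        return False
    return check_exception_chain(err.__context__, object_type)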
    @Job(
        conditions=[
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ],
        on_condition=AddonsJobError,
    )
    async def install(self, slug: str) -> None:
        """Install an add-on."""
        if slug in self.local:
@@ -122,7 +170,9 @@ class AddonManager(CoreSysAttributes):
        addon = Addon(self.coresys, slug)

        if not addon.path_data.is_dir():
            _LOGGER.info("Create Home Assistant add-on data folder %s", addon.path_data)
            _LOGGER.info(
                "Creating Home Assistant add-on data folder %s", addon.path_data
            )
            addon.path_data.mkdir()

        # Setup/Fix AppArmor profile
@@ -130,24 +180,31 @@ class AddonManager(CoreSysAttributes):

        try:
            await addon.instance.install(store.version, store.image)
        except DockerAPIError:
        except DockerError as err:
            self.data.uninstall(addon)
            raise AddonsError() from None
            raise AddonsError() from err
        else:
            self.local[slug] = addon
            _LOGGER.info("Add-on '%s' successfully installed", slug)

            # Reload ingress tokens
            if addon.with_ingress:
                await self.sys_ingress.reload()

            _LOGGER.info("Add-on '%s' successfully installed", slug)

    async def uninstall(self, slug: str) -> None:
        """Remove an add-on."""
        if slug not in self.local:
            _LOGGER.warning("Add-on %s is not installed", slug)
            return
        addon = self.local.get(slug)
        addon = self.local[slug]

        try:
            await addon.instance.remove()
        except DockerAPIError:
            raise AddonsError() from None
        except DockerError as err:
            raise AddonsError() from err
        else:
            addon.state = AddonState.UNKNOWN

        await addon.remove_data()

@@ -166,6 +223,11 @@ class AddonManager(CoreSysAttributes):
            with suppress(HomeAssistantAPIError):
                await self.sys_ingress.update_hass_panel(addon)

            # Cleanup Ingress dynamic port assignment
            if addon.with_ingress:
                self.sys_create_task(self.sys_ingress.reload())
                self.sys_ingress.del_dynamic_port(slug)

        # Cleanup discovery data
        for message in self.sys_discovery.list_messages:
            if message.addon != addon.slug:
@@ -183,17 +245,25 @@ class AddonManager(CoreSysAttributes):

        _LOGGER.info("Add-on '%s' successfully removed", slug)

    @Job(
        conditions=[
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ],
        on_condition=AddonsJobError,
    )
    async def update(self, slug: str) -> None:
        """Update add-on."""
        if slug not in self.local:
            _LOGGER.error("Add-on %s is not installed", slug)
            raise AddonsError()
        addon = self.local.get(slug)
        addon = self.local[slug]

        if addon.is_detached:
            _LOGGER.error("Add-on %s is not available inside store", slug)
            raise AddonsError()
        store = self.store.get(slug)
        store = self.store[slug]

        if addon.version == store.version:
            _LOGGER.warning("No update available for add-on %s", slug)
@@ -205,15 +275,15 @@ class AddonManager(CoreSysAttributes):
            raise AddonsNotSupportedError()

        # Update instance
        last_state = await addon.state()
        last_state: AddonState = addon.state
        try:
            await addon.instance.update(store.version, store.image)

            # Cleanup
            with suppress(DockerAPIError):
            with suppress(DockerError):
                await addon.instance.cleanup()
        except DockerAPIError:
            raise AddonsError() from None
        except DockerError as err:
            raise AddonsError() from err
        else:
            self.data.update(store)
            _LOGGER.info("Add-on '%s' successfully updated", slug)
@@ -222,20 +292,28 @@ class AddonManager(CoreSysAttributes):
            await addon.install_apparmor()

        # restore state
        if last_state == STATE_STARTED:
        if last_state == AddonState.STARTED:
            await addon.start()

    @Job(
        conditions=[
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ],
        on_condition=AddonsJobError,
    )
    async def rebuild(self, slug: str) -> None:
        """Perform a rebuild of local build add-on."""
        if slug not in self.local:
            _LOGGER.error("Add-on %s is not installed", slug)
            raise AddonsError()
        addon = self.local.get(slug)
        addon = self.local[slug]

        if addon.is_detached:
            _LOGGER.error("Add-on %s is not available inside store", slug)
            raise AddonsError()
        store = self.store.get(slug)
        store = self.store[slug]

        # Check if a rebuild is possible now
        if addon.version != store.version:
@@ -246,20 +324,28 @@ class AddonManager(CoreSysAttributes):
            raise AddonsNotSupportedError()

        # remove docker container but not addon config
        last_state = await addon.state()
        last_state: AddonState = addon.state
        try:
            await addon.instance.remove()
            await addon.instance.install(addon.version)
        except DockerAPIError:
            raise AddonsError() from None
        except DockerError as err:
            raise AddonsError() from err
        else:
            self.data.update(store)
            _LOGGER.info("Add-on '%s' successfully rebuilt", slug)

        # restore state
        if last_state == STATE_STARTED:
        if last_state == AddonState.STARTED:
            await addon.start()

    @Job(
        conditions=[
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ],
        on_condition=AddonsJobError,
    )
    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
        """Restore state of an add-on."""
        if slug not in self.local:
@@ -278,9 +364,11 @@ class AddonManager(CoreSysAttributes):

        # Update ingress
        if addon.with_ingress:
            await self.sys_ingress.reload()
            with suppress(HomeAssistantAPIError):
                await self.sys_ingress.update_hass_panel(addon)

    @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST])
    async def repair(self) -> None:
        """Repair local add-ons."""
        needs_repair: List[Addon] = []
@@ -296,12 +384,8 @@ class AddonManager(CoreSysAttributes):
            return

        for addon in needs_repair:
            _LOGGER.info("Start repair for add-on: %s", addon.slug)
            await self.sys_run_in_executor(
                self.sys_docker.network.stale_cleanup, addon.instance.name
            )

            with suppress(DockerAPIError, KeyError):
            _LOGGER.info("Repairing for add-on: %s", addon.slug)
            with suppress(DockerError, KeyError):
                # Need to pull the image again
                if not addon.need_build:
                    await addon.instance.install(addon.version, addon.image)
@@ -323,12 +407,23 @@ class AddonManager(CoreSysAttributes):
        """Sync add-ons DNS names."""
        # Update hosts
        for addon in self.installed:
            if not await addon.instance.is_running():
                continue
            self.sys_dns.add_host(
                ipv4=addon.ip_address, names=[addon.hostname], write=False
            )
            try:
                if not await addon.instance.is_running():
                    continue
            except DockerError as err:
                _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
                self.sys_resolution.create_issue(
                    IssueType.CORRUPT_DOCKER,
                    ContextType.ADDON,
                    reference=addon.slug,
                    suggestions=[SuggestionType.EXECUTE_REPAIR],
                )
                self.sys_capture_exception(err)
            else:
                self.sys_plugins.dns.add_host(
                    ipv4=addon.ip_address, names=[addon.hostname], write=False
                )

        # Write hosts files
        with suppress(CoreDNSError):
            self.sys_dns.write_hosts()
            self.sys_plugins.dns.write_hosts()
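Every mutating operation above now opts in to the same precondition machinery instead of hand-rolled checks: @Job evaluates its conditions before the body runs and raises the on_condition error when one fails. A hedged usage sketch of that shape; the imports are the real modules from this diff, while the class and method are illustrative:

from supervisor.coresys import CoreSysAttributes
from supervisor.exceptions import AddonsJobError
from supervisor.jobs.decorator import Job, JobCondition

class AnyManager(CoreSysAttributes):
    @Job(
        conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
        on_condition=AddonsJobError,  # raised instead of running the body
    )
    async def mutate(self) -> None:
        ...  # only reached when all conditions hold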
@@ -1,4 +1,5 @@
"""Init file for Supervisor add-ons."""
import asyncio
from contextlib import suppress
from copy import deepcopy
from ipaddress import IPv4Address
@@ -9,8 +10,9 @@ import secrets
import shutil
import tarfile
from tempfile import TemporaryDirectory
from typing import Any, Awaitable, Dict, List, Optional
from typing import Any, Awaitable, Dict, List, Optional, Set

import aiohttp
import voluptuous as vol
from voluptuous.humanize import humanize_error

@@ -35,26 +37,33 @@ from ..const import (
    ATTR_USER,
    ATTR_UUID,
    ATTR_VERSION,
    ATTR_WATCHDOG,
    DNS_SUFFIX,
    STATE_STARTED,
    STATE_STOPPED,
    AddonBoot,
    AddonStartup,
    AddonState,
)
from ..coresys import CoreSys
from ..docker.addon import DockerAddon
from ..docker.stats import DockerStats
from ..exceptions import (
    AddonConfigurationError,
    AddonsError,
    AddonsNotSupportedError,
    DockerAPIError,
    DockerError,
    DockerRequestError,
    HostAppArmorError,
    JsonFileError,
)
from ..hardware.data import Device
from ..utils import check_port
from ..utils.apparmor import adjust_profile
from ..utils.json import read_json_file, write_json_file
from ..utils.tar import exclude_filter, secure_path
from ..utils.tar import atomic_contents_add, secure_path
from .model import AddonModel, Data
from .options import AddonOptions
from .utils import remove_data
from .validate import SCHEMA_ADDON_SNAPSHOT, validate_options
from .validate import SCHEMA_ADDON_SNAPSHOT

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -63,24 +72,48 @@ RE_WEBUI = re.compile(
    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$"
)

RE_WATCHDOG = re.compile(
    r"^(?:(?P<s_prefix>https?|tcp)|\[PROTO:(?P<t_proto>\w+)\])"
    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$"
)

RE_OLD_AUDIO = re.compile(r"\d+,\d+")

WATCHDOG_TIMEOUT = aiohttp.ClientTimeout(total=10)


class Addon(AddonModel):
    """Hold data for add-on inside Supervisor."""

    def __init__(self, coresys: CoreSys, slug: str):
        """Initialize data holder."""
        self.coresys: CoreSys = coresys
        super().__init__(coresys, slug)
        self.instance: DockerAddon = DockerAddon(coresys, self)
        self.slug: str = slug
        self.state: AddonState = AddonState.UNKNOWN

    def __repr__(self) -> str:
        """Return internal representation."""
        return f"<Addon: {self.slug}>"

    @property
    def in_progress(self) -> bool:
        """Return True if a task is in progress."""
        return self.instance.in_progress

    async def load(self) -> None:
        """Async initialize of object."""
        with suppress(DockerAPIError):
            await self.instance.attach(tag=self.version)
        with suppress(DockerError):
            await self.instance.attach(version=self.version)

            # Evaluate state
            if await self.instance.is_running():
                self.state = AddonState.STARTED
            else:
                self.state = AddonState.STOPPED

    @property
    def ip_address(self) -> IPv4Address:
        """Return IP of Add-on instance."""
        """Return IP of add-on instance."""
        return self.instance.ip_address

    @property
@@ -118,6 +151,13 @@ class Addon(AddonModel):
        """Return installed version."""
        return self.persist[ATTR_VERSION]

    @property
    def need_update(self) -> bool:
        """Return True if an update is available."""
        if self.is_detached:
            return False
        return self.version != self.latest_version

    @property
    def dns(self) -> List[str]:
        """Return list of DNS name for that add-on."""
@@ -129,20 +169,17 @@ class Addon(AddonModel):
        return {**self.data[ATTR_OPTIONS], **self.persist[ATTR_OPTIONS]}

    @options.setter
    def options(self, value: Optional[Dict[str, Any]]):
    def options(self, value: Optional[Dict[str, Any]]) -> None:
        """Store user add-on options."""
        if value is None:
            self.persist[ATTR_OPTIONS] = {}
        else:
            self.persist[ATTR_OPTIONS] = deepcopy(value)
        self.persist[ATTR_OPTIONS] = {} if value is None else deepcopy(value)

    @property
    def boot(self) -> bool:
    def boot(self) -> AddonBoot:
        """Return boot config with prio local settings."""
        return self.persist.get(ATTR_BOOT, super().boot)

    @boot.setter
    def boot(self, value: bool):
    def boot(self, value: AddonBoot) -> None:
        """Store user boot options."""
        self.persist[ATTR_BOOT] = value

@@ -152,17 +189,32 @@ class Addon(AddonModel):
        return self.persist.get(ATTR_AUTO_UPDATE, super().auto_update)

    @auto_update.setter
    def auto_update(self, value: bool):
    def auto_update(self, value: bool) -> None:
        """Set auto update."""
        self.persist[ATTR_AUTO_UPDATE] = value

    @property
    def watchdog(self) -> bool:
        """Return True if watchdog is enable."""
        return self.persist[ATTR_WATCHDOG]

    @watchdog.setter
    def watchdog(self, value: bool) -> None:
        """Set watchdog enable/disable."""
        if value and self.startup == AddonStartup.ONCE:
            _LOGGER.warning(
                "Ignoring watchdog for %s because startup type is 'once'", self.slug
            )
        else:
            self.persist[ATTR_WATCHDOG] = value

    @property
    def uuid(self) -> str:
        """Return an API token for this add-on."""
        return self.persist[ATTR_UUID]

    @property
    def hassio_token(self) -> Optional[str]:
    def supervisor_token(self) -> Optional[str]:
        """Return access token for Supervisor API."""
        return self.persist.get(ATTR_ACCESS_TOKEN)

@@ -189,7 +241,7 @@ class Addon(AddonModel):
        return self.persist[ATTR_PROTECTED]

    @protected.setter
    def protected(self, value: bool):
    def protected(self, value: bool) -> None:
        """Set add-on in protected mode."""
        self.persist[ATTR_PROTECTED] = value

@@ -199,7 +251,7 @@ class Addon(AddonModel):
        return self.persist.get(ATTR_NETWORK, super().ports)

    @ports.setter
    def ports(self, value: Optional[Dict[str, Optional[int]]]):
    def ports(self, value: Optional[Dict[str, Optional[int]]]) -> None:
        """Set custom ports of add-on."""
        if value is None:
            self.persist.pop(ATTR_NETWORK, None)
@@ -244,10 +296,6 @@ class Addon(AddonModel):
        else:
            port = self.ports.get(f"{t_port}/tcp", t_port)

        # for interface config or port lists
        if isinstance(port, (tuple, list)):
            port = port[-1]

        # lookup the correct protocol from config
        if t_proto:
            proto = "https" if self.options.get(t_proto) else "http"
@@ -273,7 +321,7 @@ class Addon(AddonModel):
        return self.persist[ATTR_INGRESS_PANEL]

    @ingress_panel.setter
    def ingress_panel(self, value: bool):
    def ingress_panel(self, value: bool) -> None:
        """Return True if the add-on access support ingress."""
        self.persist[ATTR_INGRESS_PANEL] = value

@@ -282,86 +330,145 @@ class Addon(AddonModel):
        """Return a pulse profile for output or None."""
        if not self.with_audio:
            return None
        return self.persist.get(ATTR_AUDIO_OUTPUT)

        # Fallback with old audio settings
        # Remove after 210
        output_data = self.persist.get(ATTR_AUDIO_OUTPUT)
        if output_data and RE_OLD_AUDIO.fullmatch(output_data):
            return None
        return output_data

    @audio_output.setter
    def audio_output(self, value: Optional[str]):
        """Set/reset audio output profile settings."""
        if value is None:
            self.persist.pop(ATTR_AUDIO_OUTPUT, None)
        else:
            self.persist[ATTR_AUDIO_OUTPUT] = value
        """Set audio output profile settings."""
        self.persist[ATTR_AUDIO_OUTPUT] = value

    @property
    def audio_input(self) -> Optional[str]:
        """Return pulse profile for input or None."""
        if not self.with_audio:
            return None
        return self.persist.get(ATTR_AUDIO_INPUT)

        # Fallback with old audio settings
        # Remove after 210
        input_data = self.persist.get(ATTR_AUDIO_INPUT)
        if input_data and RE_OLD_AUDIO.fullmatch(input_data):
            return None
        return input_data

    @audio_input.setter
    def audio_input(self, value: Optional[str]):
        """Set/reset audio input settings."""
        if value is None:
            self.persist.pop(ATTR_AUDIO_INPUT, None)
        else:
            self.persist[ATTR_AUDIO_INPUT] = value
    def audio_input(self, value: Optional[str]) -> None:
        """Set audio input settings."""
        self.persist[ATTR_AUDIO_INPUT] = value

    @property
    def image(self):
    def image(self) -> Optional[str]:
        """Return image name of add-on."""
        return self.persist.get(ATTR_IMAGE)

    @property
    def need_build(self):
    def need_build(self) -> bool:
        """Return True if this add-on need a local build."""
        return ATTR_IMAGE not in self.data

    @property
    def path_data(self):
    def path_data(self) -> Path:
        """Return add-on data path inside Supervisor."""
        return Path(self.sys_config.path_addons_data, self.slug)

    @property
    def path_extern_data(self):
    def path_extern_data(self) -> PurePath:
        """Return add-on data path external for Docker."""
        return PurePath(self.sys_config.path_extern_addons_data, self.slug)

    @property
    def path_options(self):
    def path_options(self) -> Path:
        """Return path to add-on options."""
        return Path(self.path_data, "options.json")

    @property
    def path_pulse(self):
    def path_pulse(self) -> Path:
        """Return path to asound config."""
        return Path(self.sys_config.path_tmp, f"{self.slug}_pulse")

    @property
    def path_extern_pulse(self):
    def path_extern_pulse(self) -> Path:
        """Return path to asound config for Docker."""
        return Path(self.sys_config.path_extern_tmp, f"{self.slug}_pulse")

    def save_persist(self):
    @property
    def devices(self) -> Set[Device]:
        """Create a schema for add-on options."""
        raw_schema = self.data[ATTR_SCHEMA]
        if isinstance(raw_schema, bool) or not raw_schema:
            return set()

        # Validate devices
        options_validator = AddonOptions(self.coresys, raw_schema)
        with suppress(vol.Invalid):
            options_validator(self.options)

        return options_validator.devices

    def save_persist(self) -> None:
        """Save data of add-on."""
        self.sys_addons.data.save_data()

    async def write_options(self):
        """Return True if add-on options is written to data."""
        schema = self.schema
        options = self.options
    async def watchdog_application(self) -> bool:
        """Return True if application is running."""
        url = super().watchdog
        if not url:
            return True
        application = RE_WATCHDOG.match(url)

        # extract arguments
        t_port = application.group("t_port")
        t_proto = application.group("t_proto")
        s_prefix = application.group("s_prefix") or ""
        s_suffix = application.group("s_suffix") or ""

        # search host port for this docker port
        if self.host_network:
            port = self.ports.get(f"{t_port}/tcp", t_port)
        else:
            port = t_port

        # TCP monitoring
        if s_prefix == "tcp":
            return await self.sys_run_in_executor(check_port, self.ip_address, port)

        # lookup the correct protocol from config
        if t_proto:
            proto = "https" if self.options.get(t_proto) else "http"
        else:
            proto = s_prefix

        # Make HTTP request
        try:
            url = f"{proto}://{self.ip_address}:{port}{s_suffix}"
            async with self.sys_websession_ssl.get(
                url, timeout=WATCHDOG_TIMEOUT
            ) as req:
                if req.status < 300:
                    return True
        except (asyncio.TimeoutError, aiohttp.ClientError):
            pass

        return False
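RE_WATCHDOG accepts the same URL template syntax as the webui field: a literal scheme (http, https, tcp) or a [PROTO:<option>] placeholder, a mandatory [HOST] token, and a [PORT:<container port>] token that watchdog_application remaps through the add-on's port config under host networking. A quick check of what the regex extracts; the templates are made-up examples, not from any real add-on:

import re

RE_WATCHDOG = re.compile(
    r"^(?:(?P<s_prefix>https?|tcp)|\[PROTO:(?P<t_proto>\w+)\])"
    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$"
)

m = RE_WATCHDOG.match("[PROTO:ssl]://[HOST]:[PORT:8123]/health")
assert m and m.group("t_proto") == "ssl"   # scheme chosen via the "ssl" option
assert m.group("t_port") == "8123"         # container port, remapped if needed
assert m.group("s_suffix") == "/health"    # path appended to the probe URL

m = RE_WATCHDOG.match("tcp://[HOST]:[PORT:80]")
assert m and m.group("s_prefix") == "tcp"  # plain TCP port probe, no HTTP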
    async def write_options(self) -> None:
        """Return True if add-on options is written to data."""
        # Update secrets for validation
        await self.sys_secrets.reload()
        await self.sys_homeassistant.secrets.reload()

        try:
            options = schema(options)
            options = self.schema(self.options)
            write_json_file(self.path_options, options)
        except vol.Invalid as ex:
            _LOGGER.error(
                "Add-on %s have wrong options: %s",
                "Add-on %s has invalid options: %s",
                self.slug,
                humanize_error(options, ex),
                humanize_error(self.options, ex),
            )
        except JsonFileError:
            _LOGGER.error("Add-on %s can't write options", self.slug)
@@ -369,22 +476,27 @@ class Addon(AddonModel):
            _LOGGER.debug("Add-on %s write options: %s", self.slug, options)
            return

        raise AddonsError()
        raise AddonConfigurationError()

    async def remove_data(self):
    async def remove_data(self) -> None:
        """Remove add-on data."""
        if not self.path_data.is_dir():
            return

        _LOGGER.info("Remove add-on data folder %s", self.path_data)
        _LOGGER.info("Removing add-on data folder %s", self.path_data)
        await remove_data(self.path_data)

    def write_pulse(self):
    def write_pulse(self) -> None:
        """Write asound config to file and return True on success."""
        pulse_config = self.sys_audio.pulse_client(
        pulse_config = self.sys_plugins.audio.pulse_client(
            input_profile=self.audio_input, output_profile=self.audio_output
        )

        # Cleanup wrong maps
        if self.path_pulse.is_dir():
            shutil.rmtree(self.path_pulse, ignore_errors=True)

        # Write pulse config
        try:
            with self.path_pulse.open("w") as config_file:
                config_file.write(pulse_config)
@@ -392,11 +504,10 @@ class Addon(AddonModel):
            _LOGGER.error(
                "Add-on %s can't write pulse/client.config: %s", self.slug, err
            )
            raise AddonsError()

        _LOGGER.debug(
            "Add-on %s write pulse/client.config: %s", self.slug, self.path_pulse
        )
        else:
            _LOGGER.debug(
                "Add-on %s write pulse/client.config: %s", self.slug, self.path_pulse
            )

    async def install_apparmor(self) -> None:
        """Install or Update AppArmor profile for Add-on."""
@@ -440,7 +551,7 @@ class Addon(AddonModel):

        # create voluptuous
        new_schema = vol.Schema(
            vol.All(dict, validate_options(self.coresys, new_raw_schema))
            vol.All(dict, AddonOptions(self.coresys, new_raw_schema))
        )

        # validate
@@ -451,16 +562,10 @@ class Addon(AddonModel):
            return False
        return True

    async def state(self) -> str:
        """Return running state of add-on."""
        if await self.instance.is_running():
            return STATE_STARTED
        return STATE_STOPPED

    async def start(self) -> None:
        """Set options and start add-on."""
        if await self.instance.is_running():
            _LOGGER.warning("%s already running!", self.slug)
            _LOGGER.warning("%s is already running!", self.slug)
            return

        # Access Token
@@ -477,15 +582,26 @@ class Addon(AddonModel):
        # Start Add-on
        try:
            await self.instance.run()
        except DockerAPIError:
            raise AddonsError() from None
        except DockerRequestError as err:
            self.state = AddonState.STOPPED
            raise AddonsError() from err
        except DockerError as err:
            self.state = AddonState.ERROR
            raise AddonsError() from err
        else:
            self.state = AddonState.STARTED

    async def stop(self) -> None:
        """Stop add-on."""
        try:
            return await self.instance.stop()
        except DockerAPIError:
            raise AddonsError() from None
            await self.instance.stop()
        except DockerRequestError as err:
            raise AddonsError() from err
        except DockerError as err:
            self.state = AddonState.ERROR
            raise AddonsError() from err
        else:
            self.state = AddonState.STOPPED
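Read together, start() and stop() replace the deleted async state() query with an explicit state machine; summarizing the transitions encoded above:

start(): DockerRequestError -> STOPPED, DockerError -> ERROR, success -> STARTED
stop():  DockerRequestError -> state unchanged, DockerError -> ERROR, success -> STOPPED

The distinction matters downstream: a request-level failure means the container was never touched, while a Docker-level failure leaves the add-on in an ERROR state that repair flows can key off.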
    async def restart(self) -> None:
        """Restart add-on."""
@@ -500,80 +616,91 @@ class Addon(AddonModel):
        """
        return self.instance.logs()

    def is_running(self) -> Awaitable[bool]:
        """Return True if Docker container is running.

        Return a coroutine.
        """
        return self.instance.is_running()

    async def stats(self) -> DockerStats:
        """Return stats of container."""
        try:
            return await self.instance.stats()
        except DockerAPIError:
            raise AddonsError() from None
        except DockerError as err:
            raise AddonsError() from err

    async def write_stdin(self, data):
    async def write_stdin(self, data) -> None:
        """Write data to add-on stdin.

        Return a coroutine.
        """
        if not self.with_stdin:
            _LOGGER.error("Add-on don't support write to stdin!")
            _LOGGER.error("Add-on %s does not support writing to stdin!", self.slug)
            raise AddonsNotSupportedError()

        try:
            return await self.instance.write_stdin(data)
        except DockerAPIError:
            raise AddonsError() from None
        except DockerError as err:
            raise AddonsError() from err

    async def snapshot(self, tar_file: tarfile.TarFile) -> None:
        """Snapshot state of an add-on."""
        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
            temp_path = Path(temp)

            # store local image
            if self.need_build:
                try:
                    await self.instance.export_image(Path(temp, "image.tar"))
                except DockerAPIError:
                    raise AddonsError() from None
                    await self.instance.export_image(temp_path.joinpath("image.tar"))
                except DockerError as err:
                    raise AddonsError() from err

            data = {
                ATTR_USER: self.persist,
                ATTR_SYSTEM: self.data,
                ATTR_VERSION: self.version,
                ATTR_STATE: await self.state(),
                ATTR_STATE: self.state,
            }

            # Store local configs/state
            try:
                write_json_file(Path(temp, "addon.json"), data)
            except JsonFileError:
                write_json_file(temp_path.joinpath("addon.json"), data)
            except JsonFileError as err:
                _LOGGER.error("Can't save meta for %s", self.slug)
                raise AddonsError() from None
                raise AddonsError() from err

            # Store AppArmor Profile
            if self.sys_host.apparmor.exists(self.slug):
                profile = Path(temp, "apparmor.txt")
                profile = temp_path.joinpath("apparmor.txt")
                try:
                    self.sys_host.apparmor.backup_profile(self.slug, profile)
                except HostAppArmorError:
                except HostAppArmorError as err:
                    _LOGGER.error("Can't backup AppArmor profile")
                    raise AddonsError() from None
                    raise AddonsError() from err

            # write into tarfile
            def _write_tarfile():
                """Write tar inside loop."""
                with tar_file as snapshot:
                    # Snapshot system

                    snapshot.add(temp, arcname=".")

                    # Snapshot data
                    snapshot.add(
                    atomic_contents_add(
                        snapshot,
                        self.path_data,
                        excludes=self.snapshot_exclude,
                        arcname="data",
                        filter=exclude_filter(self.snapshot_exclude),
                    )

            try:
                _LOGGER.info("Build snapshot for add-on %s", self.slug)
                _LOGGER.info("Building snapshot for add-on %s", self.slug)
                await self.sys_run_in_executor(_write_tarfile)
            except (tarfile.TarError, OSError) as err:
                _LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
                raise AddonsError() from None
                raise AddonsError() from err

            _LOGGER.info("Finish snapshot for addon %s", self.slug)

@@ -590,13 +717,13 @@ class Addon(AddonModel):
            await self.sys_run_in_executor(_extract_tarfile)
        except tarfile.TarError as err:
            _LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
            raise AddonsError() from None
            raise AddonsError() from err

        # Read snapshot data
        try:
            data = read_json_file(Path(temp, "addon.json"))
        except JsonFileError:
            raise AddonsError() from None
        except JsonFileError as err:
            raise AddonsError() from err

        # Validate
        try:
@@ -607,14 +734,14 @@ class Addon(AddonModel):
                self.slug,
                humanize_error(data, err),
            )
            raise AddonsError() from None
            raise AddonsError() from err

        # If available
        if not self._available(data[ATTR_SYSTEM]):
            _LOGGER.error("Add-on %s is not available for this Platform", self.slug)
            _LOGGER.error("Add-on %s is not available for this platform", self.slug)
            raise AddonsNotSupportedError()

        # Restore local add-on informations
        # Restore local add-on information
        _LOGGER.info("Restore config for addon %s", self.slug)
        restore_image = self._image(data[ATTR_SYSTEM])
        self.sys_addons.data.restore(
@@ -624,49 +751,51 @@ class Addon(AddonModel):
        # Check version / restore image
        version = data[ATTR_VERSION]
        if not await self.instance.exists():
            _LOGGER.info("Restore/Install image for addon %s", self.slug)
            _LOGGER.info("Restore/Install of image for addon %s", self.slug)

            image_file = Path(temp, "image.tar")
            if image_file.is_file():
                with suppress(DockerAPIError):
                with suppress(DockerError):
                    await self.instance.import_image(image_file)
            else:
                with suppress(DockerAPIError):
                with suppress(DockerError):
                    await self.instance.install(version, restore_image)
                    await self.instance.cleanup()
        elif self.instance.version != version or self.legacy:
            _LOGGER.info("Restore/Update image for addon %s", self.slug)
            with suppress(DockerAPIError):
            _LOGGER.info("Restore/Update of image for addon %s", self.slug)
            with suppress(DockerError):
                await self.instance.update(version, restore_image)
        else:
            with suppress(DockerAPIError):
            with suppress(DockerError):
                await self.instance.stop()

        # Restore data
        def _restore_data():
            """Restore data."""
            shutil.copytree(Path(temp, "data"), self.path_data)
            shutil.copytree(Path(temp, "data"), self.path_data, symlinks=True)

        _LOGGER.info("Restore data for addon %s", self.slug)
        _LOGGER.info("Restoring data for addon %s", self.slug)
        if self.path_data.is_dir():
            await remove_data(self.path_data)
        try:
            await self.sys_run_in_executor(_restore_data)
        except shutil.Error as err:
            _LOGGER.error("Can't restore origin data: %s", err)
            raise AddonsError() from None
            raise AddonsError() from err

        # Restore AppArmor
        profile_file = Path(temp, "apparmor.txt")
        if profile_file.exists():
            try:
                await self.sys_host.apparmor.load_profile(self.slug, profile_file)
            except HostAppArmorError:
                _LOGGER.error("Can't restore AppArmor profile")
                raise AddonsError() from None
            except HostAppArmorError as err:
                _LOGGER.error(
                    "Can't restore AppArmor profile for add-on %s", self.slug
                )
                raise AddonsError() from err

        # Run add-on
        if data[ATTR_STATE] == STATE_STARTED:
        if data[ATTR_STATE] == AddonState.STARTED:
            return await self.start()

        _LOGGER.info("Finish restore for add-on %s", self.slug)
        _LOGGER.info("Finished restore for add-on %s", self.slug)
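With atomic_contents_add in place, the snapshot tar has a stable layout that restore() later relies on. Reading the arcnames above, a snapshot unpacks roughly to (listing is descriptive, not generated output):

./addon.json    user persist data, system config, version, state
./image.tar     exported Docker image, only for locally built add-ons
./apparmor.txt  backed-up AppArmor profile, only if one exists
./data/         the add-on data folder, minus snapshot_exclude matches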
@@ -1,8 +1,11 @@
"""Supervisor add-on build environment."""
from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, Dict

from awesomeversion import AwesomeVersion

from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
from ..coresys import CoreSys, CoreSysAttributes
from ..utils.json import JsonConfig
@@ -30,7 +33,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):

    @property
    def base_image(self) -> str:
        """Base images for this add-on."""
        """Return base image for this add-on."""
        return self._data[ATTR_BUILD_FROM].get(
            self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
        )
@@ -45,11 +48,21 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
        """Return additional Docker build arguments."""
        return self._data[ATTR_ARGS]

    def get_docker_args(self, version):
    @property
    def is_valid(self) -> bool:
        """Return true if the build env is valid."""
        return all(
            [
                self.addon.path_location.is_dir(),
                Path(self.addon.path_location, "Dockerfile").is_file(),
            ]
        )

    def get_docker_args(self, version: AwesomeVersion):
        """Create a dict with Docker build arguments."""
        args = {
            "path": str(self.addon.path_location),
            "tag": f"{self.addon.image}:{version}",
            "tag": f"{self.addon.image}:{version!s}",
            "pull": True,
            "forcerm": True,
            "squash": self.squash,
@@ -12,8 +12,8 @@ from ..const import (
    FILE_HASSIO_ADDONS,
)
from ..coresys import CoreSys, CoreSysAttributes
from ..utils.json import JsonConfig
from ..store.addon import AddonStore
from ..utils.json import JsonConfig
from .addon import Addon
from .validate import SCHEMA_ADDONS_FILE
@@ -1,8 +1,9 @@
|
||||
"""Init file for Supervisor add-ons."""
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
from typing import Any, Awaitable, Dict, List, Optional
|
||||
|
||||
from packaging import version as pkg_version
|
||||
from awesomeversion import AwesomeVersion, AwesomeVersionException
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
@@ -11,7 +12,6 @@ from ..const import (
|
||||
ATTR_ARCH,
|
||||
ATTR_AUDIO,
|
||||
ATTR_AUTH_API,
|
||||
ATTR_AUTO_UART,
|
||||
ATTR_BOOT,
|
||||
ATTR_DESCRIPTON,
|
||||
ATTR_DEVICES,
|
||||
@@ -31,6 +31,7 @@ from ..const import (
|
||||
ATTR_HOST_PID,
|
||||
ATTR_IMAGE,
|
||||
ATTR_INGRESS,
|
||||
ATTR_INIT,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_LEGACY,
|
||||
ATTR_LOCATON,
|
||||
@@ -54,41 +55,50 @@ from ..const import (
|
||||
ATTR_STDIN,
|
||||
ATTR_TIMEOUT,
|
||||
ATTR_TMPFS,
|
||||
ATTR_UART,
|
||||
ATTR_UDEV,
|
||||
ATTR_URL,
|
||||
ATTR_USB,
|
||||
ATTR_VERSION,
|
||||
ATTR_VIDEO,
|
||||
ATTR_WATCHDOG,
|
||||
ATTR_WEBUI,
|
||||
SECURITY_DEFAULT,
|
||||
SECURITY_DISABLE,
|
||||
SECURITY_PROFILE,
|
||||
AddonStages,
|
||||
AddonBoot,
|
||||
AddonStage,
|
||||
AddonStartup,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from .validate import RE_SERVICE, RE_VOLUME, schema_ui_options, validate_options
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from .options import AddonOptions, UiOptions
|
||||
from .validate import RE_SERVICE, RE_VOLUME
|
||||
|
||||
Data = Dict[str, Any]
|
||||
|
||||
|
||||
class AddonModel(CoreSysAttributes):
|
||||
class AddonModel(CoreSysAttributes, ABC):
|
||||
"""Add-on Data layout."""
|
||||
|
||||
slug: str = None
|
||||
def __init__(self, coresys: CoreSys, slug: str):
|
||||
"""Initialize data holder."""
|
||||
self.coresys: CoreSys = coresys
|
||||
self.slug: str = slug
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def data(self) -> Data:
|
||||
"""Return Add-on config/data."""
|
||||
raise NotImplementedError()
|
||||
"""Return add-on config/data."""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def is_installed(self) -> bool:
|
||||
"""Return True if an add-on is installed."""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def is_detached(self) -> bool:
|
||||
"""Return True if add-on is detached."""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
@@ -101,7 +111,7 @@ class AddonModel(CoreSysAttributes):
|
||||
return self.data[ATTR_OPTIONS]
|
||||
|
||||
@property
|
||||
def boot(self) -> bool:
|
||||
def boot(self) -> AddonBoot:
|
||||
"""Return boot config with prio local settings."""
|
||||
return self.data[ATTR_BOOT]
|
||||
|
||||
@@ -136,7 +146,7 @@ class AddonModel(CoreSysAttributes):
|
||||
return None
|
||||
|
||||
@property
|
||||
def hassio_token(self) -> Optional[str]:
|
||||
def supervisor_token(self) -> Optional[str]:
|
||||
"""Return access token for Supervisor API."""
|
||||
return None
|
||||
|
||||
@@ -174,12 +184,12 @@ class AddonModel(CoreSysAttributes):
|
||||
return self.data[ATTR_REPOSITORY]
|
||||
|
||||
@property
|
||||
def latest_version(self) -> str:
|
||||
def latest_version(self) -> AwesomeVersion:
|
||||
"""Return latest version of add-on."""
|
         return self.data[ATTR_VERSION]

     @property
-    def version(self) -> str:
+    def version(self) -> AwesomeVersion:
         """Return version of add-on."""
         return self.data[ATTR_VERSION]

@@ -189,9 +199,9 @@ class AddonModel(CoreSysAttributes):
         return True

     @property
-    def startup(self) -> Optional[str]:
+    def startup(self) -> AddonStartup:
         """Return startup type of add-on."""
-        return self.data.get(ATTR_STARTUP)
+        return self.data[ATTR_STARTUP]

     @property
     def advanced(self) -> bool:
@@ -199,7 +209,7 @@ class AddonModel(CoreSysAttributes):
         return self.data[ATTR_ADVANCED]

     @property
-    def stage(self) -> AddonStages:
+    def stage(self) -> AddonStage:
         """Return stage mode of add-on."""
         return self.data[ATTR_STAGE]

@@ -211,7 +221,8 @@ class AddonModel(CoreSysAttributes):
         services = {}
         for data in services_list:
             service = RE_SERVICE.match(data)
-            services[service.group("service")] = service.group("rights")
+            if service:
+                services[service.group("service")] = service.group("rights")

         return services

@@ -240,6 +251,11 @@ class AddonModel(CoreSysAttributes):
         """Return URL to webui or None."""
         return self.data.get(ATTR_WEBUI)

+    @property
+    def watchdog(self) -> Optional[str]:
+        """Return URL for watchdog or None."""
+        return self.data.get(ATTR_WATCHDOG)
+
     @property
     def ingress_port(self) -> Optional[int]:
         """Return Ingress port."""
@@ -281,19 +297,9 @@ class AddonModel(CoreSysAttributes):
         return self.data[ATTR_HOST_DBUS]

     @property
-    def devices(self) -> Optional[List[str]]:
-        """Return devices of add-on."""
-        return self.data.get(ATTR_DEVICES, [])
-
-    @property
-    def auto_uart(self) -> bool:
-        """Return True if we should map all UART devices."""
-        return self.data[ATTR_AUTO_UART]
-
-    @property
-    def tmpfs(self) -> Optional[str]:
-        """Return tmpfs of add-on."""
-        return self.data.get(ATTR_TMPFS)
+    def static_devices(self) -> List[Path]:
+        """Return static devices of add-on."""
+        return [Path(node) for node in self.data.get(ATTR_DEVICES, [])]

     @property
     def environment(self) -> Optional[Dict[str, str]]:
@@ -344,6 +350,11 @@ class AddonModel(CoreSysAttributes):
         """Return exclude list for snapshot."""
         return self.data.get(ATTR_SNAPSHOT_EXCLUDE, [])

+    @property
+    def default_init(self) -> bool:
+        """Return True if the add-on has no init of its own."""
+        return self.data[ATTR_INIT]
+
     @property
     def with_stdin(self) -> bool:
         """Return True if the add-on uses stdin input."""
@@ -364,6 +375,16 @@ class AddonModel(CoreSysAttributes):
         """Return True if the add-on has access to the GPIO interface."""
         return self.data[ATTR_GPIO]

+    @property
+    def with_usb(self) -> bool:
+        """Return True if the add-on needs USB access."""
+        return self.data[ATTR_USB]
+
+    @property
+    def with_uart(self) -> bool:
+        """Return True if we should map all UART devices."""
+        return self.data[ATTR_UART]
+
     @property
     def with_udev(self) -> bool:
         """Return True if the add-on has its own udev."""
@@ -384,6 +405,11 @@ class AddonModel(CoreSysAttributes):
         """Return True if the add-on has read access to the devicetree."""
         return self.data[ATTR_DEVICETREE]

+    @property
+    def with_tmpfs(self) -> Optional[str]:
+        """Return True if the add-on's tmp is in memory."""
+        return self.data[ATTR_TMPFS]
+
     @property
     def access_auth_api(self) -> bool:
         """Return True if the add-on has access to the login/auth backend."""
@@ -440,7 +466,7 @@ class AddonModel(CoreSysAttributes):
         return self.data.get(ATTR_MACHINE, [])

     @property
-    def image(self) -> str:
+    def image(self) -> Optional[str]:
         """Generate image name from data."""
         return self._image(self.data)

@@ -455,6 +481,8 @@ class AddonModel(CoreSysAttributes):
         volumes = {}
         for volume in self.data[ATTR_MAP]:
             result = RE_VOLUME.match(volume)
+            if not result:
+                continue
             volumes[result.group(1)] = result.group(2) or "ro"

         return volumes

@@ -495,8 +523,8 @@ class AddonModel(CoreSysAttributes):
         raw_schema = self.data[ATTR_SCHEMA]

         if isinstance(raw_schema, bool):
-            return vol.Schema(dict)
-        return vol.Schema(vol.All(dict, validate_options(self.coresys, raw_schema)))
+            raw_schema = {}
+        return vol.Schema(vol.All(dict, AddonOptions(self.coresys, raw_schema)))

     @property
     def schema_ui(self) -> Optional[List[Dict[str, Any]]]:
@@ -505,7 +533,7 @@ class AddonModel(CoreSysAttributes):

         if isinstance(raw_schema, bool):
             return None
-        return schema_ui_options(raw_schema)
+        return UiOptions(self.coresys)(raw_schema)

     def __eq__(self, other):
         """Compare add-on objects."""
@@ -521,17 +549,17 @@ class AddonModel(CoreSysAttributes):

         # Machine / Hardware
         machine = config.get(ATTR_MACHINE)
-        if machine and self.sys_machine not in machine:
+        if machine and f"!{self.sys_machine}" in machine:
+            return False
+        elif machine and self.sys_machine not in machine:
             return False

         # Home Assistant
-        version = config.get(ATTR_HOMEASSISTANT) or self.sys_homeassistant.version
-        if pkg_version.parse(self.sys_homeassistant.version) < pkg_version.parse(
-            version
-        ):
-            return False
-
-        return True
+        version: Optional[AwesomeVersion] = config.get(ATTR_HOMEASSISTANT)
+        try:
+            return self.sys_homeassistant.version >= version
+        except (AwesomeVersionException, TypeError):
+            return True

     def _image(self, config) -> str:
         """Generate image name from data."""
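
The availability check above leans on the awesomeversion package rather than manual
version parsing. A minimal sketch of the same rule outside the class, assuming only
that awesomeversion is installed (version values below are hypothetical):

    from typing import Optional
    from awesomeversion import AwesomeVersion, AwesomeVersionException

    def ha_supported(current: AwesomeVersion, required: Optional[AwesomeVersion]) -> bool:
        """Mirror the _available() rule: undecidable requirements count as supported."""
        try:
            return current >= required
        except (AwesomeVersionException, TypeError):
            # required is None, or the two version strings cannot be compared
            return True

    assert ha_supported(AwesomeVersion("2021.2.0"), AwesomeVersion("2021.1.5"))
    assert ha_supported(AwesomeVersion("2021.2.0"), None)
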
supervisor/addons/options.py (new file, 381 lines)
@@ -0,0 +1,381 @@
"""Add-on Options / UI rendering."""
import logging
from pathlib import Path
import re
from typing import Any, Dict, List, Set, Union

import voluptuous as vol

from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import HardwareNotFound
from ..hardware.const import UdevSubsystem
from ..hardware.data import Device
from ..validate import network_port

_LOGGER: logging.Logger = logging.getLogger(__name__)

_STR = "str"
_INT = "int"
_FLOAT = "float"
_BOOL = "bool"
_PASSWORD = "password"
_EMAIL = "email"
_URL = "url"
_PORT = "port"
_MATCH = "match"
_LIST = "list"
_DEVICE = "device"

RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|bool"
    r"|email"
    r"|url"
    r"|port"
    r"|device(?:\((?P<filter>subsystem=[a-z]+)\))?"
    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
    r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r"|list\((?P<list>.+)\)"
    r")\??$"
)

_SCHEMA_LENGTH_PARTS = (
    "i_min",
    "i_max",
    "f_min",
    "f_max",
    "s_min",
    "s_max",
    "p_min",
    "p_max",
)
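
For reference, a few hypothetical type strings and how RE_SCHEMA_ELEMENT treats them:

    match = RE_SCHEMA_ELEMENT.match("int(0,100)?")
    assert match and match.group("i_min") == "0" and match.group("i_max") == "100"
    assert RE_SCHEMA_ELEMENT.match("device(subsystem=tty)")
    assert RE_SCHEMA_ELEMENT.match("list(low|medium|high)")
    assert RE_SCHEMA_ELEMENT.match("number") is None  # unknown types are rejected
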

class AddonOptions(CoreSysAttributes):
    """Validate add-on options."""

    def __init__(
        self,
        coresys: CoreSys,
        raw_schema: Dict[str, Any],
    ):
        """Initialize options validator."""
        self.coresys: CoreSys = coresys
        self.raw_schema: Dict[str, Any] = raw_schema
        self.devices: Set[Device] = set()

    def __call__(self, struct):
        """Create schema validator for add-on options."""
        options = {}

        # read options
        for key, value in struct.items():
            # Ignore unknown options / remove from list
            if key not in self.raw_schema:
                _LOGGER.warning("Unknown option %s", key)
                continue

            typ = self.raw_schema[key]
            try:
                if isinstance(typ, list):
                    # nested value list
                    options[key] = self._nested_validate_list(typ[0], value, key)
                elif isinstance(typ, dict):
                    # nested value dict
                    options[key] = self._nested_validate_dict(typ, value, key)
                else:
                    # normal value
                    options[key] = self._single_validate(typ, value, key)
            except (IndexError, KeyError):
                raise vol.Invalid(f"Type error for {key}") from None

        self._check_missing_options(self.raw_schema, options, "root")
        return options

    # pylint: disable=no-value-for-parameter
    def _single_validate(self, typ: str, value: Any, key: str):
        """Validate a single element."""
        # if required argument
        if value is None:
            raise vol.Invalid(f"Missing required option '{key}'") from None

        # Lookup secret
        if str(value).startswith("!secret "):
            secret: str = value.partition(" ")[2]
            value = self.sys_homeassistant.secrets.get(secret)
            if value is None:
                raise vol.Invalid(f"Unknown secret {secret}") from None

        # parse extend data from type
        match = RE_SCHEMA_ELEMENT.match(typ)

        if not match:
            raise vol.Invalid(f"Unknown type {typ}") from None

        # prepare range
        range_args = {}
        for group_name in _SCHEMA_LENGTH_PARTS:
            group_value = match.group(group_name)
            if group_value:
                range_args[group_name[2:]] = float(group_value)

        if typ.startswith(_STR) or typ.startswith(_PASSWORD):
            return vol.All(str(value), vol.Range(**range_args))(value)
        elif typ.startswith(_INT):
            return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
        elif typ.startswith(_FLOAT):
            return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
        elif typ.startswith(_BOOL):
            return vol.Boolean()(value)
        elif typ.startswith(_EMAIL):
            return vol.Email()(value)
        elif typ.startswith(_URL):
            return vol.Url()(value)
        elif typ.startswith(_PORT):
            return network_port(value)
        elif typ.startswith(_MATCH):
            return vol.Match(match.group("match"))(str(value))
        elif typ.startswith(_LIST):
            return vol.In(match.group("list").split("|"))(str(value))
        elif typ.startswith(_DEVICE):
            try:
                device = self.sys_hardware.get_by_path(Path(value))
            except HardwareNotFound:
                raise vol.Invalid(f"Device {value} does not exist!") from None

            # Have filter
            if match.group("filter"):
                str_filter = match.group("filter")
                device_filter = _create_device_filter(str_filter)
                if device not in self.sys_hardware.filter_devices(**device_filter):
                    raise vol.Invalid(
                        f"Device {value} doesn't match the filter {str_filter}!"
                    )

            # Device valid
            self.devices.add(device)
            return str(device.path)

        raise vol.Invalid(f"Fatal error for {key} type {typ}") from None

    def _nested_validate_list(self, typ: Any, data_list: List[Any], key: str):
        """Validate nested list items."""
        options = []

        # Make sure it is a list
        if not isinstance(data_list, list):
            raise vol.Invalid(f"Invalid list for {key}") from None

        # Process list
        for element in data_list:
            # Nested?
            if isinstance(typ, dict):
                c_options = self._nested_validate_dict(typ, element, key)
                options.append(c_options)
            else:
                options.append(self._single_validate(typ, element, key))

        return options

    def _nested_validate_dict(
        self, typ: Dict[Any, Any], data_dict: Dict[Any, Any], key: str
    ):
        """Validate nested dict items."""
        options = {}

        # Make sure it is a dict
        if not isinstance(data_dict, dict):
            raise vol.Invalid(f"Invalid dict for {key}") from None

        # Process dict
        for c_key, c_value in data_dict.items():
            # Ignore unknown options / remove from list
            if c_key not in typ:
                _LOGGER.warning("Unknown option %s", c_key)
                continue

            # Nested?
            if isinstance(typ[c_key], list):
                options[c_key] = self._nested_validate_list(
                    typ[c_key][0], c_value, c_key
                )
            else:
                options[c_key] = self._single_validate(typ[c_key], c_value, c_key)

        self._check_missing_options(typ, options, key)
        return options

    def _check_missing_options(
        self, origin: Dict[Any, Any], exists: Dict[Any, Any], root: str
    ) -> None:
        """Check that all required options exist."""
        missing = set(origin) - set(exists)
        for miss_opt in missing:
            if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
                continue
            raise vol.Invalid(f"Missing option {miss_opt} in {root}") from None

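A minimal sketch of driving this validator directly; the schema and values here are
hypothetical, and coresys must be a real CoreSys instance in practice:

    raw_schema = {"ssl": "bool", "port": "port", "password": "password?"}
    validator = AddonOptions(coresys, raw_schema)

    options = validator({"ssl": True, "port": 8883})
    # -> {"ssl": True, "port": 8883}; "password" may be omitted thanks to the "?"
    # Bad input raises vol.Invalid, e.g. validator({"ssl": "maybe", "port": 8883})
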
class UiOptions(CoreSysAttributes):
    """Render UI add-on options."""

    def __init__(self, coresys: CoreSys) -> None:
        """Initialize UI option render."""
        self.coresys = coresys

    def __call__(self, raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Generate UI schema."""
        ui_schema: List[Dict[str, Any]] = []

        # read options
        for key, value in raw_schema.items():
            if isinstance(value, list):
                # nested value list
                self._nested_ui_list(ui_schema, value, key)
            elif isinstance(value, dict):
                # nested value dict
                self._nested_ui_dict(ui_schema, value, key)
            else:
                # normal value
                self._single_ui_option(ui_schema, value, key)

        return ui_schema

    def _single_ui_option(
        self,
        ui_schema: List[Dict[str, Any]],
        value: str,
        key: str,
        multiple: bool = False,
    ) -> None:
        """Render a single element."""
        ui_node: Dict[str, Union[str, bool, float, List[str]]] = {"name": key}

        # If multiple
        if multiple:
            ui_node["multiple"] = True

        # Parse extend data from type
        match = RE_SCHEMA_ELEMENT.match(value)
        if not match:
            return

        # Prepare range
        for group_name in _SCHEMA_LENGTH_PARTS:
            group_value = match.group(group_name)
            if not group_value:
                continue
            if group_name[2:] == "min":
                ui_node["lengthMin"] = float(group_value)
            elif group_name[2:] == "max":
                ui_node["lengthMax"] = float(group_value)

        # Required or optional
        if value.endswith("?"):
            ui_node["optional"] = True
        else:
            ui_node["required"] = True

        # Data types
        if value.startswith(_STR):
            ui_node["type"] = "string"
        elif value.startswith(_PASSWORD):
            ui_node["type"] = "string"
            ui_node["format"] = "password"
        elif value.startswith(_INT):
            ui_node["type"] = "integer"
        elif value.startswith(_FLOAT):
            ui_node["type"] = "float"
        elif value.startswith(_BOOL):
            ui_node["type"] = "boolean"
        elif value.startswith(_EMAIL):
            ui_node["type"] = "string"
            ui_node["format"] = "email"
        elif value.startswith(_URL):
            ui_node["type"] = "string"
            ui_node["format"] = "url"
        elif value.startswith(_PORT):
            ui_node["type"] = "integer"
        elif value.startswith(_MATCH):
            ui_node["type"] = "string"
        elif value.startswith(_LIST):
            ui_node["type"] = "select"
            ui_node["options"] = match.group("list").split("|")
        elif value.startswith(_DEVICE):
            ui_node["type"] = "select"

            # Have filter
            if match.group("filter"):
                device_filter = _create_device_filter(match.group("filter"))
                ui_node["options"] = [
                    (device.by_id or device.path).as_posix()
                    for device in self.sys_hardware.filter_devices(**device_filter)
                ]
            else:
                ui_node["options"] = [
                    (device.by_id or device.path).as_posix()
                    for device in self.sys_hardware.devices()
                ]

        ui_schema.append(ui_node)

    def _nested_ui_list(
        self,
        ui_schema: List[Dict[str, Any]],
        option_list: List[Any],
        key: str,
    ) -> None:
        """UI nested list items."""
        try:
            element = option_list[0]
        except IndexError:
            _LOGGER.error("Invalid schema %s", key)
            return

        if isinstance(element, dict):
            self._nested_ui_dict(ui_schema, element, key, multiple=True)
        else:
            self._single_ui_option(ui_schema, element, key, multiple=True)

    def _nested_ui_dict(
        self,
        ui_schema: List[Dict[str, Any]],
        option_dict: Dict[str, Any],
        key: str,
        multiple: bool = False,
    ) -> None:
        """UI nested dict items."""
        ui_node = {
            "name": key,
            "type": "schema",
            "optional": True,
            "multiple": multiple,
        }

        nested_schema = []
        for c_key, c_value in option_dict.items():
            # Nested?
            if isinstance(c_value, list):
                self._nested_ui_list(nested_schema, c_value, c_key)
            else:
                self._single_ui_option(nested_schema, c_value, c_key)

        ui_node["schema"] = nested_schema
        ui_schema.append(ui_node)


def _create_device_filter(str_filter: str) -> Dict[str, Any]:
    """Generate device filter."""
    raw_filter = dict(value.split("=") for value in str_filter.split(";"))

    clean_filter = {}
    for key, value in raw_filter.items():
        if key == "subsystem":
            clean_filter[key] = UdevSubsystem(value)
        else:
            clean_filter[key] = value

    return clean_filter
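
Rendering works the same way; a hypothetical two-option schema produces one UI node
per option (sketch, not Supervisor test code):

    ui = UiOptions(coresys)({"port": "port", "mode": "list(auto|manual)?"})
    # -> [
    #        {"name": "port", "required": True, "type": "integer"},
    #        {"name": "mode", "optional": True, "type": "select",
    #         "options": ["auto", "manual"]},
    #    ]
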
@@ -2,7 +2,7 @@
 import logging
 import re
 import secrets
-from typing import Any, Dict, List
+from typing import Any, Dict
 import uuid

 import voluptuous as vol
@@ -18,7 +18,6 @@ from ..const import (
     ATTR_AUDIO_INPUT,
     ATTR_AUDIO_OUTPUT,
     ATTR_AUTH_API,
-    ATTR_AUTO_UART,
     ATTR_AUTO_UPDATE,
     ATTR_BOOT,
     ATTR_BUILD_FROM,
@@ -44,6 +43,7 @@ from ..const import (
     ATTR_INGRESS_PANEL,
     ATTR_INGRESS_PORT,
     ATTR_INGRESS_TOKEN,
+    ATTR_INIT,
     ATTR_KERNEL_MODULES,
     ATTR_LEGACY,
     ATTR_LOCATON,
@@ -72,127 +72,146 @@ from ..const import (
     ATTR_SYSTEM,
     ATTR_TIMEOUT,
     ATTR_TMPFS,
+    ATTR_UART,
     ATTR_UDEV,
     ATTR_URL,
+    ATTR_USB,
     ATTR_USER,
     ATTR_UUID,
     ATTR_VERSION,
     ATTR_VIDEO,
     ATTR_WATCHDOG,
     ATTR_WEBUI,
-    BOOT_AUTO,
-    BOOT_MANUAL,
     PRIVILEGED_ALL,
     ROLE_ALL,
     ROLE_DEFAULT,
-    STARTUP_ALL,
-    STARTUP_APPLICATION,
-    STARTUP_SERVICES,
-    STATE_STARTED,
-    STATE_STOPPED,
-    AddonStages,
+    AddonBoot,
+    AddonStage,
+    AddonStartup,
+    AddonState,
 )
 from ..coresys import CoreSys
 from ..discovery.validate import valid_discovery_service
 from ..validate import (
-    DOCKER_PORTS,
-    DOCKER_PORTS_DESCRIPTION,
+    docker_image,
+    docker_ports,
+    docker_ports_description,
     network_port,
     token,
     uuid_match,
+    version_tag,
 )
+from .options import RE_SCHEMA_ELEMENT

 _LOGGER: logging.Logger = logging.getLogger(__name__)


-RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
+RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
 RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")

-V_STR = "str"
-V_INT = "int"
-V_FLOAT = "float"
-V_BOOL = "bool"
-V_PASSWORD = "password"
-V_EMAIL = "email"
-V_URL = "url"
-V_PORT = "port"
-V_MATCH = "match"
-V_LIST = "list"
-
-RE_SCHEMA_ELEMENT = re.compile(
-    r"^(?:"
-    r"|bool|email|url|port"
-    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
-    r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
-    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
-    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
-    r"|match\((?P<match>.*)\)"
-    r"|list\((?P<list>.+)\)"
-    r")\??$"
-)
-
-_SCHEMA_LENGTH_PARTS = (
-    "i_min",
-    "i_max",
-    "f_min",
-    "f_max",
-    "s_min",
-    "s_max",
-    "p_min",
-    "p_max",
-)
-
-RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
 RE_DOCKER_IMAGE_BUILD = re.compile(
     r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
 )

 SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)


-MACHINE_ALL = [
-    "intel-nuc",
-    "odroid-c2",
-    "odroid-n2",
-    "odroid-xu",
-    "qemuarm-64",
-    "qemuarm",
-    "qemux86-64",
-    "qemux86",
-    "raspberrypi",
-    "raspberrypi2",
-    "raspberrypi3-64",
-    "raspberrypi3",
-    "raspberrypi4-64",
-    "raspberrypi4",
-    "tinker",
-]
+RE_MACHINE = re.compile(
+    r"^!?(?:"
+    r"|intel-nuc"
+    r"|odroid-c2"
+    r"|odroid-c4"
+    r"|odroid-n2"
+    r"|odroid-xu"
+    r"|qemuarm-64"
+    r"|qemuarm"
+    r"|qemux86-64"
+    r"|qemux86"
+    r"|raspberrypi"
+    r"|raspberrypi2"
+    r"|raspberrypi3-64"
+    r"|raspberrypi3"
+    r"|raspberrypi4-64"
+    r"|raspberrypi4"
+    r"|tinker"
+    r")$"
+)
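
The regex replaces the old MACHINE_ALL whitelist and adds negation; hypothetical
examples of what it accepts:

    assert RE_MACHINE.match("raspberrypi4-64")     # run only on this machine type
    assert RE_MACHINE.match("!qemux86-64")         # run on anything except this type
    assert RE_MACHINE.match("intel-nuc2") is None  # unknown machine types are rejected
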
-def _simple_startup(value):
-    """Simple startup schema."""
-    if value == "before":
-        return STARTUP_SERVICES
-    if value == "after":
-        return STARTUP_APPLICATION
-    return value
+def _migrate_addon_config(protocol=False):
+    """Migrate add-on config."""
+
+    def _migrate(config: Dict[str, Any]):
+        name = config.get(ATTR_NAME)
+        if not name:
+            raise vol.Invalid("Invalid Add-on config!")
+
+        # Startup 2018-03-30
+        if config.get(ATTR_STARTUP) in ("before", "after"):
+            value = config[ATTR_STARTUP]
+            if protocol:
+                _LOGGER.warning(
+                    "Add-on config 'startup' with '%s' is deprecated. Please report this to the maintainer of %s",
+                    value,
+                    name,
+                )
+            if value == "before":
+                config[ATTR_STARTUP] = AddonStartup.SERVICES.value
+            elif value == "after":
+                config[ATTR_STARTUP] = AddonStartup.APPLICATION.value
+
+        # UART 2021-01-20
+        if "auto_uart" in config:
+            if protocol:
+                _LOGGER.warning(
+                    "Add-on config 'auto_uart' is deprecated, use 'uart'. Please report this to the maintainer of %s",
+                    name,
+                )
+            config[ATTR_UART] = config.pop("auto_uart")
+
+        # Device 2021-01-20
+        if ATTR_DEVICES in config and any(":" in line for line in config[ATTR_DEVICES]):
+            if protocol:
+                _LOGGER.warning(
+                    "Add-on config 'devices' uses a deprecated format; the new format is a list of paths only. Please report this to the maintainer of %s",
+                    name,
+                )
+            config[ATTR_DEVICES] = [line.split(":")[0] for line in config[ATTR_DEVICES]]
+
+        # TMPFS 2021-02-01
+        if ATTR_TMPFS in config and not isinstance(config[ATTR_TMPFS], bool):
+            if protocol:
+                _LOGGER.warning(
+                    "Add-on config 'tmpfs' uses a deprecated format; it is now a plain boolean. Please report this to the maintainer of %s",
+                    name,
+                )
+            config[ATTR_TMPFS] = True
+
+        return config
+
+    return _migrate
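
A sketch of what the migration shim does to a legacy config; the values are
hypothetical, and the deprecation warnings fire because protocol=True:

    legacy = {
        "name": "Example add-on",
        "startup": "before",                           # pre-2018 alias
        "auto_uart": True,                             # renamed 2021-01-20
        "devices": ["/dev/ttyUSB0:/dev/ttyUSB0:rwm"],  # old cgroup triplet format
        "tmpfs": "size=64m,uid=0,rw",                  # old tmpfs mount string
    }
    migrated = _migrate_addon_config(protocol=True)(legacy)
    # -> startup == "services", uart is True, devices == ["/dev/ttyUSB0"], tmpfs is True
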
 # pylint: disable=no-value-for-parameter
-SCHEMA_ADDON_CONFIG = vol.Schema(
+_SCHEMA_ADDON_CONFIG = vol.Schema(
     {
-        vol.Required(ATTR_NAME): vol.Coerce(str),
-        vol.Required(ATTR_VERSION): vol.Coerce(str),
-        vol.Required(ATTR_SLUG): vol.Coerce(str),
-        vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
+        vol.Required(ATTR_NAME): str,
+        vol.Required(ATTR_VERSION): version_tag,
+        vol.Required(ATTR_SLUG): str,
+        vol.Required(ATTR_DESCRIPTON): str,
         vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
-        vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
+        vol.Optional(ATTR_MACHINE): vol.All([vol.Match(RE_MACHINE)], vol.Unique()),
         vol.Optional(ATTR_URL): vol.Url(),
-        vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.In(STARTUP_ALL)),
-        vol.Required(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
+        vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
+            AddonStartup
+        ),
+        vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
+        vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
         vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
-        vol.Optional(ATTR_STAGE, default=AddonStages.STABLE): vol.Coerce(AddonStages),
-        vol.Optional(ATTR_PORTS): DOCKER_PORTS,
-        vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
+        vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
+        vol.Optional(ATTR_PORTS): docker_ports,
+        vol.Optional(ATTR_PORTS_DESCRIPTION): docker_ports_description,
+        vol.Optional(ATTR_WATCHDOG): vol.Match(
+            r"^(?:https?|\[PROTO:\w+\]|tcp):\/\/\[HOST\]:\[PORT:\d+\].*$"
+        ),
         vol.Optional(ATTR_WEBUI): vol.Match(
             r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"
         ),
@@ -200,27 +219,28 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
         vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
             network_port, vol.Equal(0)
         ),
-        vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
-        vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
-        vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
+        vol.Optional(ATTR_INGRESS_ENTRY): str,
+        vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): str,
+        vol.Optional(ATTR_PANEL_TITLE): str,
         vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
-        vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
+        vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(version_tag),
         vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
         vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
         vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
         vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
-        vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
-        vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
+        vol.Optional(ATTR_DEVICES): [str],
         vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
-        vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
+        vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
         vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
-        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
+        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
         vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
         vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
         vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
         vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
         vol.Optional(ATTR_VIDEO, default=False): vol.Boolean(),
         vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
+        vol.Optional(ATTR_USB, default=False): vol.Boolean(),
+        vol.Optional(ATTR_UART, default=False): vol.Boolean(),
         vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
         vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
         vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
@@ -232,39 +252,35 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
         vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
         vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
         vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
-        vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [vol.Coerce(str)],
-        vol.Required(ATTR_OPTIONS): dict,
-        vol.Required(ATTR_SCHEMA): vol.Any(
+        vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [str],
+        vol.Optional(ATTR_OPTIONS, default={}): dict,
+        vol.Optional(ATTR_SCHEMA, default={}): vol.Any(
             vol.Schema(
                 {
-                    vol.Coerce(str): vol.Any(
+                    str: vol.Any(
                         SCHEMA_ELEMENT,
                         [
                             vol.Any(
                                 SCHEMA_ELEMENT,
-                                {
-                                    vol.Coerce(str): vol.Any(
-                                        SCHEMA_ELEMENT, [SCHEMA_ELEMENT]
-                                    )
-                                },
+                                {str: vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])},
                             )
                         ],
-                        vol.Schema(
-                            {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
-                        ),
+                        vol.Schema({str: vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}),
                     )
                 }
             ),
            False,
        ),
-        vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE),
+        vol.Optional(ATTR_IMAGE): docker_image,
         vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
-            vol.Coerce(int), vol.Range(min=10, max=120)
+            vol.Coerce(int), vol.Range(min=10, max=300)
         ),
     },
     extra=vol.REMOVE_EXTRA,
 )

+SCHEMA_ADDON_CONFIG = vol.All(_migrate_addon_config(True), _SCHEMA_ADDON_CONFIG)

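With startup, boot, options, and schema now optional with defaults, a minimal config
validates as-is (example values hypothetical):

    config = SCHEMA_ADDON_CONFIG(
        {
            "name": "Example add-on",
            "version": "1.2.0",
            "slug": "example",
            "description": "Demonstration only",
            "arch": ["amd64"],
        }
    )
    assert config["startup"] == AddonStartup.APPLICATION
    assert config["boot"] == AddonBoot.AUTO
    assert config["options"] == {} and config["schema"] == {}
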
 # pylint: disable=no-value-for-parameter
 SCHEMA_BUILD_CONFIG = vol.Schema(
@@ -284,39 +300,42 @@ SCHEMA_BUILD_CONFIG = vol.Schema(

 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_USER = vol.Schema(
     {
-        vol.Required(ATTR_VERSION): vol.Coerce(str),
-        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
+        vol.Required(ATTR_VERSION): version_tag,
+        vol.Optional(ATTR_IMAGE): docker_image,
         vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
         vol.Optional(ATTR_ACCESS_TOKEN): token,
-        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
-            str
-        ),
+        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): str,
         vol.Optional(ATTR_OPTIONS, default=dict): dict,
         vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
-        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
-        vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
-        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
-        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
+        vol.Optional(ATTR_BOOT): vol.Coerce(AddonBoot),
+        vol.Optional(ATTR_NETWORK): docker_ports,
+        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
+        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
         vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
         vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
+        vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
     },
     extra=vol.REMOVE_EXTRA,
 )

-SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
-    {
-        vol.Required(ATTR_LOCATON): vol.Coerce(str),
-        vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
-    }
+SCHEMA_ADDON_SYSTEM = vol.All(
+    _migrate_addon_config(),
+    _SCHEMA_ADDON_CONFIG.extend(
+        {
+            vol.Required(ATTR_LOCATON): str,
+            vol.Required(ATTR_REPOSITORY): str,
+        }
+    ),
 )

 SCHEMA_ADDONS_FILE = vol.Schema(
     {
-        vol.Optional(ATTR_USER, default=dict): {vol.Coerce(str): SCHEMA_ADDON_USER},
-        vol.Optional(ATTR_SYSTEM, default=dict): {vol.Coerce(str): SCHEMA_ADDON_SYSTEM},
-    }
+        vol.Optional(ATTR_USER, default=dict): {str: SCHEMA_ADDON_USER},
+        vol.Optional(ATTR_SYSTEM, default=dict): {str: SCHEMA_ADDON_SYSTEM},
+    },
+    extra=vol.REMOVE_EXTRA,
 )

@@ -324,249 +343,8 @@ SCHEMA_ADDON_SNAPSHOT = vol.Schema(
     {
         vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
         vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
-        vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
-        vol.Required(ATTR_VERSION): vol.Coerce(str),
+        vol.Required(ATTR_STATE): vol.Coerce(AddonState),
+        vol.Required(ATTR_VERSION): version_tag,
     },
     extra=vol.REMOVE_EXTRA,
 )
-
-
-def validate_options(coresys: CoreSys, raw_schema: Dict[str, Any]):
-    """Validate schema."""
-
-    def validate(struct):
-        """Create schema validator for add-ons options."""
-        options = {}
-
-        # read options
-        for key, value in struct.items():
-            # Ignore unknown options / remove from list
-            if key not in raw_schema:
-                _LOGGER.warning("Unknown options %s", key)
-                continue
-
-            typ = raw_schema[key]
-            try:
-                if isinstance(typ, list):
-                    # nested value list
-                    options[key] = _nested_validate_list(coresys, typ[0], value, key)
-                elif isinstance(typ, dict):
-                    # nested value dict
-                    options[key] = _nested_validate_dict(coresys, typ, value, key)
-                else:
-                    # normal value
-                    options[key] = _single_validate(coresys, typ, value, key)
-            except (IndexError, KeyError):
-                raise vol.Invalid(f"Type error for {key}") from None
-
-        _check_missing_options(raw_schema, options, "root")
-        return options
-
-    return validate
-
-
-# pylint: disable=no-value-for-parameter
-# pylint: disable=inconsistent-return-statements
-def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
-    """Validate a single element."""
-    # if required argument
-    if value is None:
-        raise vol.Invalid(f"Missing required option '{key}'")
-
-    # Lookup secret
-    if str(value).startswith("!secret "):
-        secret: str = value.partition(" ")[2]
-        value = coresys.secrets.get(secret)
-        if value is None:
-            raise vol.Invalid(f"Unknown secret {secret}")
-
-    # parse extend data from type
-    match = RE_SCHEMA_ELEMENT.match(typ)
-
-    # prepare range
-    range_args = {}
-    for group_name in _SCHEMA_LENGTH_PARTS:
-        group_value = match.group(group_name)
-        if group_value:
-            range_args[group_name[2:]] = float(group_value)
-
-    if typ.startswith(V_STR) or typ.startswith(V_PASSWORD):
-        return vol.All(str(value), vol.Range(**range_args))(value)
-    elif typ.startswith(V_INT):
-        return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
-    elif typ.startswith(V_FLOAT):
-        return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
-    elif typ.startswith(V_BOOL):
-        return vol.Boolean()(value)
-    elif typ.startswith(V_EMAIL):
-        return vol.Email()(value)
-    elif typ.startswith(V_URL):
-        return vol.Url()(value)
-    elif typ.startswith(V_PORT):
-        return network_port(value)
-    elif typ.startswith(V_MATCH):
-        return vol.Match(match.group("match"))(str(value))
-    elif typ.startswith(V_LIST):
-        return vol.In(match.group("list").split("|"))(str(value))
-
-    raise vol.Invalid(f"Fatal error for {key} type {typ}")
-
-
-def _nested_validate_list(coresys, typ, data_list, key):
-    """Validate nested items."""
-    options = []
-
-    for element in data_list:
-        # Nested?
-        if isinstance(typ, dict):
-            c_options = _nested_validate_dict(coresys, typ, element, key)
-            options.append(c_options)
-        else:
-            options.append(_single_validate(coresys, typ, element, key))
-
-    return options
-
-
-def _nested_validate_dict(coresys, typ, data_dict, key):
-    """Validate nested items."""
-    options = {}
-
-    for c_key, c_value in data_dict.items():
-        # Ignore unknown options / remove from list
-        if c_key not in typ:
-            _LOGGER.warning("Unknown options %s", c_key)
-            continue
-
-        # Nested?
-        if isinstance(typ[c_key], list):
-            options[c_key] = _nested_validate_list(
-                coresys, typ[c_key][0], c_value, c_key
-            )
-        else:
-            options[c_key] = _single_validate(coresys, typ[c_key], c_value, c_key)
-
-    _check_missing_options(typ, options, key)
-    return options
-
-
-def _check_missing_options(origin, exists, root):
-    """Check if all options are exists."""
-    missing = set(origin) - set(exists)
-    for miss_opt in missing:
-        if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
-            continue
-        raise vol.Invalid(f"Missing option {miss_opt} in {root}")
-
-
-def schema_ui_options(raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
-    """Generate UI schema."""
-    ui_schema = []
-
-    # read options
-    for key, value in raw_schema.items():
-        if isinstance(value, list):
-            # nested value list
-            _nested_ui_list(ui_schema, value, key)
-        elif isinstance(value, dict):
-            # nested value dict
-            _nested_ui_dict(ui_schema, value, key)
-        else:
-            # normal value
-            _single_ui_option(ui_schema, value, key)
-
-    return ui_schema
-
-
-def _single_ui_option(
-    ui_schema: List[Dict[str, Any]], value: str, key: str, multiple: bool = False
-) -> None:
-    """Validate a single element."""
-    ui_node = {"name": key}
-
-    # If multiple
-    if multiple:
-        ui_node["multiple"] = True
-
-    # Parse extend data from type
-    match = RE_SCHEMA_ELEMENT.match(value)
-
-    # Prepare range
-    for group_name in _SCHEMA_LENGTH_PARTS:
-        group_value = match.group(group_name)
-        if not group_value:
-            continue
-        if group_name[2:] == "min":
-            ui_node["lengthMin"] = float(group_value)
-        elif group_name[2:] == "max":
-            ui_node["lengthMax"] = float(group_value)
-
-    # If required
-    if value.endswith("?"):
-        ui_node["optional"] = True
-    else:
-        ui_node["required"] = True
-
-    # Data types
-    if value.startswith(V_STR):
-        ui_node["type"] = "string"
-    elif value.startswith(V_PASSWORD):
-        ui_node["type"] = "string"
-        ui_node["format"] = "password"
-    elif value.startswith(V_INT):
-        ui_node["type"] = "integer"
-    elif value.startswith(V_FLOAT):
-        ui_node["type"] = "float"
-    elif value.startswith(V_BOOL):
-        ui_node["type"] = "boolean"
-    elif value.startswith(V_EMAIL):
-        ui_node["type"] = "string"
-        ui_node["format"] = "email"
-    elif value.startswith(V_URL):
-        ui_node["type"] = "string"
-        ui_node["format"] = "url"
-    elif value.startswith(V_PORT):
-        ui_node["type"] = "integer"
-    elif value.startswith(V_MATCH):
-        ui_node["type"] = "string"
-    elif value.startswith(V_LIST):
-        ui_node["type"] = "select"
-        ui_node["options"] = match.group("list").split("|")
-
-    ui_schema.append(ui_node)
-
-
-def _nested_ui_list(
-    ui_schema: List[Dict[str, Any]], option_list: List[Any], key: str
-) -> None:
-    """UI nested list items."""
-    try:
-        element = option_list[0]
-    except IndexError:
-        _LOGGER.error("Invalid schema %s", key)
-        return
-
-    if isinstance(element, dict):
-        _nested_ui_dict(ui_schema, element, key, multiple=True)
-    else:
-        _single_ui_option(ui_schema, element, key, multiple=True)
-
-
-def _nested_ui_dict(
-    ui_schema: List[Dict[str, Any]],
-    option_dict: Dict[str, Any],
-    key: str,
-    multiple: bool = False,
-) -> None:
-    """UI nested dict items."""
-    ui_node = {"name": key, "type": "schema", "optional": True, "multiple": multiple}
-
-    nested_schema = []
-    for c_key, c_value in option_dict.items():
-        # Nested?
-        if isinstance(c_value, list):
-            _nested_ui_list(nested_schema, c_value, c_key)
-        else:
-            _single_ui_option(nested_schema, c_value, c_key)
-
-    ui_node["schema"] = nested_schema
-    ui_schema.append(ui_node)
@@ -7,21 +7,28 @@ from aiohttp import web

 from ..coresys import CoreSys, CoreSysAttributes
 from .addons import APIAddons
+from .audio import APIAudio
 from .auth import APIAuth
 from .cli import APICli
 from .discovery import APIDiscovery
 from .dns import APICoreDNS
+from .docker import APIDocker
 from .hardware import APIHardware
-from .hassos import APIHassOS
 from .homeassistant import APIHomeAssistant
 from .host import APIHost
 from .info import APIInfo
 from .ingress import APIIngress
+from .jobs import APIJobs
 from .multicast import APIMulticast
+from .network import APINetwork
+from .observer import APIObserver
+from .os import APIOS
 from .proxy import APIProxy
+from .resolution import APIResoulution
 from .security import SecurityMiddleware
 from .services import APIServices
 from .snapshots import APISnapshots
 from .supervisor import APISupervisor
-from .audio import APIAudio

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -38,7 +45,10 @@ class RestAPI(CoreSysAttributes):
         self.security: SecurityMiddleware = SecurityMiddleware(coresys)
         self.webapp: web.Application = web.Application(
             client_max_size=MAX_CLIENT_SIZE,
-            middlewares=[self.security.token_validation],
+            middlewares=[
+                self.security.system_validation,
+                self.security.token_validation,
+            ],
         )

         # service stuff
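
The middleware list order matters here: aiohttp applies middlewares in the order
given, so system_validation can reject a request before token_validation ever runs.
A minimal standalone sketch of that ordering (not Supervisor code):

    from aiohttp import web

    @web.middleware
    async def outer(request, handler):
        # runs first on the way in, last on the way out
        return await handler(request)

    @web.middleware
    async def inner(request, handler):
        # runs second, like token_validation above
        return await handler(request)

    app = web.Application(middlewares=[outer, inner])
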
@@ -47,22 +57,31 @@ class RestAPI(CoreSysAttributes):

     async def load(self) -> None:
         """Register REST API Calls."""
-        self._register_supervisor()
-        self._register_host()
-        self._register_hassos()
+        self._register_addons()
+        self._register_audio()
+        self._register_auth()
         self._register_cli()
+        self._register_discovery()
+        self._register_dns()
+        self._register_docker()
         self._register_hardware()
         self._register_homeassistant()
-        self._register_proxy()
-        self._register_panel()
-        self._register_addons()
-        self._register_ingress()
-        self._register_snapshots()
-        self._register_discovery()
-        self._register_services()
+        self._register_host()
         self._register_info()
-        self._register_auth()
-        self._register_dns()
-        self._register_audio()
+        self._register_ingress()
         self._register_multicast()
+        self._register_network()
+        self._register_observer()
+        self._register_os()
+        self._register_jobs()
+        self._register_panel()
+        self._register_proxy()
+        self._register_resolution()
+        self._register_services()
+        self._register_snapshots()
+        self._register_supervisor()

         await self.start()

     def _register_host(self) -> None:
         """Register hostcontrol functions."""
@@ -72,6 +91,7 @@ class RestAPI(CoreSysAttributes):
         self.webapp.add_routes(
             [
                 web.get("/host/info", api_host.info),
+                web.get("/host/logs", api_host.logs),
                 web.post("/host/reboot", api_host.reboot),
                 web.post("/host/shutdown", api_host.shutdown),
                 web.post("/host/reload", api_host.reload),
@@ -84,22 +104,97 @@ class RestAPI(CoreSysAttributes):
             ]
         )

-    def _register_hassos(self) -> None:
-        """Register HassOS functions."""
-        api_hassos = APIHassOS()
-        api_hassos.coresys = self.coresys
+    def _register_network(self) -> None:
+        """Register network functions."""
+        api_network = APINetwork()
+        api_network.coresys = self.coresys

         self.webapp.add_routes(
             [
-                web.get("/os/info", api_hassos.info),
-                web.post("/os/update", api_hassos.update),
-                web.post("/os/update/cli", api_hassos.update_cli),
-                web.post("/os/config/sync", api_hassos.config_sync),
-                # Remove with old Supervisor fallback
-                web.get("/hassos/info", api_hassos.info),
-                web.post("/hassos/update", api_hassos.update),
-                web.post("/hassos/update/cli", api_hassos.update_cli),
-                web.post("/hassos/config/sync", api_hassos.config_sync),
+                web.get("/network/info", api_network.info),
+                web.post("/network/reload", api_network.reload),
+                web.get(
+                    "/network/interface/{interface}/info", api_network.interface_info
+                ),
+                web.post(
+                    "/network/interface/{interface}/update",
+                    api_network.interface_update,
+                ),
+                web.get(
+                    "/network/interface/{interface}/accesspoints",
+                    api_network.scan_accesspoints,
+                ),
+                web.post(
+                    "/network/interface/{interface}/vlan/{vlan}",
+                    api_network.create_vlan,
+                ),
             ]
         )

+    def _register_os(self) -> None:
+        """Register OS functions."""
+        api_os = APIOS()
+        api_os.coresys = self.coresys
+
+        self.webapp.add_routes(
+            [
+                web.get("/os/info", api_os.info),
+                web.post("/os/update", api_os.update),
+                web.post("/os/config/sync", api_os.config_sync),
+            ]
+        )
+
+    def _register_jobs(self) -> None:
+        """Register Jobs functions."""
+        api_jobs = APIJobs()
+        api_jobs.coresys = self.coresys
+
+        self.webapp.add_routes(
+            [
+                web.get("/jobs/info", api_jobs.info),
+                web.post("/jobs/options", api_jobs.options),
+                web.post("/jobs/reset", api_jobs.reset),
+            ]
+        )
+
     def _register_cli(self) -> None:
         """Register HA cli functions."""
         api_cli = APICli()
         api_cli.coresys = self.coresys

         self.webapp.add_routes(
             [
                 web.get("/cli/info", api_cli.info),
                 web.get("/cli/stats", api_cli.stats),
                 web.post("/cli/update", api_cli.update),
             ]
         )

+    def _register_observer(self) -> None:
+        """Register Observer functions."""
+        api_observer = APIObserver()
+        api_observer.coresys = self.coresys
+
+        self.webapp.add_routes(
+            [
+                web.get("/observer/info", api_observer.info),
+                web.get("/observer/stats", api_observer.stats),
+                web.post("/observer/update", api_observer.update),
+            ]
+        )
+
     def _register_multicast(self) -> None:
         """Register Multicast functions."""
         api_multicast = APIMulticast()
         api_multicast.coresys = self.coresys

         self.webapp.add_routes(
             [
                 web.get("/multicast/info", api_multicast.info),
                 web.get("/multicast/stats", api_multicast.stats),
                 web.get("/multicast/logs", api_multicast.logs),
                 web.post("/multicast/update", api_multicast.update),
                 web.post("/multicast/restart", api_multicast.restart),
             ]
         )

@@ -123,13 +218,41 @@ class RestAPI(CoreSysAttributes):

         self.webapp.add_routes([web.get("/info", api_info.info)])

+    def _register_resolution(self) -> None:
+        """Register resolution functions."""
+        api_resolution = APIResoulution()
+        api_resolution.coresys = self.coresys
+
+        self.webapp.add_routes(
+            [
+                web.get("/resolution/info", api_resolution.info),
+                web.post(
+                    "/resolution/suggestion/{suggestion}",
+                    api_resolution.apply_suggestion,
+                ),
+                web.delete(
+                    "/resolution/suggestion/{suggestion}",
+                    api_resolution.dismiss_suggestion,
+                ),
+                web.delete(
+                    "/resolution/issue/{issue}",
+                    api_resolution.dismiss_issue,
+                ),
+            ]
+        )
+
     def _register_auth(self) -> None:
         """Register auth functions."""
         api_auth = APIAuth()
         api_auth.coresys = self.coresys

         self.webapp.add_routes(
-            [web.post("/auth", api_auth.auth), web.post("/auth/reset", api_auth.reset)]
+            [
+                web.get("/auth", api_auth.auth),
+                web.post("/auth", api_auth.auth),
+                web.post("/auth/reset", api_auth.reset),
+                web.delete("/auth/cache", api_auth.cache),
+            ]
         )

     def _register_supervisor(self) -> None:
@@ -145,6 +268,7 @@ class RestAPI(CoreSysAttributes):
                 web.get("/supervisor/logs", api_supervisor.logs),
                 web.post("/supervisor/update", api_supervisor.update),
                 web.post("/supervisor/reload", api_supervisor.reload),
+                web.post("/supervisor/restart", api_supervisor.restart),
                 web.post("/supervisor/options", api_supervisor.options),
                 web.post("/supervisor/repair", api_supervisor.repair),
             ]
@@ -221,6 +345,9 @@ class RestAPI(CoreSysAttributes):
                 web.post("/addons/{addon}/restart", api_addons.restart),
                 web.post("/addons/{addon}/update", api_addons.update),
                 web.post("/addons/{addon}/options", api_addons.options),
+                web.post(
+                    "/addons/{addon}/options/validate", api_addons.options_validate
+                ),
                 web.post("/addons/{addon}/rebuild", api_addons.rebuild),
                 web.get("/addons/{addon}/logs", api_addons.logs),
                 web.get("/addons/{addon}/icon", api_addons.icon),
@@ -241,6 +368,7 @@ class RestAPI(CoreSysAttributes):
         self.webapp.add_routes(
             [
                 web.post("/ingress/session", api_ingress.create_session),
+                web.post("/ingress/validate_session", api_ingress.validate_session),
                 web.get("/ingress/panels", api_ingress.panels),
                 web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
             ]
@@ -259,7 +387,7 @@ class RestAPI(CoreSysAttributes):
                 web.post("/snapshots/new/partial", api_snapshots.snapshot_partial),
                 web.post("/snapshots/new/upload", api_snapshots.upload),
                 web.get("/snapshots/{snapshot}/info", api_snapshots.info),
-                web.post("/snapshots/{snapshot}/remove", api_snapshots.remove),
+                web.delete("/snapshots/{snapshot}", api_snapshots.remove),
                 web.post(
                     "/snapshots/{snapshot}/restore/full", api_snapshots.restore_full
                 ),
@@ -268,6 +396,8 @@ class RestAPI(CoreSysAttributes):
                     api_snapshots.restore_partial,
                 ),
                 web.get("/snapshots/{snapshot}/download", api_snapshots.download),
+                # Old, remove at end of 2020
+                web.post("/snapshots/{snapshot}/remove", api_snapshots.remove),
             ]
         )

@@ -329,7 +459,11 @@ class RestAPI(CoreSysAttributes):
                 web.post("/audio/update", api_audio.update),
                 web.post("/audio/restart", api_audio.restart),
                 web.post("/audio/reload", api_audio.reload),
+                web.post("/audio/profile", api_audio.set_profile),
+                web.post("/audio/volume/{source}/application", api_audio.set_volume),
                 web.post("/audio/volume/{source}", api_audio.set_volume),
+                web.post("/audio/mute/{source}/application", api_audio.set_mute),
+                web.post("/audio/mute/{source}", api_audio.set_mute),
                 web.post("/audio/default/{source}", api_audio.set_default),
             ]
         )
@@ -339,6 +473,20 @@ class RestAPI(CoreSysAttributes):
         panel_dir = Path(__file__).parent.joinpath("panel")
         self.webapp.add_routes([web.static("/app", panel_dir)])

+    def _register_docker(self) -> None:
+        """Register docker configuration functions."""
+        api_docker = APIDocker()
+        api_docker.coresys = self.coresys
+
+        self.webapp.add_routes(
+            [
+                web.get("/docker/info", api_docker.info),
+                web.get("/docker/registries", api_docker.registries),
+                web.post("/docker/registries", api_docker.create_registry),
+                web.delete("/docker/registries/{hostname}", api_docker.remove_registry),
+            ]
+        )
+
     async def start(self) -> None:
         """Run RESTful API webserver."""
         await self._runner.setup()
@@ -349,9 +497,9 @@ class RestAPI(CoreSysAttributes):
         try:
             await self._site.start()
         except OSError as err:
-            _LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
+            _LOGGER.critical("Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
         else:
-            _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
+            _LOGGER.info("Starting API on %s", self.sys_docker.network.supervisor)

     async def stop(self) -> None:
         """Stop RESTful API webserver."""
@@ -362,4 +510,4 @@ class RestAPI(CoreSysAttributes):
         await self._site.stop()
         await self._runner.cleanup()

-        _LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)
+        _LOGGER.info("Stopping API on %s", self.sys_docker.network.supervisor)
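
A hedged client-side sketch of exercising two of the reworked routes; the host,
token, and registry payload shape here are placeholders for illustration, not
defined by this diff:

    import asyncio
    import aiohttp

    async def main() -> None:
        headers = {"Authorization": "Bearer <supervisor-token>"}  # placeholder
        async with aiohttp.ClientSession(headers=headers) as session:
            # snapshots are now removed with the DELETE verb
            await session.delete("http://supervisor/snapshots/9a8f6d2b")
            # registry credentials live under the new /docker/registries routes
            await session.post(
                "http://supervisor/docker/registries",
                json={"registry.example.com": {"username": "me", "password": "secret"}},
            )

    asyncio.run(main())
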
@@ -5,6 +5,7 @@ from typing import Any, Awaitable, Dict, List

 from aiohttp import web
 import voluptuous as vol
+from voluptuous.humanize import humanize_error

 from ..addons import AnyAddon
 from ..addons.addon import Addon
@@ -54,7 +55,6 @@ from ..const import (
     ATTR_INSTALLED,
     ATTR_IP_ADDRESS,
     ATTR_KERNEL_MODULES,
-    ATTR_LAST_VERSION,
     ATTR_LOGO,
     ATTR_LONG_DESCRIPTION,
     ATTR_MACHINE,
@@ -62,6 +62,7 @@ from ..const import (
     ATTR_MEMORY_LIMIT,
     ATTR_MEMORY_PERCENT,
     ATTR_MEMORY_USAGE,
+    ATTR_MESSAGE,
     ATTR_NAME,
     ATTR_NETWORK,
     ATTR_NETWORK_DESCRIPTION,
@@ -78,25 +79,31 @@ from ..const import (
     ATTR_SLUG,
     ATTR_SOURCE,
     ATTR_STAGE,
+    ATTR_STARTUP,
     ATTR_STATE,
     ATTR_STDIN,
+    ATTR_UART,
     ATTR_UDEV,
+    ATTR_UPDATE_AVAILABLE,
     ATTR_URL,
+    ATTR_USB,
+    ATTR_VALID,
     ATTR_VERSION,
+    ATTR_VERSION_LATEST,
     ATTR_VIDEO,
+    ATTR_WATCHDOG,
     ATTR_WEBUI,
-    BOOT_AUTO,
-    BOOT_MANUAL,
     CONTENT_TYPE_BINARY,
     CONTENT_TYPE_PNG,
     CONTENT_TYPE_TEXT,
     REQUEST_FROM,
-    STATE_NONE,
+    AddonBoot,
+    AddonState,
 )
 from ..coresys import CoreSysAttributes
 from ..docker.stats import DockerStats
 from ..exceptions import APIError
-from ..validate import DOCKER_PORTS
+from ..validate import docker_ports
 from .utils import api_process, api_process_raw, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -106,12 +113,13 @@ SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})

 # pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema(
     {
-        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
-        vol.Optional(ATTR_NETWORK): vol.Maybe(DOCKER_PORTS),
+        vol.Optional(ATTR_BOOT): vol.Coerce(AddonBoot),
+        vol.Optional(ATTR_NETWORK): vol.Maybe(docker_ports),
         vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
         vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
         vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
         vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
+        vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
     }
 )

@@ -122,9 +130,7 @@ SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})

 class APIAddons(CoreSysAttributes):
     """Handle RESTful API for add-on functions."""

-    def _extract_addon(
-        self, request: web.Request, check_installed: bool = True
-    ) -> AnyAddon:
+    def _extract_addon(self, request: web.Request) -> AnyAddon:
         """Return addon, throw an exception if it doesn't exist."""
         addon_slug: str = request.match_info.get("addon")

@@ -137,49 +143,53 @@ class APIAddons(CoreSysAttributes):

         addon = self.sys_addons.get(addon_slug)
         if not addon:
-            raise APIError("Addon does not exist")
+            raise APIError(f"Addon {addon_slug} does not exist")

-        if check_installed and not addon.is_installed:
-            raise APIError("Addon is not installed")
+        return addon
+
+    def _extract_addon_installed(self, request: web.Request) -> Addon:
+        addon = self._extract_addon(request)
+        if not isinstance(addon, Addon) or not addon.is_installed:
+            raise APIError("Addon is not installed")

         return addon

     @api_process
     async def list(self, request: web.Request) -> Dict[str, Any]:
         """Return all add-ons and repositories."""
-        data_addons = []
-        for addon in self.sys_addons.all:
-            data_addons.append(
-                {
-                    ATTR_NAME: addon.name,
-                    ATTR_SLUG: addon.slug,
-                    ATTR_DESCRIPTON: addon.description,
-                    ATTR_ADVANCED: addon.advanced,
-                    ATTR_STAGE: addon.stage,
-                    ATTR_VERSION: addon.latest_version,
-                    ATTR_INSTALLED: addon.version if addon.is_installed else None,
-                    ATTR_AVAILABLE: addon.available,
-                    ATTR_DETACHED: addon.is_detached,
-                    ATTR_REPOSITORY: addon.repository,
-                    ATTR_BUILD: addon.need_build,
-                    ATTR_URL: addon.url,
-                    ATTR_ICON: addon.with_icon,
-                    ATTR_LOGO: addon.with_logo,
-                }
-            )
-
-        data_repositories = []
-        for repository in self.sys_store.all:
-            data_repositories.append(
-                {
-                    ATTR_SLUG: repository.slug,
-                    ATTR_NAME: repository.name,
-                    ATTR_SOURCE: repository.source,
-                    ATTR_URL: repository.url,
-                    ATTR_MAINTAINER: repository.maintainer,
-                }
-            )
+        data_addons = [
+            {
+                ATTR_NAME: addon.name,
+                ATTR_SLUG: addon.slug,
+                ATTR_DESCRIPTON: addon.description,
+                ATTR_ADVANCED: addon.advanced,
+                ATTR_STAGE: addon.stage,
+                ATTR_VERSION: addon.version if addon.is_installed else None,
+                ATTR_VERSION_LATEST: addon.latest_version,
+                ATTR_UPDATE_AVAILABLE: addon.need_update
+                if addon.is_installed
+                else False,
+                ATTR_INSTALLED: addon.is_installed,
+                ATTR_AVAILABLE: addon.available,
+                ATTR_DETACHED: addon.is_detached,
+                ATTR_REPOSITORY: addon.repository,
+                ATTR_BUILD: addon.need_build,
+                ATTR_URL: addon.url,
+                ATTR_ICON: addon.with_icon,
+                ATTR_LOGO: addon.with_logo,
+            }
+            for addon in self.sys_addons.all
+        ]
+
+        data_repositories = [
+            {
+                ATTR_SLUG: repository.slug,
+                ATTR_NAME: repository.name,
+                ATTR_SOURCE: repository.source,
+                ATTR_URL: repository.url,
+                ATTR_MAINTAINER: repository.maintainer,
+            }
+            for repository in self.sys_store.all
+        ]
         return {ATTR_ADDONS: data_addons, ATTR_REPOSITORIES: data_repositories}

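With this change an entry in the returned addons list distinguishes the installed
version from the latest available one, roughly (field values hypothetical):

    {
        "name": "Example add-on",
        "slug": "example",
        "version": "1.1.0",          # None when not installed
        "version_latest": "1.2.0",
        "update_available": True,    # always False when not installed
        "installed": True,
        ...
    }
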
     @api_process
@@ -190,7 +200,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def info(self, request: web.Request) -> Dict[str, Any]:
         """Return add-on information."""
-        addon: AnyAddon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request)

         data = {
             ATTR_NAME: addon.name,
@@ -204,7 +214,8 @@ class APIAddons(CoreSysAttributes):
             ATTR_AUTO_UPDATE: None,
             ATTR_REPOSITORY: addon.repository,
             ATTR_VERSION: None,
-            ATTR_LAST_VERSION: addon.latest_version,
+            ATTR_VERSION_LATEST: addon.latest_version,
+            ATTR_UPDATE_AVAILABLE: False,
             ATTR_PROTECTED: addon.protected,
             ATTR_RATING: rating_security(addon),
             ATTR_BOOT: addon.boot,
@@ -214,7 +225,7 @@ class APIAddons(CoreSysAttributes):
             ATTR_MACHINE: addon.supported_machine,
             ATTR_HOMEASSISTANT: addon.homeassistant_version,
             ATTR_URL: addon.url,
-            ATTR_STATE: STATE_NONE,
+            ATTR_STATE: AddonState.UNKNOWN,
             ATTR_DETACHED: addon.is_detached,
             ATTR_AVAILABLE: addon.available,
             ATTR_BUILD: addon.need_build,
@@ -227,7 +238,7 @@ class APIAddons(CoreSysAttributes):
             ATTR_PRIVILEGED: addon.privileged,
             ATTR_FULL_ACCESS: addon.with_full_access,
             ATTR_APPARMOR: addon.apparmor,
-            ATTR_DEVICES: _pretty_devices(addon),
+            ATTR_DEVICES: addon.static_devices,
             ATTR_ICON: addon.with_icon,
             ATTR_LOGO: addon.with_logo,
             ATTR_CHANGELOG: addon.with_changelog,
@@ -239,6 +250,8 @@ class APIAddons(CoreSysAttributes):
             ATTR_AUTH_API: addon.access_auth_api,
             ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
             ATTR_GPIO: addon.with_gpio,
+            ATTR_USB: addon.with_usb,
+            ATTR_UART: addon.with_uart,
             ATTR_KERNEL_MODULES: addon.with_kernel_modules,
             ATTR_DEVICETREE: addon.with_devicetree,
             ATTR_UDEV: addon.with_udev,
@@ -247,6 +260,7 @@ class APIAddons(CoreSysAttributes):
             ATTR_AUDIO: addon.with_audio,
             ATTR_AUDIO_INPUT: None,
             ATTR_AUDIO_OUTPUT: None,
+            ATTR_STARTUP: addon.startup,
             ATTR_SERVICES: _pretty_services(addon),
             ATTR_DISCOVERY: addon.discovery,
             ATTR_IP_ADDRESS: None,
@@ -255,12 +269,13 @@ class APIAddons(CoreSysAttributes):
             ATTR_INGRESS_URL: None,
             ATTR_INGRESS_PORT: None,
             ATTR_INGRESS_PANEL: None,
+            ATTR_WATCHDOG: None,
         }

-        if addon.is_installed:
+        if isinstance(addon, Addon) and addon.is_installed:
             data.update(
                 {
-                    ATTR_STATE: await addon.state(),
+                    ATTR_STATE: addon.state,
                     ATTR_WEBUI: addon.webui,
                     ATTR_INGRESS_ENTRY: addon.ingress_entry,
                     ATTR_INGRESS_URL: addon.ingress_url,
@@ -271,6 +286,10 @@ class APIAddons(CoreSysAttributes):
                     ATTR_AUTO_UPDATE: addon.auto_update,
                     ATTR_IP_ADDRESS: str(addon.ip_address),
                     ATTR_VERSION: addon.version,
+                    ATTR_UPDATE_AVAILABLE: addon.need_update,
+                    ATTR_WATCHDOG: addon.watchdog,
+                    ATTR_DEVICES: addon.static_devices
+                    + [device.path for device in addon.devices],
                 }
             )

@@ -279,10 +298,10 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def options(self, request: web.Request) -> None:
         """Store user options for add-on."""
-        addon: AnyAddon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)

         # Update secrets for validation
-        await self.sys_secrets.reload()
+        await self.sys_homeassistant.secrets.reload()

         # Extend schema with add-on specific validation
         addon_schema = SCHEMA_OPTIONS.extend(
@@ -306,17 +325,32 @@ class APIAddons(CoreSysAttributes):
         if ATTR_INGRESS_PANEL in body:
             addon.ingress_panel = body[ATTR_INGRESS_PANEL]
             await self.sys_ingress.update_hass_panel(addon)
+        if ATTR_WATCHDOG in body:
+            addon.watchdog = body[ATTR_WATCHDOG]

         addon.save_persist()

+    @api_process
+    async def options_validate(self, request: web.Request) -> None:
+        """Validate user options for add-on."""
+        addon = self._extract_addon_installed(request)
+        data = {ATTR_MESSAGE: "", ATTR_VALID: True}
+        try:
+            addon.schema(addon.options)
+        except vol.Invalid as ex:
+            data[ATTR_MESSAGE] = humanize_error(addon.options, ex)
+            data[ATTR_VALID] = False
+
+        return data

@api_process
|
||||
async def security(self, request: web.Request) -> None:
|
||||
"""Store security options for add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request)
|
||||
addon = self._extract_addon_installed(request)
|
||||
body: Dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
|
||||
|
||||
if ATTR_PROTECTED in body:
|
||||
_LOGGER.warning("Protected flag changing for %s!", addon.slug)
|
||||
_LOGGER.warning("Changing protected flag for %s!", addon.slug)
|
||||
addon.protected = body[ATTR_PROTECTED]
|
||||
|
||||
addon.save_persist()
|
||||
@@ -324,7 +358,8 @@ class APIAddons(CoreSysAttributes):
|
||||
@api_process
|
||||
async def stats(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return resource information."""
|
||||
addon: AnyAddon = self._extract_addon(request)
|
||||
addon = self._extract_addon_installed(request)
|
||||
|
||||
stats: DockerStats = await addon.stats()
|
||||
|
||||
return {
|
||||
@@ -341,64 +376,57 @@ class APIAddons(CoreSysAttributes):
|
||||
@api_process
|
||||
def install(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Install add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request, check_installed=False)
|
||||
addon = self._extract_addon(request)
|
||||
return asyncio.shield(addon.install())
|
||||
|
||||
@api_process
|
||||
def uninstall(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Uninstall add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request)
|
||||
addon = self._extract_addon_installed(request)
|
||||
return asyncio.shield(addon.uninstall())
|
||||
|
||||
@api_process
|
||||
def start(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Start add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request)
|
||||
addon = self._extract_addon_installed(request)
|
||||
return asyncio.shield(addon.start())
|
||||
|
||||
@api_process
|
||||
def stop(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Stop add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request)
|
||||
addon = self._extract_addon_installed(request)
|
||||
return asyncio.shield(addon.stop())
|
||||
|
||||
@api_process
|
||||
def update(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Update add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request)
|
||||
|
||||
if addon.latest_version == addon.version:
|
||||
raise APIError("No update available!")
|
||||
|
||||
addon: Addon = self._extract_addon_installed(request)
|
||||
return asyncio.shield(addon.update())
|
||||
|
||||
@api_process
|
||||
def restart(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Restart add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request)
|
||||
addon: Addon = self._extract_addon_installed(request)
|
||||
return asyncio.shield(addon.restart())
|
||||
|
||||
@api_process
|
||||
def rebuild(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Rebuild local build add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request)
|
||||
if not addon.need_build:
|
||||
raise APIError("Only local build addons are supported")
|
||||
|
||||
addon = self._extract_addon_installed(request)
|
||||
return asyncio.shield(addon.rebuild())
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request: web.Request) -> Awaitable[bytes]:
|
||||
"""Return logs from add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request)
|
||||
addon = self._extract_addon_installed(request)
|
||||
return addon.logs()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_PNG)
|
||||
async def icon(self, request: web.Request) -> bytes:
|
||||
"""Return icon from add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request, check_installed=False)
|
||||
addon = self._extract_addon(request)
|
||||
if not addon.with_icon:
|
||||
raise APIError("No icon found!")
|
||||
raise APIError(f"No icon found for add-on {addon.slug}!")
|
||||
|
||||
with addon.path_icon.open("rb") as png:
|
||||
return png.read()
|
||||
@@ -406,9 +434,9 @@ class APIAddons(CoreSysAttributes):
|
||||
@api_process_raw(CONTENT_TYPE_PNG)
|
||||
async def logo(self, request: web.Request) -> bytes:
|
||||
"""Return logo from add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request, check_installed=False)
|
||||
addon = self._extract_addon(request)
|
||||
if not addon.with_logo:
|
||||
raise APIError("No logo found!")
|
||||
raise APIError(f"No logo found for add-on {addon.slug}!")
|
||||
|
||||
with addon.path_logo.open("rb") as png:
|
||||
return png.read()
|
||||
@@ -416,9 +444,9 @@ class APIAddons(CoreSysAttributes):
|
||||
@api_process_raw(CONTENT_TYPE_TEXT)
|
||||
async def changelog(self, request: web.Request) -> str:
|
||||
"""Return changelog from add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request, check_installed=False)
|
||||
addon = self._extract_addon(request)
|
||||
if not addon.with_changelog:
|
||||
raise APIError("No changelog found!")
|
||||
raise APIError(f"No changelog found for add-on {addon.slug}!")
|
||||
|
||||
with addon.path_changelog.open("r") as changelog:
|
||||
return changelog.read()
|
||||
@@ -426,9 +454,9 @@ class APIAddons(CoreSysAttributes):
|
||||
@api_process_raw(CONTENT_TYPE_TEXT)
|
||||
async def documentation(self, request: web.Request) -> str:
|
||||
"""Return documentation from add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request, check_installed=False)
|
||||
addon = self._extract_addon(request)
|
||||
if not addon.with_documentation:
|
||||
raise APIError("No documentation found!")
|
||||
raise APIError(f"No documentation found for add-on {addon.slug}!")
|
||||
|
||||
with addon.path_documentation.open("r") as documentation:
|
||||
return documentation.read()
|
||||
@@ -436,9 +464,9 @@ class APIAddons(CoreSysAttributes):
|
||||
@api_process
|
||||
async def stdin(self, request: web.Request) -> None:
|
||||
"""Write to stdin of add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request)
|
||||
addon = self._extract_addon_installed(request)
|
||||
if not addon.with_stdin:
|
||||
raise APIError("STDIN not supported by add-on")
|
||||
raise APIError(f"STDIN not supported the {addon.slug} add-on")
|
||||
|
||||
data = await request.read()
|
||||
await asyncio.shield(addon.write_stdin(data))
|
||||
@@ -448,13 +476,10 @@ def _pretty_devices(addon: AnyAddon) -> List[str]:
|
||||
"""Return a simplified device list."""
|
||||
dev_list = addon.devices
|
||||
if not dev_list:
|
||||
return None
|
||||
return []
|
||||
return [row.split(":")[0] for row in dev_list]
|
||||
|
||||
|
||||
def _pretty_services(addon: AnyAddon) -> List[str]:
|
||||
"""Return a simplified services role list."""
|
||||
services = []
|
||||
for name, access in addon.services_role.items():
|
||||
services.append(f"{name}:{access}")
|
||||
return services
|
||||
return [f"{name}:{access}" for name, access in addon.services_role.items()]
|
||||
|
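
Editor's note: the hunks above repeatedly swap `self._extract_addon(request)` (and its old `check_installed=False` variant) for a new `self._extract_addon_installed(request)` helper whose definition sits above this excerpt. A minimal sketch of what such a helper plausibly looks like, inferred only from the `APIError` messages at the top of this section; the real definition may differ:

    # Sketch only: inferred from the error messages shown above, not part of the diff.
    def _extract_addon_installed(self, request: web.Request) -> Addon:
        """Return the installed add-on addressed by the request or raise APIError."""
        addon = self.sys_addons.get(request.match_info.get("addon"))
        if addon is None:
            raise APIError("Addon does not exist")
        if not isinstance(addon, Addon) or not addon.is_installed:
            raise APIError("Addon is not installed")
        return addon

Centralizing the lookup lets each handler drop its own existence and installed-state checks, which is why update() and rebuild() above lose their inline guards.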
@@ -8,13 +8,16 @@ import attr
import voluptuous as vol

from ..const import (
+    ATTR_ACTIVE,
+    ATTR_APPLICATION,
    ATTR_AUDIO,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
+    ATTR_CARD,
    ATTR_CPU_PERCENT,
    ATTR_HOST,
+    ATTR_INDEX,
    ATTR_INPUT,
-    ATTR_LATEST_VERSION,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
@@ -22,28 +25,43 @@ from ..const import (
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_OUTPUT,
+    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
+    ATTR_VERSION_LATEST,
    ATTR_VOLUME,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
-from ..host.sound import SourceType
+from ..host.sound import StreamType
+from ..validate import version_tag
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

-SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
+SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})

SCHEMA_VOLUME = vol.Schema(
    {
-        vol.Required(ATTR_NAME): vol.Coerce(str),
+        vol.Required(ATTR_INDEX): vol.Coerce(int),
        vol.Required(ATTR_VOLUME): vol.Coerce(float),
    }
)

+# pylint: disable=no-value-for-parameter
+SCHEMA_MUTE = vol.Schema(
+    {
+        vol.Required(ATTR_INDEX): vol.Coerce(int),
+        vol.Required(ATTR_ACTIVE): vol.Boolean(),
+    }
+)
+
SCHEMA_DEFAULT = vol.Schema({vol.Required(ATTR_NAME): vol.Coerce(str)})

+SCHEMA_PROFILE = vol.Schema(
+    {vol.Required(ATTR_CARD): vol.Coerce(str), vol.Required(ATTR_NAME): vol.Coerce(str)}
+)
+

class APIAudio(CoreSysAttributes):
    """Handle RESTful API for Audio functions."""
@@ -52,17 +70,20 @@ class APIAudio(CoreSysAttributes):
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return Audio information."""
        return {
-            ATTR_VERSION: self.sys_audio.version,
-            ATTR_LATEST_VERSION: self.sys_audio.latest_version,
+            ATTR_VERSION: self.sys_plugins.audio.version,
+            ATTR_VERSION_LATEST: self.sys_plugins.audio.latest_version,
+            ATTR_UPDATE_AVAILABLE: self.sys_plugins.audio.need_update,
            ATTR_HOST: str(self.sys_docker.network.audio),
            ATTR_AUDIO: {
+                ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards],
                ATTR_INPUT: [
-                    attr.asdict(profile)
-                    for profile in self.sys_host.sound.input_profiles
+                    attr.asdict(stream) for stream in self.sys_host.sound.inputs
                ],
                ATTR_OUTPUT: [
-                    attr.asdict(profile)
-                    for profile in self.sys_host.sound.output_profiles
+                    attr.asdict(stream) for stream in self.sys_host.sound.outputs
                ],
+                ATTR_APPLICATION: [
+                    attr.asdict(stream) for stream in self.sys_host.sound.applications
+                ],
            },
        }
@@ -70,7 +91,7 @@ class APIAudio(CoreSysAttributes):
    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
-        stats = await self.sys_audio.stats()
+        stats = await self.sys_plugins.audio.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
@@ -87,21 +108,21 @@ class APIAudio(CoreSysAttributes):
    async def update(self, request: web.Request) -> None:
        """Update Audio plugin."""
        body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.sys_audio.latest_version)
+        version = body.get(ATTR_VERSION, self.sys_plugins.audio.latest_version)

-        if version == self.sys_audio.version:
-            raise APIError("Version {} is already in use".format(version))
-        await asyncio.shield(self.sys_audio.update(version))
+        if version == self.sys_plugins.audio.version:
+            raise APIError(f"Version {version} is already in use")
+        await asyncio.shield(self.sys_plugins.audio.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return Audio Docker logs."""
-        return self.sys_audio.logs()
+        return self.sys_plugins.audio.logs()

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart Audio plugin."""
-        return asyncio.shield(self.sys_audio.restart())
+        return asyncio.shield(self.sys_plugins.audio.restart())

    @api_process
    def reload(self, request: web.Request) -> Awaitable[None]:
@@ -110,18 +131,43 @@ class APIAudio(CoreSysAttributes):

    @api_process
    async def set_volume(self, request: web.Request) -> None:
-        """Set Audio information."""
-        source: SourceType = SourceType(request.match_info.get("source"))
+        """Set audio volume on stream."""
+        source: StreamType = StreamType(request.match_info.get("source"))
+        application: bool = request.path.endswith("application")
        body = await api_validate(SCHEMA_VOLUME, request)

        await asyncio.shield(
-            self.sys_host.sound.set_volume(source, body[ATTR_NAME], body[ATTR_VOLUME])
+            self.sys_host.sound.set_volume(
+                source, body[ATTR_INDEX], body[ATTR_VOLUME], application
+            )
        )

+    @api_process
+    async def set_mute(self, request: web.Request) -> None:
+        """Mute audio volume on stream."""
+        source: StreamType = StreamType(request.match_info.get("source"))
+        application: bool = request.path.endswith("application")
+        body = await api_validate(SCHEMA_MUTE, request)
+
+        await asyncio.shield(
+            self.sys_host.sound.set_mute(
+                source, body[ATTR_INDEX], body[ATTR_ACTIVE], application
+            )
+        )
+
    @api_process
    async def set_default(self, request: web.Request) -> None:
-        """Set Audio default sources."""
-        source: SourceType = SourceType(request.match_info.get("source"))
+        """Set audio default stream."""
+        source: StreamType = StreamType(request.match_info.get("source"))
        body = await api_validate(SCHEMA_DEFAULT, request)

        await asyncio.shield(self.sys_host.sound.set_default(source, body[ATTR_NAME]))

+    @api_process
+    async def set_profile(self, request: web.Request) -> None:
+        """Set audio default sources."""
+        body = await api_validate(SCHEMA_PROFILE, request)
+
+        await asyncio.shield(
+            self.sys_host.sound.ativate_profile(body[ATTR_CARD], body[ATTR_NAME])
+        )
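
Editor's note: the volume and mute endpoints now address streams by numeric index rather than by name (and `ativate_profile` is kept verbatim here, since that spelling is the method's actual name in the sound module). A quick standalone illustration of payloads that pass the new schemas; the attribute constants resolve to plain strings such as "index", and the values are made up:

    import voluptuous as vol

    # Mirrors SCHEMA_VOLUME and SCHEMA_MUTE from the diff above.
    schema_volume = vol.Schema(
        {vol.Required("index"): vol.Coerce(int), vol.Required("volume"): vol.Coerce(float)}
    )
    schema_mute = vol.Schema(
        {vol.Required("index"): vol.Coerce(int), vol.Required("active"): vol.Boolean()}
    )

    print(schema_volume({"index": 0, "volume": 0.65}))  # {'index': 0, 'volume': 0.65}
    print(schema_mute({"index": 0, "active": True}))    # {'index': 0, 'active': True}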
@@ -29,6 +29,10 @@ SCHEMA_PASSWORD_RESET = vol.Schema(
    }
)

+REALM_HEADER: Dict[str, str] = {
+    WWW_AUTHENTICATE: 'Basic realm="Home Assistant Authentication"'
+}
+

class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""
@@ -63,7 +67,9 @@ class APIAuth(CoreSysAttributes):

        # BasicAuth
        if AUTHORIZATION in request.headers:
-            return await self._process_basic(request, addon)
+            if not await self._process_basic(request, addon):
+                raise HTTPUnauthorized(headers=REALM_HEADER)
+            return True

        # Json
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
@@ -75,9 +81,7 @@ class APIAuth(CoreSysAttributes):
            data = await request.post()
            return await self._process_dict(request, addon, data)

-        raise HTTPUnauthorized(
-            headers={WWW_AUTHENTICATE: 'Basic realm="Home Assistant Authentication"'}
-        )
+        raise HTTPUnauthorized(headers=REALM_HEADER)

    @api_process
    async def reset(self, request: web.Request) -> None:
@@ -86,3 +90,8 @@ class APIAuth(CoreSysAttributes):
        await asyncio.shield(
            self.sys_auth.change_password(body[ATTR_USERNAME], body[ATTR_PASSWORD])
        )
+
+    @api_process
+    async def cache(self, request: web.Request) -> None:
+        """Process cache reset request."""
+        self.sys_auth.reset_data()
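
Editor's note: with the shared `REALM_HEADER`, a failed BasicAuth check now answers 401 with the realm header instead of silently returning the check result. An illustrative client call in Python; the `/auth` path is assumed from the Supervisor API layout and the credentials are made up:

    import asyncio
    import aiohttp

    async def check_login() -> None:
        # Hypothetical credentials; a real call runs inside the add-on network.
        auth = aiohttp.BasicAuth("homeassistant_user", "wrong-password")
        async with aiohttp.ClientSession() as session:
            async with session.post("http://supervisor/auth", auth=auth) as resp:
                # Expected failure: 401 plus
                # WWW-Authenticate: Basic realm="Home Assistant Authentication"
                print(resp.status, resp.headers.get("WWW-Authenticate"))

    # asyncio.run(check_login())  # only meaningful from inside the Supervisor network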
supervisor/api/cli.py (Normal file, 65 lines)
@@ -0,0 +1,65 @@
"""Init file for Supervisor HA cli RESTful API."""
import asyncio
import logging
from typing import Any, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_CPU_PERCENT,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from ..validate import version_tag
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})


class APICli(CoreSysAttributes):
    """Handle RESTful API for HA Cli functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return HA cli information."""
        return {
            ATTR_VERSION: self.sys_plugins.cli.version,
            ATTR_VERSION_LATEST: self.sys_plugins.cli.latest_version,
            ATTR_UPDATE_AVAILABLE: self.sys_plugins.cli.need_update,
        }

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        stats = await self.sys_plugins.cli.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update HA CLI."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_plugins.cli.latest_version)

        await asyncio.shield(self.sys_plugins.cli.update(version))
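
Editor's note: `SCHEMA_VERSION` here, like the other plugin endpoints in this changeset, switches from `vol.Coerce(str)` to the shared `version_tag` validator. A rough sketch of the difference, assuming `version_tag` rejects strings that are not valid version tags:

    import voluptuous as vol

    # Old behaviour: anything string-coercible passed validation.
    old_schema = vol.Schema({vol.Optional("version"): vol.Coerce(str)})
    print(old_schema({"version": "not a version"}))  # accepted unchanged

    # New behaviour (sketched): a stricter validator such as version_tag would
    # raise vol.Invalid for the same input instead of accepting it, so malformed
    # update requests fail at validation rather than inside the updater.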
@@ -1,19 +1,19 @@
"""Init file for Supervisor network RESTful API."""
import voluptuous as vol

-from .utils import api_process, api_validate
from ..const import (
    ATTR_ADDON,
-    ATTR_UUID,
    ATTR_CONFIG,
    ATTR_DISCOVERY,
    ATTR_SERVICE,
+    ATTR_SERVICES,
+    ATTR_UUID,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, APIForbidden
from ..discovery.validate import valid_discovery_service
-
+from ..exceptions import APIError, APIForbidden
+from .utils import api_process, api_validate

SCHEMA_DISCOVERY = vol.Schema(
    {
@@ -43,6 +43,7 @@ class APIDiscovery(CoreSysAttributes):
        """Show register services."""
        self._check_permission_ha(request)

+        # Get available discovery
        discovery = []
        for message in self.sys_discovery.list_messages:
            discovery.append(
@@ -54,7 +55,13 @@ class APIDiscovery(CoreSysAttributes):
                }
            )

-        return {ATTR_DISCOVERY: discovery}
+        # Get available services/add-ons
+        services = {}
+        for addon in self.sys_addons.all:
+            for name in addon.discovery:
+                services.setdefault(name, []).append(addon.slug)
+
+        return {ATTR_DISCOVERY: discovery, ATTR_SERVICES: services}

    @api_process
    async def set_discovery(self, request):
@@ -64,7 +71,7 @@ class APIDiscovery(CoreSysAttributes):

        # Access?
        if body[ATTR_SERVICE] not in addon.discovery:
-            raise APIForbidden(f"Can't use discovery!")
+            raise APIForbidden("Can't use discovery!")

        # Process discovery message
        message = self.sys_discovery.send(addon, **body)
@@ -94,7 +101,7 @@ class APIDiscovery(CoreSysAttributes):

        # Permission
        if message.addon != addon.slug:
-            raise APIForbidden(f"Can't remove discovery message")
+            raise APIForbidden("Can't remove discovery message")

        self.sys_discovery.remove(message)
        return True
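
Editor's note: the listing endpoint now also reports which add-ons can provide each discovery service. The aggregation pattern in isolation, with made-up slugs and service names:

    services: dict = {}
    addons = [
        ("core_mosquitto", ["mqtt"]),        # illustrative (slug, discovery) pairs
        ("a0d7b954_esphome", ["esphome"]),
        ("local_broker", ["mqtt"]),
    ]
    for slug, discovery in addons:
        for name in discovery:
            services.setdefault(name, []).append(slug)

    print(services)
    # {'mqtt': ['core_mosquitto', 'local_broker'], 'esphome': ['a0d7b954_esphome']}

Using `setdefault` keeps the loop a single pass even when several add-ons offer the same service.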
@@ -11,7 +11,6 @@ from ..const import (
    ATTR_BLK_WRITE,
    ATTR_CPU_PERCENT,
    ATTR_HOST,
-    ATTR_LATEST_VERSION,
    ATTR_LOCALS,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
@@ -19,12 +18,14 @@ from ..const import (
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_SERVERS,
+    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
+    ATTR_VERSION_LATEST,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
-from ..validate import dns_server_list
+from ..validate import dns_server_list, version_tag
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -32,7 +33,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): dns_server_list})

-SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
+SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})


class APICoreDNS(CoreSysAttributes):
@@ -42,11 +43,12 @@ class APICoreDNS(CoreSysAttributes):
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return DNS information."""
        return {
-            ATTR_VERSION: self.sys_dns.version,
-            ATTR_LATEST_VERSION: self.sys_dns.latest_version,
+            ATTR_VERSION: self.sys_plugins.dns.version,
+            ATTR_VERSION_LATEST: self.sys_plugins.dns.latest_version,
+            ATTR_UPDATE_AVAILABLE: self.sys_plugins.dns.need_update,
            ATTR_HOST: str(self.sys_docker.network.dns),
-            ATTR_SERVERS: self.sys_dns.servers,
-            ATTR_LOCALS: self.sys_host.network.dns_servers,
+            ATTR_SERVERS: self.sys_plugins.dns.servers,
+            ATTR_LOCALS: self.sys_plugins.dns.locals,
        }

    @api_process
@@ -55,15 +57,15 @@ class APICoreDNS(CoreSysAttributes):
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_SERVERS in body:
-            self.sys_dns.servers = body[ATTR_SERVERS]
-            self.sys_create_task(self.sys_dns.restart())
+            self.sys_plugins.dns.servers = body[ATTR_SERVERS]
+            self.sys_create_task(self.sys_plugins.dns.restart())

-        self.sys_dns.save_data()
+        self.sys_plugins.dns.save_data()

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
-        stats = await self.sys_dns.stats()
+        stats = await self.sys_plugins.dns.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
@@ -80,23 +82,23 @@ class APICoreDNS(CoreSysAttributes):
    async def update(self, request: web.Request) -> None:
        """Update DNS plugin."""
        body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.sys_dns.latest_version)
+        version = body.get(ATTR_VERSION, self.sys_plugins.dns.latest_version)

-        if version == self.sys_dns.version:
-            raise APIError("Version {} is already in use".format(version))
-        await asyncio.shield(self.sys_dns.update(version))
+        if version == self.sys_plugins.dns.version:
+            raise APIError(f"Version {version} is already in use")
+        await asyncio.shield(self.sys_plugins.dns.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return DNS Docker logs."""
-        return self.sys_dns.logs()
+        return self.sys_plugins.dns.logs()

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart CoreDNS plugin."""
-        return asyncio.shield(self.sys_dns.restart())
+        return asyncio.shield(self.sys_plugins.dns.restart())

    @api_process
    def reset(self, request: web.Request) -> Awaitable[None]:
        """Reset CoreDNS plugin."""
-        return asyncio.shield(self.sys_dns.reset())
+        return asyncio.shield(self.sys_plugins.dns.reset())
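
Editor's note: for the options endpoint, a body that should validate against `SCHEMA_OPTIONS`, assuming `dns_server_list` keeps requiring the `dns://` URL form the Supervisor uses for custom servers (addresses are illustrative):

    # Illustrative options payload; the path /dns/options is assumed from the API layout.
    body = {"servers": ["dns://1.1.1.1", "dns://9.9.9.9"]}

Setting servers both persists the list via `save_data()` and schedules a plugin restart so the new upstreams take effect.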
supervisor/api/docker.py (Normal file, 76 lines)
@@ -0,0 +1,76 @@
"""Init file for Supervisor Docker RESTful API."""
import logging
from typing import Any, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_HOSTNAME,
    ATTR_LOGGING,
    ATTR_PASSWORD,
    ATTR_REGISTRIES,
    ATTR_STORAGE,
    ATTR_USERNAME,
    ATTR_VERSION,
)
from ..coresys import CoreSysAttributes
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_DOCKER_REGISTRY = vol.Schema(
    {
        vol.Coerce(str): {
            vol.Required(ATTR_USERNAME): str,
            vol.Required(ATTR_PASSWORD): str,
        }
    }
)


class APIDocker(CoreSysAttributes):
    """Handle RESTful API for Docker configuration."""

    @api_process
    async def registries(self, request) -> Dict[str, Any]:
        """Return the list of registries."""
        data_registries = {}
        for hostname, registry in self.sys_docker.config.registries.items():
            data_registries[hostname] = {
                ATTR_USERNAME: registry[ATTR_USERNAME],
            }

        return {ATTR_REGISTRIES: data_registries}

    @api_process
    async def create_registry(self, request: web.Request):
        """Create a new docker registry."""
        body = await api_validate(SCHEMA_DOCKER_REGISTRY, request)

        for hostname, registry in body.items():
            self.sys_docker.config.registries[hostname] = registry

        self.sys_docker.config.save_data()

    @api_process
    async def remove_registry(self, request: web.Request):
        """Delete a docker registry."""
        hostname = request.match_info.get(ATTR_HOSTNAME)
        del self.sys_docker.config.registries[hostname]
        self.sys_docker.config.save_data()

    @api_process
    async def info(self, request: web.Request):
        """Get docker info."""
        data_registries = {}
        for hostname, registry in self.sys_docker.config.registries.items():
            data_registries[hostname] = {
                ATTR_USERNAME: registry[ATTR_USERNAME],
            }
        return {
            ATTR_VERSION: self.sys_docker.info.version,
            ATTR_STORAGE: self.sys_docker.info.storage,
            ATTR_LOGGING: self.sys_docker.info.logging,
            ATTR_REGISTRIES: data_registries,
        }
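
Editor's note: `registries()` and `info()` deliberately echo only the username back; the stored password never leaves the config. An illustrative payload for `create_registry` (hostname and credentials are made up):

    # Validates against SCHEMA_DOCKER_REGISTRY above: the top-level key is the
    # registry hostname, the value carries the credentials.
    body = {
        "registry.example.com": {
            "username": "ci-bot",
            "password": "s3cret",  # persisted, but never returned by the read endpoints
        }
    }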
@@ -1,24 +1,36 @@
"""Init file for Supervisor hardware RESTful API."""
import asyncio
import logging
-from typing import Any, Dict
+from typing import Any, Awaitable, Dict

from aiohttp import web

-from .utils import api_process
-from ..const import (
-    ATTR_SERIAL,
-    ATTR_DISK,
-    ATTR_GPIO,
-    ATTR_AUDIO,
-    ATTR_INPUT,
-    ATTR_OUTPUT,
-)
+from ..const import ATTR_AUDIO, ATTR_DEVICES, ATTR_INPUT, ATTR_NAME, ATTR_OUTPUT
from ..coresys import CoreSysAttributes
+from ..hardware.const import (
+    ATTR_ATTRIBUTES,
+    ATTR_BY_ID,
+    ATTR_DEV_PATH,
+    ATTR_SUBSYSTEM,
+    ATTR_SYSFS,
+)
+from ..hardware.data import Device
+from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)


+def device_struct(device: Device) -> Dict[str, Any]:
+    """Return a dict with information about a device to be used in the API."""
+    return {
+        ATTR_NAME: device.name,
+        ATTR_SYSFS: device.sysfs,
+        ATTR_DEV_PATH: device.path,
+        ATTR_SUBSYSTEM: device.subsystem,
+        ATTR_BY_ID: device.by_id,
+        ATTR_ATTRIBUTES: device.attributes,
+    }
+
+
class APIHardware(CoreSysAttributes):
    """Handle RESTful API for hardware functions."""
@@ -26,13 +38,9 @@ class APIHardware(CoreSysAttributes):
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Show hardware info."""
        return {
-            ATTR_SERIAL: list(
-                self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
-            ),
-            ATTR_INPUT: list(self.sys_hardware.input_devices),
-            ATTR_DISK: list(self.sys_hardware.disk_devices),
-            ATTR_GPIO: list(self.sys_hardware.gpio_devices),
-            ATTR_AUDIO: self.sys_hardware.audio_devices,
+            ATTR_DEVICES: [
+                device_struct(device) for device in self.sys_hardware.devices
+            ]
        }

    @api_process
@@ -42,16 +50,16 @@ class APIHardware(CoreSysAttributes):
            ATTR_AUDIO: {
                ATTR_INPUT: {
                    profile.name: profile.description
-                    for profile in self.sys_host.sound.input_profiles
+                    for profile in self.sys_host.sound.inputs
                },
                ATTR_OUTPUT: {
                    profile.name: profile.description
-                    for profile in self.sys_host.sound.output_profiles
+                    for profile in self.sys_host.sound.outputs
                },
            }
        }

    @api_process
-    def trigger(self, request: web.Request) -> None:
+    async def trigger(self, request: web.Request) -> Awaitable[None]:
        """Trigger a udev device reload."""
-        return asyncio.shield(self.sys_hardware.udev_trigger())
+        _LOGGER.debug("Ignoring DEPRECATED hardware trigger function call.")
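
Editor's note: a sketch of what one entry in the new `devices` list looks like once `device_struct` has run. All field values below are invented, and the hardware attr-constants are assumed to resolve to the plain strings shown:

    # Hypothetical serialized device, shaped like device_struct's return value.
    device = {
        "name": "ttyUSB0",
        "sysfs": "/sys/devices/platform/soc/usb1/1-1/ttyUSB0",
        "dev_path": "/dev/ttyUSB0",
        "subsystem": "tty",
        "by_id": "/dev/serial/by-id/usb-0658_0200-if00",
        "attributes": {"DEVNAME": "/dev/ttyUSB0"},
    }

Replacing the old per-category lists (serial, disk, gpio, audio) with one uniform device list moves the classification burden to the client, but exposes strictly more information per device.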
@@ -1,38 +1,40 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
import logging
-from typing import Coroutine, Dict, Any
+from typing import Any, Awaitable, Dict

-import voluptuous as vol
from aiohttp import web
+import voluptuous as vol

from ..const import (
    ATTR_ARCH,
+    ATTR_AUDIO_INPUT,
+    ATTR_AUDIO_OUTPUT,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_BOOT,
    ATTR_CPU_PERCENT,
    ATTR_CUSTOM,
    ATTR_IMAGE,
-    ATTR_LAST_VERSION,
+    ATTR_IP_ADDRESS,
    ATTR_MACHINE,
    ATTR_MEMORY_LIMIT,
-    ATTR_MEMORY_USAGE,
    ATTR_MEMORY_PERCENT,
+    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_PORT,
    ATTR_REFRESH_TOKEN,
    ATTR_SSL,
+    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
+    ATTR_VERSION_LATEST,
    ATTR_WAIT_BOOT,
    ATTR_WATCHDOG,
-    ATTR_IP_ADDRESS,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
-from ..validate import docker_image, network_port
+from ..validate import docker_image, network_port, version_tag
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -41,17 +43,18 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_BOOT): vol.Boolean(),
-        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(docker_image),
-        vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Maybe(vol.Coerce(str)),
+        vol.Optional(ATTR_IMAGE): docker_image,
        vol.Optional(ATTR_PORT): network_port,
        vol.Optional(ATTR_SSL): vol.Boolean(),
        vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
        vol.Optional(ATTR_WAIT_BOOT): vol.All(vol.Coerce(int), vol.Range(min=60)),
        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
+        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
+        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
    }
)

-SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
+SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})


class APIHomeAssistant(CoreSysAttributes):
@@ -62,17 +65,21 @@ class APIHomeAssistant(CoreSysAttributes):
        """Return host information."""
        return {
            ATTR_VERSION: self.sys_homeassistant.version,
-            ATTR_LAST_VERSION: self.sys_homeassistant.latest_version,
+            ATTR_VERSION_LATEST: self.sys_homeassistant.latest_version,
+            ATTR_UPDATE_AVAILABLE: self.sys_homeassistant.need_update,
            ATTR_MACHINE: self.sys_homeassistant.machine,
            ATTR_IP_ADDRESS: str(self.sys_homeassistant.ip_address),
            ATTR_ARCH: self.sys_homeassistant.arch,
            ATTR_IMAGE: self.sys_homeassistant.image,
            ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
            ATTR_BOOT: self.sys_homeassistant.boot,
            ATTR_PORT: self.sys_homeassistant.api_port,
            ATTR_SSL: self.sys_homeassistant.api_ssl,
            ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
            ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
+            ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
+            ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
+            # Remove end of Q3 2020
+            "last_version": self.sys_homeassistant.latest_version,
        }

    @api_process
@@ -80,9 +87,8 @@ class APIHomeAssistant(CoreSysAttributes):
        """Set Home Assistant options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

-        if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
+        if ATTR_IMAGE in body:
            self.sys_homeassistant.image = body[ATTR_IMAGE]
-            self.sys_homeassistant.latest_version = body[ATTR_LAST_VERSION]

        if ATTR_BOOT in body:
            self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -102,12 +108,18 @@ class APIHomeAssistant(CoreSysAttributes):
        if ATTR_REFRESH_TOKEN in body:
            self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]

+        if ATTR_AUDIO_INPUT in body:
+            self.sys_homeassistant.audio_input = body[ATTR_AUDIO_INPUT]
+
+        if ATTR_AUDIO_OUTPUT in body:
+            self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]
+
        self.sys_homeassistant.save_data()

    @api_process
    async def stats(self, request: web.Request) -> Dict[Any, str]:
        """Return resource information."""
-        stats = await self.sys_homeassistant.stats()
+        stats = await self.sys_homeassistant.core.stats()
        if not stats:
            raise APIError("No stats available")

@@ -128,36 +140,36 @@ class APIHomeAssistant(CoreSysAttributes):
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_homeassistant.latest_version)

-        await asyncio.shield(self.sys_homeassistant.update(version))
+        await asyncio.shield(self.sys_homeassistant.core.update(version))

    @api_process
-    def stop(self, request: web.Request) -> Coroutine:
+    def stop(self, request: web.Request) -> Awaitable[None]:
        """Stop Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.stop())
+        return asyncio.shield(self.sys_homeassistant.core.stop())

    @api_process
-    def start(self, request: web.Request) -> Coroutine:
+    def start(self, request: web.Request) -> Awaitable[None]:
        """Start Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.start())
+        return asyncio.shield(self.sys_homeassistant.core.start())

    @api_process
-    def restart(self, request: web.Request) -> Coroutine:
+    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.restart())
+        return asyncio.shield(self.sys_homeassistant.core.restart())

    @api_process
-    def rebuild(self, request: web.Request) -> Coroutine:
+    def rebuild(self, request: web.Request) -> Awaitable[None]:
        """Rebuild Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.rebuild())
+        return asyncio.shield(self.sys_homeassistant.core.rebuild())

    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Coroutine:
+    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return Home Assistant Docker logs."""
-        return self.sys_homeassistant.logs()
+        return self.sys_homeassistant.core.logs()

    @api_process
    async def check(self, request: web.Request) -> None:
        """Check configuration of Home Assistant."""
-        result = await self.sys_homeassistant.check_config()
+        result = await self.sys_homeassistant.core.check_config()
        if not result.valid:
            raise APIError(result.log)
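
Editor's note: dropping the `vol.Inclusive(..., "custom_hass")` pairing means a custom image can now be set without pinning a version in the same request. A standalone sketch of the behavioural difference; the keys are the plain strings behind the attr constants, `str` stands in for the real `docker_image` validator, and the image name is made up:

    import voluptuous as vol

    old = vol.Schema({
        vol.Inclusive("image", "custom_hass"): vol.Maybe(str),
        vol.Inclusive("last_version", "custom_hass"): vol.Maybe(str),
    })
    new = vol.Schema({vol.Optional("image"): str})

    new({"image": "ghcr.io/example/custom-homeassistant"})  # fine under the new schema
    try:
        old({"image": "ghcr.io/example/custom-homeassistant"})
    except vol.MultipleInvalid as err:
        # Inclusive groups are all-or-nothing, so image alone was rejected.
        print("old schema rejected it:", err)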
@@ -1,26 +1,30 @@
"""Init file for Supervisor host RESTful API."""
import asyncio
import logging
+from typing import Awaitable

from aiohttp import web
import voluptuous as vol

-from .utils import api_process, api_validate
from ..const import (
-    ATTR_HOSTNAME,
-    ATTR_FEATURES,
-    ATTR_KERNEL,
-    ATTR_OPERATING_SYSTEM,
    ATTR_CHASSIS,
-    ATTR_DEPLOYMENT,
-    ATTR_STATE,
-    ATTR_NAME,
-    ATTR_DESCRIPTON,
-    ATTR_SERVICES,
    ATTR_CPE,
+    ATTR_DEPLOYMENT,
+    ATTR_DESCRIPTON,
+    ATTR_DISK_FREE,
+    ATTR_DISK_LIFE_TIME,
+    ATTR_DISK_TOTAL,
+    ATTR_DISK_USED,
+    ATTR_FEATURES,
+    ATTR_HOSTNAME,
+    ATTR_KERNEL,
+    ATTR_NAME,
+    ATTR_OPERATING_SYSTEM,
+    ATTR_SERVICES,
+    ATTR_STATE,
+    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
+from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SERVICE = "service"
@@ -36,11 +40,15 @@ class APIHost(CoreSysAttributes):
        return {
            ATTR_CHASSIS: self.sys_host.info.chassis,
            ATTR_CPE: self.sys_host.info.cpe,
-            ATTR_FEATURES: self.sys_host.supperted_features,
-            ATTR_HOSTNAME: self.sys_host.info.hostname,
-            ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
            ATTR_DEPLOYMENT: self.sys_host.info.deployment,
+            ATTR_DISK_FREE: self.sys_host.info.free_space,
+            ATTR_DISK_TOTAL: self.sys_host.info.total_space,
+            ATTR_DISK_USED: self.sys_host.info.used_space,
+            ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
+            ATTR_FEATURES: self.sys_host.features,
+            ATTR_HOSTNAME: self.sys_host.info.hostname,
            ATTR_KERNEL: self.sys_host.info.kernel,
+            ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
        }

    @api_process
@@ -67,7 +75,11 @@ class APIHost(CoreSysAttributes):
    @api_process
    def reload(self, request):
        """Reload host data."""
-        return asyncio.shield(self.sys_host.reload())
+        return asyncio.shield(
+            asyncio.wait(
+                [self.sys_host.reload(), self.sys_resolution.evaluate.evaluate_system()]
+            )
+        )

    @api_process
    async def services(self, request):
@@ -107,3 +119,8 @@ class APIHost(CoreSysAttributes):
        """Restart a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.restart(unit))
+
+    @api_process_raw(CONTENT_TYPE_BINARY)
+    def logs(self, request: web.Request) -> Awaitable[bytes]:
+        """Return host kernel logs."""
+        return self.sys_host.info.get_dmesg()
@@ -7,12 +7,16 @@ from aiohttp import web
from ..const import (
    ATTR_ARCH,
    ATTR_CHANNEL,
+    ATTR_DOCKER,
+    ATTR_FEATURES,
    ATTR_HASSOS,
    ATTR_HOMEASSISTANT,
    ATTR_HOSTNAME,
    ATTR_LOGGING,
    ATTR_MACHINE,
+    ATTR_OPERATING_SYSTEM,
    ATTR_SUPERVISOR,
+    ATTR_SUPPORTED,
    ATTR_SUPPORTED_ARCH,
    ATTR_TIMEZONE,
)
@@ -32,11 +36,15 @@ class APIInfo(CoreSysAttributes):
            ATTR_SUPERVISOR: self.sys_supervisor.version,
            ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
            ATTR_HASSOS: self.sys_hassos.version,
+            ATTR_DOCKER: self.sys_docker.info.version,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
+            ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
+            ATTR_FEATURES: self.sys_host.features,
            ATTR_MACHINE: self.sys_machine,
            ATTR_ARCH: self.sys_arch.default,
            ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
+            ATTR_SUPPORTED: self.sys_core.supported,
            ATTR_CHANNEL: self.sys_updater.channel,
            ATTR_LOGGING: self.sys_config.logging,
-            ATTR_TIMEZONE: self.sys_timezone,
+            ATTR_TIMEZONE: self.sys_config.timezone,
        }
@@ -12,25 +12,28 @@ from aiohttp.web_exceptions import (
    HTTPUnauthorized,
)
from multidict import CIMultiDict, istr
+import voluptuous as vol

from ..addons.addon import Addon
from ..const import (
+    ATTR_ADMIN,
+    ATTR_ENABLE,
    ATTR_ICON,
+    ATTR_PANELS,
+    ATTR_SESSION,
    ATTR_TITLE,
-    ATTR_PANELS,
-    ATTR_ENABLE,
    COOKIE_INGRESS,
    HEADER_TOKEN,
    HEADER_TOKEN_OLD,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
-from .utils import api_process
+from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

+VALIDATE_SESSION_DATA = vol.Schema({ATTR_SESSION: str})
+

class APIIngress(CoreSysAttributes):
    """Ingress view to handle add-on webui routing."""
@@ -78,6 +81,18 @@ class APIIngress(CoreSysAttributes):
        session = self.sys_ingress.create_session()
        return {ATTR_SESSION: session}

+    @api_process
+    async def validate_session(self, request: web.Request) -> Dict[str, Any]:
+        """Validate session and extend how long it's valid for."""
+        self._check_ha_access(request)
+
+        data = await api_validate(VALIDATE_SESSION_DATA, request)
+
+        # Check Ingress Session
+        if not self.sys_ingress.validate_session(data[ATTR_SESSION]):
+            _LOGGER.warning("No valid ingress session %s", data[ATTR_SESSION])
+            raise HTTPUnauthorized()
+
    async def handler(
        self, request: web.Request
    ) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
@@ -104,7 +119,7 @@ class APIIngress(CoreSysAttributes):
        except aiohttp.ClientError as err:
            _LOGGER.error("Ingress error: %s", err)

-        raise HTTPBadGateway() from None
+        raise HTTPBadGateway()

    async def _handle_websocket(
        self, request: web.Request, addon: Addon, path: str
@@ -129,7 +144,7 @@ class APIIngress(CoreSysAttributes):

        # Support GET query
        if request.query_string:
-            url = "{}?{}".format(url, request.query_string)
+            url = f"{url}?{request.query_string}"

        # Start proxy
        async with self.sys_websession.ws_connect(
@@ -191,7 +206,11 @@ class APIIngress(CoreSysAttributes):
        async for data in result.content.iter_chunked(4096):
            await response.write(data)

-    except (aiohttp.ClientError, aiohttp.ClientPayloadError) as err:
+    except (
+        aiohttp.ClientError,
+        aiohttp.ClientPayloadError,
+        ConnectionResetError,
+    ) as err:
        _LOGGER.error("Stream error with %s: %s", url, err)

    return response
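
Editor's note: the new endpoint pairs with session creation to keep an ingress session alive. An illustrative client flow; the paths and the `{"data": {...}}` response envelope are assumptions based on the Supervisor API's usual layout:

    import asyncio
    import aiohttp

    async def refresh_ingress_session(token: str) -> None:
        headers = {"Authorization": f"Bearer {token}"}
        async with aiohttp.ClientSession(headers=headers) as session:
            async with session.post("http://supervisor/ingress/session") as resp:
                ingress_session = (await resp.json())["data"]["session"]
            # Re-validate periodically; answers 401 once the session is invalid.
            await session.post(
                "http://supervisor/ingress/validate_session",
                json={"session": ingress_session},
            )

    # asyncio.run(refresh_ingress_session("SUPERVISOR_TOKEN"))  # illustrative only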
supervisor/api/jobs.py (Normal file, 42 lines)
@@ -0,0 +1,42 @@
"""Init file for Supervisor Jobs RESTful API."""
import logging
from typing import Any, Dict

from aiohttp import web
import voluptuous as vol

from ..coresys import CoreSysAttributes
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_OPTIONS = vol.Schema(
    {vol.Optional(ATTR_IGNORE_CONDITIONS): [vol.Coerce(JobCondition)]}
)


class APIJobs(CoreSysAttributes):
    """Handle RESTful API for JobManager functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return JobManager information."""
        return {
            ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions,
        }

    @api_process
    async def options(self, request: web.Request) -> None:
        """Set options for JobManager."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_IGNORE_CONDITIONS in body:
            self.sys_jobs.ignore_conditions = body[ATTR_IGNORE_CONDITIONS]

        self.sys_jobs.save_data()

    @api_process
    async def reset(self, request: web.Request) -> None:
        """Reset options for JobManager."""
        self.sys_jobs.reset_data()
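
Editor's note: `vol.Coerce(JobCondition)` means condition names arrive as plain strings and are coerced to enum members, so an options body is just a list. Illustrative, assuming "healthy" is among `JobCondition`'s values:

    # Would validate against SCHEMA_OPTIONS above.
    body = {"ignore_conditions": ["healthy"]}

The companion reset endpoint restores the defaults by discarding the persisted options.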
supervisor/api/multicast.py (Normal file, 79 lines)
@@ -0,0 +1,79 @@
"""Init file for Supervisor Multicast RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_CPU_PERCENT,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import version_tag
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})


class APIMulticast(CoreSysAttributes):
    """Handle RESTful API for Multicast functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return Multicast information."""
        return {
            ATTR_VERSION: self.sys_plugins.multicast.version,
            ATTR_VERSION_LATEST: self.sys_plugins.multicast.latest_version,
            ATTR_UPDATE_AVAILABLE: self.sys_plugins.multicast.need_update,
        }

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        stats = await self.sys_plugins.multicast.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update Multicast plugin."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_plugins.multicast.latest_version)

        if version == self.sys_plugins.multicast.version:
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_plugins.multicast.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return Multicast Docker logs."""
        return self.sys_plugins.multicast.logs()

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart Multicast plugin."""
        return asyncio.shield(self.sys_plugins.multicast.restart())
282
supervisor/api/network.py
Normal file
282
supervisor/api/network.py
Normal file
@@ -0,0 +1,282 @@
|
||||
"""REST API for network."""
|
||||
import asyncio
|
||||
from ipaddress import ip_address, ip_interface
|
||||
from typing import Any, Awaitable, Dict
|
||||
|
||||
from aiohttp import web
|
||||
import attr
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ATTR_ACCESSPOINTS,
|
||||
ATTR_ADDRESS,
|
||||
ATTR_AUTH,
|
||||
ATTR_CONNECTED,
|
||||
ATTR_DNS,
|
||||
ATTR_DOCKER,
|
||||
ATTR_ENABLED,
|
||||
ATTR_FREQUENCY,
|
||||
ATTR_GATEWAY,
|
||||
ATTR_HOST_INTERNET,
|
||||
ATTR_ID,
|
||||
ATTR_INTERFACE,
|
||||
ATTR_INTERFACES,
|
||||
ATTR_IPV4,
|
||||
ATTR_IPV6,
|
||||
ATTR_MAC,
|
||||
ATTR_METHOD,
|
||||
ATTR_MODE,
|
||||
ATTR_NAMESERVERS,
|
||||
ATTR_PARENT,
|
||||
ATTR_PRIMARY,
|
||||
ATTR_PSK,
|
||||
ATTR_SIGNAL,
|
||||
ATTR_SSID,
|
||||
ATTR_SUPERVISOR_INTERNET,
|
||||
ATTR_TYPE,
|
||||
ATTR_VLAN,
|
||||
ATTR_WIFI,
|
||||
DOCKER_NETWORK,
|
||||
DOCKER_NETWORK_MASK,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import APIError, HostNetworkNotFound
|
||||
from ..host.const import AuthMethod, InterfaceType, WifiMode
|
||||
from ..host.network import (
|
||||
AccessPoint,
|
||||
Interface,
|
||||
InterfaceMethod,
|
||||
IpConfig,
|
||||
VlanConfig,
|
||||
WifiConfig,
|
||||
)
|
||||
from .utils import api_process, api_validate
|
||||
|
||||
_SCHEMA_IP_CONFIG = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
|
||||
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
|
||||
vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
|
||||
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
|
||||
}
|
||||
)
|
||||
|
||||
_SCHEMA_WIFI_CONFIG = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_MODE): vol.Coerce(WifiMode),
|
||||
vol.Optional(ATTR_AUTH): vol.Coerce(AuthMethod),
|
||||
vol.Optional(ATTR_SSID): str,
|
||||
vol.Optional(ATTR_PSK): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_UPDATE = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
|
||||
vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
|
||||
vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
|
||||
vol.Optional(ATTR_ENABLED): vol.Boolean(),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def ipconfig_struct(config: IpConfig) -> Dict[str, Any]:
|
||||
"""Return a dict with information about ip configuration."""
|
||||
return {
|
||||
ATTR_METHOD: config.method,
|
||||
ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
|
||||
ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
|
||||
ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
|
||||
}
|
||||
|
||||
|
||||
def wifi_struct(config: WifiConfig) -> Dict[str, Any]:
|
||||
"""Return a dict with information about wifi configuration."""
|
||||
return {
|
||||
ATTR_MODE: config.mode,
|
||||
ATTR_AUTH: config.auth,
|
||||
ATTR_SSID: config.ssid,
|
||||
ATTR_SIGNAL: config.signal,
|
||||
}
|
||||
|
||||
|
||||
def vlan_struct(config: VlanConfig) -> Dict[str, Any]:
|
||||
"""Return a dict with information about VLAN configuration."""
|
||||
return {
|
||||
ATTR_ID: config.id,
|
||||
ATTR_PARENT: config.interface,
|
||||
}
|
||||
|
||||
|
||||
def interface_struct(interface: Interface) -> Dict[str, Any]:
|
||||
"""Return a dict with information of a interface to be used in th API."""
|
||||
return {
|
||||
ATTR_INTERFACE: interface.name,
|
||||
ATTR_TYPE: interface.type,
|
||||
ATTR_ENABLED: interface.enabled,
|
||||
ATTR_CONNECTED: interface.connected,
|
||||
ATTR_PRIMARY: interface.primary,
|
||||
ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
|
||||
ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
|
||||
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
|
||||
ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
|
||||
}
|
||||
|
||||
|
||||
def accesspoint_struct(accesspoint: AccessPoint) -> Dict[str, Any]:
|
||||
"""Return a dict for AccessPoint."""
|
||||
return {
|
||||
ATTR_MODE: accesspoint.mode,
|
||||
ATTR_SSID: accesspoint.ssid,
|
||||
ATTR_FREQUENCY: accesspoint.frequency,
|
||||
ATTR_SIGNAL: accesspoint.signal,
|
||||
ATTR_MAC: accesspoint.mac,
|
||||
}
|
||||
|
||||
|
||||
class APINetwork(CoreSysAttributes):
|
||||
"""Handle REST API for network."""
|
||||
|
||||
def _get_interface(self, name: str) -> Interface:
|
||||
"""Get Interface by name or default."""
|
||||
name = name.lower()
|
||||
|
||||
if name == "default":
|
||||
for interface in self.sys_host.network.interfaces:
|
||||
if not interface.primary:
|
||||
continue
|
||||
return interface
|
||||
|
||||
else:
|
||||
try:
|
||||
return self.sys_host.network.get(name)
|
||||
except HostNetworkNotFound:
|
||||
pass
|
||||
|
||||
raise APIError(f"Interface {name} does not exist") from None
|
||||
|
||||
@api_process
|
||||
async def info(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return network information."""
|
||||
return {
|
||||
ATTR_INTERFACES: [
|
||||
interface_struct(interface)
|
||||
for interface in self.sys_host.network.interfaces
|
||||
],
|
||||
ATTR_DOCKER: {
|
||||
ATTR_INTERFACE: DOCKER_NETWORK,
|
||||
ATTR_ADDRESS: str(DOCKER_NETWORK_MASK),
|
||||
ATTR_GATEWAY: str(self.sys_docker.network.gateway),
|
||||
ATTR_DNS: str(self.sys_docker.network.dns),
|
||||
},
|
||||
ATTR_HOST_INTERNET: self.sys_host.network.connectivity,
|
||||
ATTR_SUPERVISOR_INTERNET: self.sys_supervisor.connectivity,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def interface_info(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return network information for a interface."""
|
||||
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
|
||||
|
||||
return interface_struct(interface)
|
||||
|
||||
    @api_process
    async def interface_update(self, request: web.Request) -> None:
        """Update the configuration of an interface."""
        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

        # Validate data
        body = await api_validate(SCHEMA_UPDATE, request)
        if not body:
            raise APIError("You need to supply at least one option to update")

        # Apply config
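        # attr.evolve() copies the current (or a default) config object with
        # only the submitted fields replaced, so partial updates are possible.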
        for key, config in body.items():
            if key == ATTR_IPV4:
                interface.ipv4 = attr.evolve(
                    interface.ipv4 or IpConfig(InterfaceMethod.STATIC, [], None, []),
                    **config,
                )
            elif key == ATTR_IPV6:
                interface.ipv6 = attr.evolve(
                    interface.ipv6 or IpConfig(InterfaceMethod.STATIC, [], None, []),
                    **config,
                )
            elif key == ATTR_WIFI:
                interface.wifi = attr.evolve(
                    interface.wifi
                    or WifiConfig(
                        WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
                    ),
                    **config,
                )
            elif key == ATTR_ENABLED:
                interface.enabled = config

        await asyncio.shield(self.sys_host.network.apply_changes(interface))

    @api_process
    def reload(self, request: web.Request) -> Awaitable[None]:
        """Reload network data."""
        return asyncio.shield(self.sys_host.network.update())

    @api_process
    async def scan_accesspoints(self, request: web.Request) -> Dict[str, Any]:
        """Scan and return a list of available networks."""
        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

        # Only wlan is supported
        if interface.type != InterfaceType.WIRELESS:
            raise APIError(f"Interface {interface.name} is not a valid wireless card!")

        ap_list = await self.sys_host.network.scan_wifi(interface)

        return {ATTR_ACCESSPOINTS: [accesspoint_struct(ap) for ap in ap_list]}

    @api_process
    async def create_vlan(self, request: web.Request) -> None:
        """Create a new vlan."""
        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
        vlan = int(request.match_info.get(ATTR_VLAN))

        # Only ethernet is supported
        if interface.type != InterfaceType.ETHERNET:
            raise APIError(
                f"Interface {interface.name} is not a valid ethernet card for vlan!"
            )
        body = await api_validate(SCHEMA_UPDATE, request)

        vlan_config = VlanConfig(vlan, interface.name)

        ipv4_config = None
        if ATTR_IPV4 in body:
            ipv4_config = IpConfig(
                body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
                body[ATTR_IPV4].get(ATTR_ADDRESS, []),
                body[ATTR_IPV4].get(ATTR_GATEWAY, None),
                body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
            )

        ipv6_config = None
        if ATTR_IPV6 in body:
            ipv6_config = IpConfig(
                body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
                body[ATTR_IPV6].get(ATTR_ADDRESS, []),
                body[ATTR_IPV6].get(ATTR_GATEWAY, None),
                body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
            )

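        # Interface fields, positionally: name (left empty here), enabled,
        # connected, primary, type, ipv4, ipv6, wifi (unused for VLANs), vlan.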
        vlan_interface = Interface(
            "",
            True,
            True,
            False,
            InterfaceType.VLAN,
            ipv4_config,
            ipv6_config,
            None,
            vlan_config,
        )
        await asyncio.shield(self.sys_host.network.apply_changes(vlan_interface))
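A minimal client-side sketch of how the handlers above are typically exercised. The /network/... paths, the base URL, and the bearer-token header are assumptions inferred from the handler names and the usual Supervisor add-on conventions, not taken from this diff:

    # Hypothetical aiohttp client for the network endpoints above.
    import asyncio
    import os

    import aiohttp

    SUPERVISOR = "http://supervisor"  # assumed base URL inside an add-on
    TOKEN = os.environ.get("SUPERVISOR_TOKEN", "")


    async def main() -> None:
        headers = {"Authorization": f"Bearer {TOKEN}"}
        async with aiohttp.ClientSession(headers=headers) as session:
            # "default" resolves server-side to the primary interface
            async with session.get(
                f"{SUPERVISOR}/network/interface/default/info"
            ) as resp:
                print(await resp.json())

            # Create VLAN 10 on eth0 with automatic IPv4 configuration
            async with session.post(
                f"{SUPERVISOR}/network/interface/eth0/vlan/10",
                json={"ipv4": {"method": "auto"}},
            ) as resp:
                resp.raise_for_status()


    asyncio.run(main())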
67 supervisor/api/observer.py Normal file
@@ -0,0 +1,67 @@
"""Init file for Supervisor Observer RESTful API."""
import asyncio
import logging
from typing import Any, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_CPU_PERCENT,
    ATTR_HOST,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from ..validate import version_tag
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})


class APIObserver(CoreSysAttributes):
    """Handle RESTful API for Observer functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return HA Observer information."""
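        # The observer plugin runs in its own container on Supervisor's
        # internal Docker network, so its host address is taken from there.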
        return {
            ATTR_HOST: str(self.sys_docker.network.observer),
            ATTR_VERSION: self.sys_plugins.observer.version,
            ATTR_VERSION_LATEST: self.sys_plugins.observer.latest_version,
            ATTR_UPDATE_AVAILABLE: self.sys_plugins.observer.need_update,
        }

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
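        # Resource counters are read from the plugin's Docker container.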
        stats = await self.sys_plugins.observer.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update HA observer."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_plugins.observer.latest_version)

        await asyncio.shield(self.sys_plugins.observer.update(version))
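For illustration, a version-pinned update through this handler might look as follows; the /observer/update path is inferred from the handler name and the version value is a placeholder, neither is taken from this diff:

    # Hypothetical request against the observer update endpoint.
    import requests

    requests.post(
        "http://supervisor/observer/update",
        headers={"Authorization": "Bearer <SUPERVISOR_TOKEN>"},
        json={"version": "2020.10.1"},  # validated by SCHEMA_VERSION
        timeout=10,
    ).raise_for_status()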
@@ -9,51 +9,42 @@ import voluptuous as vol
 from ..const import (
     ATTR_BOARD,
     ATTR_BOOT,
+    ATTR_UPDATE_AVAILABLE,
     ATTR_VERSION,
-    ATTR_VERSION_CLI,
-    ATTR_VERSION_CLI_LATEST,
     ATTR_VERSION_LATEST,
 )
 from ..coresys import CoreSysAttributes
+from ..validate import version_tag
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

-SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
+SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})


-class APIHassOS(CoreSysAttributes):
-    """Handle RESTful API for HassOS functions."""
+class APIOS(CoreSysAttributes):
+    """Handle RESTful API for OS functions."""

     @api_process
     async def info(self, request: web.Request) -> Dict[str, Any]:
-        """Return HassOS information."""
+        """Return OS information."""
         return {
             ATTR_VERSION: self.sys_hassos.version,
-            ATTR_VERSION_CLI: self.sys_hassos.version_cli,
-            ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
-            ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
+            ATTR_VERSION_LATEST: self.sys_hassos.latest_version,
+            ATTR_UPDATE_AVAILABLE: self.sys_hassos.need_update,
             ATTR_BOARD: self.sys_hassos.board,
             ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
         }

     @api_process
     async def update(self, request: web.Request) -> None:
-        """Update HassOS."""
+        """Update OS."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)
+        version = body.get(ATTR_VERSION, self.sys_hassos.latest_version)

         await asyncio.shield(self.sys_hassos.update(version))

     @api_process
-    async def update_cli(self, request: web.Request) -> None:
-        """Update HassOS CLI."""
-        body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)
-
-        await asyncio.shield(self.sys_hassos.update_cli(version))
-
-    @api_process
     def config_sync(self, request: web.Request) -> Awaitable[None]:
-        """Trigger config reload on HassOS."""
+        """Trigger config reload on OS."""
         return asyncio.shield(self.sys_hassos.config_sync())
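The swap from vol.Coerce(str) to version_tag in SCHEMA_VERSION tightens validation: any stringifiable payload used to pass, while a version validator is expected to reject non-version input. A rough sketch of the difference (the actual version_tag implementation lives in supervisor/validate.py and is not shown in this diff):

    # Illustrative comparison only.
    import voluptuous as vol

    loose = vol.Schema({vol.Optional("version"): vol.Coerce(str)})
    loose({"version": "not-a-version"})  # accepted: {'version': 'not-a-version'}

    # With version_tag, as in SCHEMA_VERSION above, the same payload is
    # expected to raise vol.Invalid instead.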