Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-14 11:39:21 +00:00)

Compare commits: 637 commits, 2023.03.0 ... need-updat
[Commit table (Author / SHA1 / Date, 637 rows) omitted: the avatar images and the author/date metadata did not survive extraction.]
.devcontainer/devcontainer.json

@@ -1,37 +1,40 @@
 {
   "name": "Supervisor dev",
   "image": "ghcr.io/home-assistant/devcontainer:supervisor",
   "containerEnv": {
     "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
   },
   "appPort": ["9123:8123", "7357:4357"],
   "postCreateCommand": "bash devcontainer_bootstrap",
   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
-  "extensions": [
-    "ms-python.python",
-    "ms-python.vscode-pylance",
-    "visualstudioexptteam.vscodeintellicode",
-    "esbenp.prettier-vscode"
-  ],
-  "mounts": ["type=volume,target=/var/lib/docker"],
-  "settings": {
-    "terminal.integrated.profiles.linux": {
-      "zsh": {
-        "path": "/usr/bin/zsh"
+  "customizations": {
+    "vscode": {
+      "extensions": [
+        "charliermarsh.ruff",
+        "ms-python.pylint",
+        "ms-python.vscode-pylance",
+        "visualstudioexptteam.vscodeintellicode",
+        "redhat.vscode-yaml",
+        "esbenp.prettier-vscode",
+        "GitHub.vscode-pull-request-github"
+      ],
+      "settings": {
+        "terminal.integrated.profiles.linux": {
+          "zsh": {
+            "path": "/usr/bin/zsh"
+          }
+        },
+        "terminal.integrated.defaultProfile.linux": "zsh",
+        "editor.formatOnPaste": false,
+        "editor.formatOnSave": true,
+        "editor.formatOnType": true,
+        "files.trimTrailingWhitespace": true,
+        "python.pythonPath": "/usr/local/bin/python3",
+        "[python]": {
+          "editor.defaultFormatter": "charliermarsh.ruff"
+        }
       }
     },
-    "terminal.integrated.defaultProfile.linux": "zsh",
-    "editor.formatOnPaste": false,
-    "editor.formatOnSave": true,
-    "editor.formatOnType": true,
-    "files.trimTrailingWhitespace": true,
-    "python.pythonPath": "/usr/local/bin/python3",
-    "python.linting.pylintEnabled": true,
-    "python.linting.enabled": true,
-    "python.formatting.provider": "black",
-    "python.formatting.blackArgs": ["--target-version", "py310"],
-    "python.formatting.blackPath": "/usr/local/bin/black",
-    "python.linting.banditPath": "/usr/local/bin/bandit",
-    "python.linting.flake8Path": "/usr/local/bin/flake8",
-    "python.linting.mypyPath": "/usr/local/bin/mypy",
-    "python.linting.pylintPath": "/usr/local/bin/pylint",
-    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
-  }
+  },
+  "mounts": ["type=volume,target=/var/lib/docker"]
 }
2 .github/PULL_REQUEST_TEMPLATE.md (vendored)
@@ -52,7 +52,7 @@
 - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
-- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
+- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
 - [ ] Tests have been added to verify that the new code works.

 If API endpoints of add-on configuration are added/changed:
107 .github/workflows/builder.yml (vendored)
@@ -33,12 +33,12 @@ on:
       - setup.py

 env:
-  DEFAULT_PYTHON: "3.10"
+  DEFAULT_PYTHON: "3.12"
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor

 concurrency:
-  group: '${{ github.workflow }}-${{ github.ref }}'
+  group: "${{ github.workflow }}-${{ github.ref }}"
   cancel-in-progress: true

 jobs:
@@ -53,7 +53,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
        with:
          fetch-depth: 0
@@ -70,25 +70,29 @@ jobs:
       - name: Get changed files
         id: changed_files
         if: steps.version.outputs.publish == 'false'
-        uses: jitterbit/get-changed-files@v1
+        uses: masesgroup/retrieve-changed-files@v3.0.0

       - name: Check if requirements files changed
         id: requirements
         run: |
-          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then
-            echo "::set-output name=changed::true"
+          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
+            echo "changed=true" >> "$GITHUB_OUTPUT"
           fi

   build:
     name: Build ${{ matrix.arch }} supervisor
     needs: init
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
+      packages: write
     strategy:
       matrix:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
         with:
           fetch-depth: 0
@@ -102,13 +106,13 @@ jobs:
       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2022.10.1
+        uses: home-assistant/wheels@2024.01.0
         with:
-          abi: cp310
+          abi: cp312
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
-          apk: "libffi-dev;openssl-dev"
+          apk: "libffi-dev;openssl-dev;yaml-dev"
           skip-binary: aiohttp
           env-file: true
           requirements: "requirements.txt"
@@ -119,16 +123,33 @@ jobs:
         with:
           type: ${{ env.BUILD_TYPE }}

-      - name: Login to DockerHub
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v2.1.0
+        uses: actions/setup-python@v5.1.0
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          python-version: ${{ env.DEFAULT_PYTHON }}
+
+      - name: Install Cosign
+        if: needs.init.outputs.publish == 'true'
+        uses: sigstore/cosign-installer@v3.5.0
+        with:
+          cosign-release: "v2.2.3"
+
+      - name: Install dirhash and calc hash
+        if: needs.init.outputs.publish == 'true'
+        run: |
+          pip3 install setuptools dirhash
+          dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
+          echo "${dir_hash}" > rootfs/supervisor.sha256
+
+      - name: Sign supervisor SHA256
+        if: needs.init.outputs.publish == 'true'
+        run: |
+          cosign sign-blob --yes rootfs/supervisor.sha256 --bundle rootfs/supervisor.sha256.sig

       - name: Login to GitHub Container Registry
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v2.1.0
+        uses: docker/login-action@v3.2.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -139,55 +160,17 @@ jobs:
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

       - name: Build supervisor
-        uses: home-assistant/builder@2022.11.0
+        uses: home-assistant/builder@2024.03.5
         with:
           args: |
             $BUILD_ARGS \
             --${{ matrix.arch }} \
             --target /data \
+            --cosign \
             --generic ${{ needs.init.outputs.version }}
-        env:
-          CAS_API_KEY: ${{ secrets.CAS_TOKEN }}
-
-  codenotary:
-    name: CAS signature
-    needs: init
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout the repository
-        if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v3.3.0
-        with:
-          fetch-depth: 0
-
-      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v4.5.0
-        with:
-          python-version: ${{ env.DEFAULT_PYTHON }}
-
-      - name: Set version
-        if: needs.init.outputs.publish == 'true'
-        uses: home-assistant/actions/helpers/version@master
-        with:
-          type: ${{ env.BUILD_TYPE }}
-
-      - name: Install dirhash and calc hash
-        if: needs.init.outputs.publish == 'true'
-        id: dirhash
-        run: |
-          pip3 install dirhash
-          dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
-          echo "::set-output name=dirhash::${dir_hash}"
-
-      - name: Signing Source
-        if: needs.init.outputs.publish == 'true'
-        uses: home-assistant/actions/helpers/codenotary@master
-        with:
-          source: hash://${{ steps.dirhash.outputs.dirhash }}
-          asset: supervisor-${{ needs.init.outputs.version }}
-          token: ${{ secrets.CAS_TOKEN }}

   version:
     name: Update version
     needs: ["init", "run_supervisor"]
@@ -195,7 +178,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6

       - name: Initialize git
         if: needs.init.outputs.publish == 'true'
@@ -216,15 +199,15 @@ jobs:
   run_supervisor:
     runs-on: ubuntu-latest
     name: Run the Supervisor
-    needs: ["build", "codenotary", "init"]
+    needs: ["build", "init"]
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6

       - name: Build the Supervisor
         if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2022.11.0
+        uses: home-assistant/builder@2024.03.5
         with:
           args: |
             --test \
@@ -236,7 +219,7 @@ jobs:
         if: needs.init.outputs.publish == 'true'
         run: |
           docker pull ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }}
-          docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} homeassistant/amd64-hassio-supervisor:runner
+          docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} ghcr.io/home-assistant/amd64-hassio-supervisor:runner

       - name: Create the Supervisor
         run: |
@@ -253,7 +236,7 @@ jobs:
           -e SUPERVISOR_NAME=hassio_supervisor \
           -e SUPERVISOR_DEV=1 \
           -e SUPERVISOR_MACHINE="qemux86-64" \
-          homeassistant/amd64-hassio-supervisor:runner
+          ghcr.io/home-assistant/amd64-hassio-supervisor:runner

       - name: Start the Supervisor
         run: docker start hassio_supervisor
@@ -341,7 +324,7 @@ jobs:
           if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
             exit 1
           fi
-          echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"
+          echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"

       - name: Uninstall SSH add-on
         run: |
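A note on the output syntax changing throughout these workflows: GitHub deprecated the `::set-output` workflow command, so steps now append `name=value` pairs to the file named by the `$GITHUB_OUTPUT` environment variable. A minimal sketch of the migration pattern (the `slug` output name is taken from the hunk above):

    # before (deprecated workflow command):
    echo "::set-output name=slug::$value"

    # after: append to the output file GitHub Actions provides
    echo "slug=$value" >> "$GITHUB_OUTPUT"
    # downstream steps still read it as ${{ steps.<step-id>.outputs.slug }}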
257 .github/workflows/ci.yaml (vendored)
@@ -8,12 +8,11 @@ on:
   pull_request: ~

 env:
-  DEFAULT_PYTHON: "3.10"
-  PRE_COMMIT_HOME: ~/.cache/pre-commit
-  DEFAULT_CAS: v1.0.2
+  DEFAULT_PYTHON: "3.12"
+  PRE_COMMIT_CACHE: ~/.cache/pre-commit

 concurrency:
-  group: '${{ github.workflow }}-${{ github.ref }}'
+  group: "${{ github.workflow }}-${{ github.ref }}"
   cancel-in-progress: true

 jobs:
@@ -26,15 +25,15 @@ jobs:
     name: Prepare Python dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
       - name: Set up Python
         id: python
-        uses: actions/setup-python@v4.5.0
+        uses: actions/setup-python@v5.1.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.2.5
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -48,9 +47,10 @@ jobs:
           pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.2.5
+        uses: actions/cache@v4.0.2
         with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          lookup-only: true
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
           restore-keys: |
@@ -61,21 +61,21 @@ jobs:
         . venv/bin/activate
         pre-commit install-hooks

-  lint-black:
-    name: Check black
+  lint-ruff-format:
+    name: Check ruff-format
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.5.0
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.2.5
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -85,10 +85,67 @@ jobs:
         run: |
           echo "Failed to restore Python virtual environment from cache"
           exit 1
-      - name: Run black
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.0.2
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff-format
         run: |
           . venv/bin/activate
-          black --target-version py38 --check supervisor tests setup.py
+          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github
+
+  lint-ruff:
+    name: Check ruff
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.1.6
+      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
+        uses: actions/setup-python@v5.1.0
+        id: python
+        with:
+          python-version: ${{ needs.prepare.outputs.python-version }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v4.0.2
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.0.2
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff
+        run: |
+          . venv/bin/activate
+          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github

   lint-dockerfile:
     name: Check Dockerfile
@@ -96,7 +153,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -111,15 +168,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.5.0
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.2.5
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -131,9 +188,9 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.2.5
+        uses: actions/cache@v4.0.2
         with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - name: Fail job if cache restore failed
@@ -149,94 +206,21 @@ jobs:
           . venv/bin/activate
           pre-commit run --hook-stage manual check-executables-have-shebangs --all-files

-  lint-flake8:
-    name: Check flake8
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.5.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.2.5
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Register flake8 problem matcher
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/flake8.json"
-      - name: Run flake8
-        run: |
-          . venv/bin/activate
-          flake8 supervisor tests
-
-  lint-isort:
-    name: Check isort
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.5.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.2.5
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v3.2.5
-        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run isort
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
-
   lint-json:
     name: Check JSON
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.5.0
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.2.5
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -248,9 +232,9 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.2.5
+        uses: actions/cache@v4.0.2
         with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - name: Fail job if cache restore failed
@@ -272,15 +256,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.5.0
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.2.5
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -298,66 +282,25 @@ jobs:
           . venv/bin/activate
           pylint supervisor tests

-  lint-pyupgrade:
-    name: Check pyupgrade
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.5.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.2.5
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v3.2.5
-        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run pyupgrade
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
-
   pytest:
     runs-on: ubuntu-latest
     needs: prepare
     name: Run tests Python ${{ needs.prepare.outputs.python-version }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.5.0
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Install CAS tools
-        uses: home-assistant/actions/helpers/cas@master
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@v3.5.0
         with:
-          version: ${{ env.DEFAULT_CAS }}
+          cosign-release: "v2.2.3"
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.2.5
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -370,7 +313,7 @@ jobs:
       - name: Install additional system dependencies
         run: |
           sudo apt-get update
-          sudo apt-get install -y --no-install-recommends libpulse0 libudev1
+          sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11
       - name: Register Python problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/python.json"
@@ -392,7 +335,7 @@ jobs:
             -o console_output_style=count \
             tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v3.1.2
+        uses: actions/upload-artifact@v4.3.3
         with:
           name: coverage-${{ matrix.python-version }}
           path: .coverage
@@ -403,15 +346,15 @@ jobs:
     needs: ["pytest", "prepare"]
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.5.0
+        uses: actions/setup-python@v5.1.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.2.5
+        uses: actions/cache@v4.0.2
         with:
           path: venv
           key: |
@@ -422,7 +365,7 @@ jobs:
           echo "Failed to restore Python virtual environment from cache"
           exit 1
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4.1.7
       - name: Combine coverage results
         run: |
           . venv/bin/activate
@@ -430,4 +373,4 @@ jobs:
           coverage report
           coverage xml
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v3.1.1
+        uses: codecov/codecov-action@v4.4.1
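Because the replacement lint jobs simply shell out to pre-commit, they can be reproduced locally with the same commands the workflow runs. A sketch, assuming the repository's venv is set up as in the prepare job above:

    . venv/bin/activate
    pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
    pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure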
2 .github/workflows/lock.yml (vendored)
@@ -9,7 +9,7 @@ jobs:
   lock:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v4.0.0
+      - uses: dessant/lock-threads@v5.0.1
        with:
          github-token: ${{ github.token }}
          issue-inactive-days: "30"
30 .github/workflows/matchers/flake8.json (vendored)
@@ -1,30 +0,0 @@
-{
-  "problemMatcher": [
-    {
-      "owner": "flake8-error",
-      "severity": "error",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    },
-    {
-      "owner": "flake8-warning",
-      "severity": "warning",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    }
-  ]
-}
6 .github/workflows/release-drafter.yml (vendored)
@@ -11,7 +11,7 @@ jobs:
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
         with:
           fetch-depth: 0
@@ -33,10 +33,10 @@ jobs:

           echo Current version: $latest
           echo New target version: $datepre.$newpost
-          echo "::set-output name=version::$datepre.$newpost"
+          echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"

       - name: Run Release Drafter
-        uses: release-drafter/release-drafter@v5.22.0
+        uses: release-drafter/release-drafter@v6.0.0
         with:
           tag: ${{ steps.version.outputs.version }}
           name: ${{ steps.version.outputs.version }}
4 .github/workflows/sentry.yaml (vendored)
@@ -10,9 +10,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.3.0
+        uses: actions/checkout@v4.1.6
       - name: Sentry Release
-        uses: getsentry/action-release@v1.2.1
+        uses: getsentry/action-release@v1.7.0
         env:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
           SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
2 .github/workflows/stale.yml (vendored)
@@ -9,7 +9,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v7.0.0
+      - uses: actions/stale@v9.0.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 30
.hadolint.yaml

@@ -3,4 +3,5 @@ ignored:
   - DL3006
   - DL3013
   - DL3018
+  - DL3042
   - SC2155
.pre-commit-config.yaml

@@ -1,34 +1,15 @@
 repos:
-  - repo: https://github.com/psf/black
-    rev: 23.1.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.2.1
     hooks:
-      - id: black
+      - id: ruff
         args:
-          - --safe
-          - --quiet
-          - --target-version
-          - py310
+          - --fix
+      - id: ruff-format
         files: ^((supervisor|tests)/.+)?[^/]+\.py$
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
-    hooks:
-      - id: flake8
-        additional_dependencies:
-          - flake8-docstrings==1.7.0
-          - pydocstyle==6.3.0
-        files: ^(supervisor|script|tests)/.+\.py$
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.5.0
     hooks:
       - id: check-executables-have-shebangs
         stages: [manual]
       - id: check-json
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.12.0
-    hooks:
-      - id: isort
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.3.1
-    hooks:
-      - id: pyupgrade
-        args: [--py310-plus]
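Black, flake8, isort, and pyupgrade are all folded into the single ruff hook pair here. One way to exercise the new hooks locally, using the standard pre-commit CLI (the install step is optional if you only want a one-off run):

    pip install pre-commit
    pre-commit install           # run the hooks automatically on every git commit
    pre-commit run --all-files   # or lint the whole tree once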
7 .vscode/launch.json (vendored)
@@ -13,6 +13,13 @@
           "remoteRoot": "/usr/src/supervisor"
         }
       ]
     },
+    {
+      "name": "Debug Tests",
+      "type": "python",
+      "request": "test",
+      "console": "internalConsole",
+      "justMyCode": false
+    }
   ]
 }
18 .vscode/tasks.json (vendored)
@@ -58,9 +58,23 @@
       "problemMatcher": []
     },
     {
-      "label": "Flake8",
+      "label": "Ruff Check",
       "type": "shell",
-      "command": "flake8 supervisor tests",
+      "command": "ruff check --fix supervisor tests",
       "group": {
         "kind": "test",
         "isDefault": true
       },
       "presentation": {
         "reveal": "always",
         "panel": "new"
       },
       "problemMatcher": []
     },
+    {
+      "label": "Ruff Format",
+      "type": "shell",
+      "command": "ruff format supervisor tests",
+      "group": {
+        "kind": "test",
+        "isDefault": true
27 Dockerfile
@@ -3,16 +3,19 @@ FROM ${BUILD_FROM}

 ENV \
     S6_SERVICES_GRACETIME=10000 \
-    SUPERVISOR_API=http://localhost
+    SUPERVISOR_API=http://localhost \
+    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1

 ARG \
-    CAS_VERSION
+    COSIGN_VERSION \
+    BUILD_ARCH

 # Install base
 WORKDIR /usr/src
 RUN \
     set -x \
     && apk add --no-cache \
+        findutils \
         eudev \
         eudev-libs \
         git \
@@ -20,33 +23,23 @@ RUN \
         libpulse \
         musl \
         openssl \
-    && apk add --no-cache --virtual .build-dependencies \
-        build-base \
-        go \
+        yaml \
     \
-    && git clone -b "v${CAS_VERSION}" --depth 1 \
-        https://github.com/codenotary/cas \
-    && cd cas \
-    && make cas \
-    && mv cas /usr/bin/cas \
-    \
-    && apk del .build-dependencies \
-    && rm -rf /root/go /root/.cache \
-    && rm -rf /usr/src/cas
+    && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
+    && chmod a+x /usr/bin/cosign

 # Install requirements
 COPY requirements.txt .
 RUN \
     export MAKEFLAGS="-j$(nproc)" \
-    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
-        "https://wheels.home-assistant.io/musllinux/" \
+    && pip3 install --only-binary=:all: \
         -r ./requirements.txt \
     && rm -f requirements.txt

 # Install Home Assistant Supervisor
 COPY . supervisor
 RUN \
-    pip3 install --no-cache-dir -e ./supervisor \
+    pip3 install -e ./supervisor \
     && python3 -m compileall ./supervisor/supervisor
18 build.yaml
@@ -1,16 +1,18 @@
-image: homeassistant/{arch}-hassio-supervisor
-shadow_repository: ghcr.io/home-assistant
+image: ghcr.io/home-assistant/{arch}-hassio-supervisor
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.10-alpine3.16
-  armhf: ghcr.io/home-assistant/armhf-base-python:3.10-alpine3.16
-  armv7: ghcr.io/home-assistant/armv7-base-python:3.10-alpine3.16
-  amd64: ghcr.io/home-assistant/amd64-base-python:3.10-alpine3.16
-  i386: ghcr.io/home-assistant/i386-base-python:3.10-alpine3.16
+  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.19
+  armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.19
+  armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.19
+  amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.19
+  i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.19
-codenotary:
-  signer: notary@home-assistant.io
-  base_image: notary@home-assistant.io
+cosign:
+  base_identity: https://github.com/home-assistant/docker-base/.*
+  identity: https://github.com/home-assistant/supervisor/.*
 args:
-  CAS_VERSION: 1.0.2
+  COSIGN_VERSION: 2.2.3
 labels:
   io.hass.type: supervisor
   org.opencontainers.image.title: Home Assistant Supervisor
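The codenotary block gives way to cosign keyless signing: `identity` and `base_identity` are regular expressions matched against the certificate identity embedded in the image signature. A hedged sketch of verifying a published image against the identity configured above (the issuer URL is the standard GitHub Actions OIDC issuer; the tag is illustrative):

    cosign verify \
      --certificate-identity-regexp "https://github.com/home-assistant/supervisor/.*" \
      --certificate-oidc-issuer "https://token.actions.githubusercontent.com" \
      ghcr.io/home-assistant/amd64-hassio-supervisor:latest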
Submodule home-assistant-polymer updated: 84affcce33...9d457d52e8
45 pylintrc
@@ -1,45 +0,0 @@
-[MASTER]
-reports=no
-jobs=2
-
-good-names=id,i,j,k,ex,Run,_,fp,T,os
-
-extension-pkg-whitelist=
-    ciso8601
-
-# Reasons disabled:
-# format - handled by black
-# locally-disabled - it spams too much
-# duplicate-code - unavoidable
-# cyclic-import - doesn't test if both import on load
-# abstract-class-not-used - is flaky, should not show up but does
-# unused-argument - generic callbacks and setup methods create a lot of warnings
-# too-many-* - are not enforced for the sake of readability
-# too-few-* - same as too-many-*
-# abstract-method - with intro of async there are always methods missing
-disable=
-    format,
-    abstract-method,
-    cyclic-import,
-    duplicate-code,
-    locally-disabled,
-    no-else-return,
-    not-context-manager,
-    too-few-public-methods,
-    too-many-arguments,
-    too-many-branches,
-    too-many-instance-attributes,
-    too-many-lines,
-    too-many-locals,
-    too-many-public-methods,
-    too-many-return-statements,
-    too-many-statements,
-    unused-argument,
-    consider-using-with
-
-[EXCEPTIONS]
-overgeneral-exceptions=Exception
-
-
-[TYPECHECK]
-ignored-modules = distutils
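The standalone pylintrc can be deleted because the same settings move into the [tool.pylint.*] tables of the new pyproject.toml below; pylint discovers pyproject.toml on its own, so the invocation used in CI stays the same:

    pylint supervisor tests   # now configured via [tool.pylint] in pyproject.toml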
371 pyproject.toml (new file)
@@ -0,0 +1,371 @@
|
||||
[build-system]
|
||||
requires = ["setuptools~=68.0.0", "wheel~=0.40.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "Supervisor"
|
||||
dynamic = ["version", "dependencies"]
|
||||
license = { text = "Apache-2.0" }
|
||||
description = "Open-source private cloud os for Home-Assistant based on HassOS"
|
||||
readme = "README.md"
|
||||
authors = [
|
||||
{ name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
|
||||
]
|
||||
keywords = ["docker", "home-assistant", "api"]
|
||||
requires-python = ">=3.12.0"
|
||||
|
||||
[project.urls]
|
||||
"Homepage" = "https://www.home-assistant.io/"
|
||||
"Source Code" = "https://github.com/home-assistant/supervisor"
|
||||
"Bug Reports" = "https://github.com/home-assistant/supervisor/issues"
|
||||
"Docs: Dev" = "https://developers.home-assistant.io/"
|
||||
"Discord" = "https://www.home-assistant.io/join-chat/"
|
||||
"Forum" = "https://community.home-assistant.io/"
|
||||
|
||||
[tool.setuptools]
|
||||
platforms = ["any"]
|
||||
zip-safe = false
|
||||
include-package-data = true
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
include = ["supervisor*"]
|
||||
|
||||
[tool.pylint.MAIN]
|
||||
py-version = "3.11"
|
||||
# Use a conservative default here; 2 should speed up most setups and not hurt
|
||||
# any too bad. Override on command line as appropriate.
|
||||
jobs = 2
|
||||
persistent = false
|
||||
extension-pkg-allow-list = ["ciso8601"]
|
||||
|
||||
[tool.pylint.BASIC]
|
||||
class-const-naming-style = "any"
|
||||
good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]
|
||||
|
||||
[tool.pylint."MESSAGES CONTROL"]
|
||||
# Reasons disabled:
|
||||
# format - handled by ruff
|
||||
# abstract-method - with intro of async there are always methods missing
|
||||
# cyclic-import - doesn't test if both import on load
|
||||
# duplicate-code - unavoidable
|
||||
# locally-disabled - it spams too much
|
||||
# too-many-* - are not enforced for the sake of readability
|
||||
# too-few-* - same as too-many-*
|
||||
# unused-argument - generic callbacks and setup methods create a lot of warnings
|
||||
disable = [
|
||||
"format",
|
||||
"abstract-method",
|
||||
"cyclic-import",
|
||||
"duplicate-code",
|
||||
"locally-disabled",
|
||||
"no-else-return",
|
||||
"not-context-manager",
|
||||
"too-few-public-methods",
|
||||
"too-many-arguments",
|
||||
"too-many-branches",
|
||||
"too-many-instance-attributes",
|
||||
"too-many-lines",
|
||||
"too-many-locals",
|
||||
"too-many-public-methods",
|
||||
"too-many-return-statements",
|
||||
"too-many-statements",
|
||||
"unused-argument",
|
||||
"consider-using-with",
|
||||
|
||||
# Handled by ruff
|
||||
# Ref: <https://github.com/astral-sh/ruff/issues/970>
|
||||
"await-outside-async", # PLE1142
|
||||
"bad-str-strip-call", # PLE1310
|
||||
"bad-string-format-type", # PLE1307
|
||||
"bidirectional-unicode", # PLE2502
|
||||
"continue-in-finally", # PLE0116
|
||||
"duplicate-bases", # PLE0241
|
||||
"format-needs-mapping", # F502
|
||||
"function-redefined", # F811
|
||||
# Needed because ruff does not understand type of __all__ generated by a function
|
||||
# "invalid-all-format", # PLE0605
|
||||
"invalid-all-object", # PLE0604
|
||||
"invalid-character-backspace", # PLE2510
|
||||
"invalid-character-esc", # PLE2513
|
||||
"invalid-character-nul", # PLE2514
|
||||
"invalid-character-sub", # PLE2512
|
||||
"invalid-character-zero-width-space", # PLE2515
|
||||
"logging-too-few-args", # PLE1206
|
||||
"logging-too-many-args", # PLE1205
|
||||
"missing-format-string-key", # F524
|
||||
"mixed-format-string", # F506
|
||||
"no-method-argument", # N805
|
||||
"no-self-argument", # N805
|
||||
"nonexistent-operator", # B002
|
||||
"nonlocal-without-binding", # PLE0117
|
||||
"not-in-loop", # F701, F702
|
||||
"notimplemented-raised", # F901
|
||||
"return-in-init", # PLE0101
|
||||
"return-outside-function", # F706
|
||||
"syntax-error", # E999
|
||||
"too-few-format-args", # F524
|
||||
"too-many-format-args", # F522
|
||||
"too-many-star-expressions", # F622
|
||||
"truncated-format-string", # F501
|
||||
"undefined-all-variable", # F822
|
||||
"undefined-variable", # F821
|
||||
"used-prior-global-declaration", # PLE0118
|
||||
"yield-inside-async-function", # PLE1700
|
||||
"yield-outside-function", # F704
|
||||
"anomalous-backslash-in-string", # W605
|
||||
"assert-on-string-literal", # PLW0129
|
||||
"assert-on-tuple", # F631
|
||||
"bad-format-string", # W1302, F
|
||||
"bad-format-string-key", # W1300, F
|
||||
"bare-except", # E722
|
||||
"binary-op-exception", # PLW0711
|
||||
"cell-var-from-loop", # B023
|
||||
# "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
|
||||
"duplicate-except", # B014
|
||||
"duplicate-key", # F601
|
||||
"duplicate-string-formatting-argument", # F
|
||||
"duplicate-value", # F
|
||||
"eval-used", # PGH001
|
||||
"exec-used", # S102
|
||||
# "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
|
||||
"f-string-without-interpolation", # F541
|
||||
"forgotten-debug-statement", # T100
|
||||
"format-string-without-interpolation", # F
|
||||
# "global-statement", # PLW0603, ruff catches new occurrences, needs more work
|
||||
"global-variable-not-assigned", # PLW0602
|
||||
"implicit-str-concat", # ISC001
|
||||
"import-self", # PLW0406
|
||||
"inconsistent-quotes", # Q000
|
||||
"invalid-envvar-default", # PLW1508
|
||||
"keyword-arg-before-vararg", # B026
|
||||
"logging-format-interpolation", # G
|
||||
"logging-fstring-interpolation", # G
|
||||
"logging-not-lazy", # G
|
||||
"misplaced-future", # F404
|
||||
"named-expr-without-context", # PLW0131
|
||||
"nested-min-max", # PLW3301
|
||||
# "pointless-statement", # B018, ruff catches new occurrences, needs more work
|
||||
"raise-missing-from", # TRY200
|
||||
# "redefined-builtin", # A001, ruff is way more stricter, needs work
|
||||
"try-except-raise", # TRY302
|
||||
"unused-argument", # ARG001, we don't use it
|
||||
"unused-format-string-argument", #F507
|
||||
"unused-format-string-key", # F504
|
||||
"unused-import", # F401
|
||||
"unused-variable", # F841
|
||||
"useless-else-on-loop", # PLW0120
|
||||
"wildcard-import", # F403
|
||||
"bad-classmethod-argument", # N804
|
||||
"consider-iterating-dictionary", # SIM118
|
||||
"empty-docstring", # D419
|
||||
"invalid-name", # N815
|
||||
"line-too-long", # E501, disabled globally
|
||||
"missing-class-docstring", # D101
|
||||
"missing-final-newline", # W292
|
||||
"missing-function-docstring", # D103
|
||||
"missing-module-docstring", # D100
|
||||
"multiple-imports", #E401
|
||||
"singleton-comparison", # E711, E712
|
||||
"subprocess-run-check", # PLW1510
|
||||
"superfluous-parens", # UP034
|
||||
"ungrouped-imports", # I001
|
||||
"unidiomatic-typecheck", # E721
|
||||
"unnecessary-direct-lambda-call", # PLC3002
|
||||
"unnecessary-lambda-assignment", # PLC3001
|
||||
"unneeded-not", # SIM208
|
||||
"useless-import-alias", # PLC0414
|
||||
"wrong-import-order", # I001
|
||||
"wrong-import-position", # E402
|
||||
"comparison-of-constants", # PLR0133
|
||||
"comparison-with-itself", # PLR0124
|
||||
# "consider-alternative-union-syntax", # UP007, typing extension
|
||||
"consider-merging-isinstance", # PLR1701
|
||||
# "consider-using-alias", # UP006, typing extension
|
||||
"consider-using-dict-comprehension", # C402
|
||||
"consider-using-generator", # C417
|
||||
"consider-using-get", # SIM401
|
||||
"consider-using-set-comprehension", # C401
|
||||
"consider-using-sys-exit", # PLR1722
|
||||
"consider-using-ternary", # SIM108
|
||||
"literal-comparison", # F632
|
||||
"property-with-parameters", # PLR0206
|
||||
"super-with-arguments", # UP008
|
||||
"too-many-branches", # PLR0912
|
||||
"too-many-return-statements", # PLR0911
|
||||
"too-many-statements", # PLR0915
|
||||
"trailing-comma-tuple", # COM818
|
||||
"unnecessary-comprehension", # C416
|
||||
"use-a-generator", # C417
|
||||
"use-dict-literal", # C406
|
||||
"use-list-literal", # C405
|
||||
"useless-object-inheritance", # UP004
|
||||
"useless-return", # PLR1711
|
||||
# "no-self-use", # PLR6301 # Optional plugin, not enabled
|
||||
]
|
||||
|
||||
[tool.pylint.REPORTS]
|
||||
score = false
|
||||
|
||||
[tool.pylint.TYPECHECK]
|
||||
ignored-modules = ["distutils"]
|
||||
|
||||
[tool.pylint.FORMAT]
|
||||
expected-line-ending-format = "LF"
|
||||
|
||||
[tool.pylint.EXCEPTIONS]
|
||||
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
norecursedirs = [".git"]
|
||||
log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
|
||||
log_date_format = "%Y-%m-%d %H:%M:%S"
|
||||
asyncio_mode = "auto"
|
||||
filterwarnings = [
|
||||
"error",
|
||||
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash",
|
||||
"ignore::pytest.PytestUnraisableExceptionWarning",
|
||||
]
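As an aside, a minimal sketch (not part of this repository) of what the "error" entry above does: pytest escalates any warning that is not matched by a later "ignore" entry into a test failure.

import warnings

def test_unexpected_warning_fails():
    # With filterwarnings = ["error"], this warning is raised as an exception
    # and fails the test unless it matches one of the "ignore:..." entries.
    warnings.warn("something unexpected", UserWarning)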
|
||||
|
||||
[tool.ruff]
|
||||
select = [
|
||||
"B002", # Python does not support the unary prefix increment
|
||||
"B007", # Loop control variable {name} not used within loop body
|
||||
"B014", # Exception handler with duplicate exception
|
||||
"B023", # Function definition does not bind loop variable {name}
|
||||
"B026", # Star-arg unpacking after a keyword argument is strongly discouraged
|
||||
"C", # complexity
|
||||
"COM818", # Trailing comma on bare tuple prohibited
|
||||
"D", # docstrings
|
||||
"DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
|
||||
"DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
|
||||
"E", # pycodestyle
|
||||
"F", # pyflakes/autoflake
|
||||
"G", # flake8-logging-format
|
||||
"I", # isort
|
||||
"ICN001", # import concentions; {name} should be imported as {asname}
|
||||
"N804", # First argument of a class method should be named cls
|
||||
"N805", # First argument of a method should be named self
|
||||
"N815", # Variable {name} in class scope should not be mixedCase
|
||||
"PGH001", # No builtin eval() allowed
|
||||
"PGH004", # Use specific rule codes when using noqa
|
||||
"PLC0414", # Useless import alias. Import alias does not rename original package.
|
||||
"PLC", # pylint
|
||||
"PLE", # pylint
|
||||
"PLR", # pylint
|
||||
"PLW", # pylint
|
||||
"Q000", # Double quotes found but single quotes preferred
|
||||
"RUF006", # Store a reference to the return value of asyncio.create_task
|
||||
"S102", # Use of exec detected
|
||||
"S103", # bad-file-permissions
|
||||
"S108", # hardcoded-temp-file
|
||||
"S306", # suspicious-mktemp-usage
|
||||
"S307", # suspicious-eval-usage
|
||||
"S313", # suspicious-xmlc-element-tree-usage
|
||||
"S314", # suspicious-xml-element-tree-usage
|
||||
"S315", # suspicious-xml-expat-reader-usage
|
||||
"S316", # suspicious-xml-expat-builder-usage
|
||||
"S317", # suspicious-xml-sax-usage
|
||||
"S318", # suspicious-xml-mini-dom-usage
|
||||
"S319", # suspicious-xml-pull-dom-usage
|
||||
"S320", # suspicious-xmle-tree-usage
|
||||
"S601", # paramiko-call
|
||||
"S602", # subprocess-popen-with-shell-equals-true
|
||||
"S604", # call-with-shell-equals-true
|
||||
"S608", # hardcoded-sql-expression
|
||||
"S609", # unix-command-wildcard-injection
|
||||
"SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
|
||||
"SIM117", # Merge with-statements that use the same scope
|
||||
"SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
|
||||
"SIM201", # Use {left} != {right} instead of not {left} == {right}
|
||||
"SIM208", # Use {expr} instead of not (not {expr})
|
||||
"SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
|
||||
"SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
|
||||
"SIM401", # Use get from dict with default instead of an if block
|
||||
"T100", # Trace found: {name} used
|
||||
"T20", # flake8-print
|
||||
"TID251", # Banned imports
|
||||
"TRY004", # Prefer TypeError exception for invalid type
|
||||
"TRY200", # Use raise from to specify exception cause
|
||||
"TRY302", # Remove exception handler; error is immediately re-raised
|
||||
"UP", # pyupgrade
|
||||
"W", # pycodestyle
|
||||
]
|
||||
|
||||
ignore = [
|
||||
"D202", # No blank lines allowed after function docstring
|
||||
"D203", # 1 blank line required before class docstring
|
||||
"D213", # Multi-line docstring summary should start at the second line
|
||||
"D406", # Section name should end with a newline
|
||||
"D407", # Section name underlining
|
||||
"E501", # line too long
|
||||
"E731", # do not assign a lambda expression, use a def
|
||||
|
||||
# Ignore ignored, as the rule is now back in preview/nursery, which cannot
|
||||
# be ignored anymore without warnings.
|
||||
# https://github.com/astral-sh/ruff/issues/7491
|
||||
# "PLC1901", # Lots of false positives
|
||||
|
||||
# False positives https://github.com/astral-sh/ruff/issues/5386
|
||||
"PLC0208", # Use a sequence type instead of a `set` when iterating over values
|
||||
"PLR0911", # Too many return statements ({returns} > {max_returns})
|
||||
"PLR0912", # Too many branches ({branches} > {max_branches})
|
||||
"PLR0913", # Too many arguments to function call ({c_args} > {max_args})
|
||||
"PLR0915", # Too many statements ({statements} > {max_statements})
|
||||
"PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
|
||||
"PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
|
||||
"UP006", # keep type annotation style as is
|
||||
"UP007", # keep type annotation style as is
|
||||
# Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
|
||||
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
|
||||
|
||||
# May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
|
||||
"W191",
|
||||
"E111",
|
||||
"E114",
|
||||
"E117",
|
||||
"D206",
|
||||
"D300",
|
||||
"Q000",
|
||||
"Q001",
|
||||
"Q002",
|
||||
"Q003",
|
||||
"COM812",
|
||||
"COM819",
|
||||
"ISC001",
|
||||
"ISC002",
|
||||
|
||||
# Disabled because ruff does not understand type of __all__ generated by a function
|
||||
"PLE0605",
|
||||
]
|
||||
|
||||
[tool.ruff.flake8-import-conventions.extend-aliases]
|
||||
voluptuous = "vol"
|
||||
|
||||
[tool.ruff.flake8-pytest-style]
|
||||
fixture-parentheses = false
|
||||
|
||||
[tool.ruff.flake8-tidy-imports.banned-api]
|
||||
"pytz".msg = "use zoneinfo instead"
|
||||
|
||||
[tool.ruff.isort]
|
||||
force-sort-within-sections = true
|
||||
section-order = [
|
||||
"future",
|
||||
"standard-library",
|
||||
"third-party",
|
||||
"first-party",
|
||||
"local-folder",
|
||||
]
|
||||
forced-separate = ["tests"]
|
||||
known-first-party = ["supervisor", "tests"]
|
||||
combine-as-imports = true
|
||||
split-on-trailing-comma = false
|
||||
|
||||
[tool.ruff.per-file-ignores]
|
||||
|
||||
# DBus Service Mocks must use typing and names understood by dbus-fast
|
||||
"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
|
||||
|
||||
[tool.ruff.mccabe]
|
||||
max-complexity = 25
|
@@ -1,2 +0,0 @@
|
||||
[pytest]
|
||||
asyncio_mode = auto
|
@@ -1,26 +1,30 @@
|
||||
aiodns==3.0.0
|
||||
aiohttp==3.8.4
|
||||
async_timeout==4.0.2
|
||||
aiodns==3.2.0
|
||||
aiohttp==3.9.5
|
||||
aiohttp-fast-url-dispatcher==0.3.0
|
||||
atomicwrites-homeassistant==1.4.1
|
||||
attrs==22.2.0
|
||||
awesomeversion==22.9.0
|
||||
brotli==1.0.9
|
||||
cchardet==2.1.7
|
||||
ciso8601==2.3.0
|
||||
colorlog==6.7.0
|
||||
attrs==23.2.0
|
||||
awesomeversion==24.2.0
|
||||
brotli==1.1.0
|
||||
ciso8601==2.3.1
|
||||
colorlog==6.8.2
|
||||
cpe==1.2.1
|
||||
cryptography==39.0.1
|
||||
debugpy==1.6.6
|
||||
deepmerge==1.1.0
|
||||
dirhash==0.2.1
|
||||
docker==6.0.1
|
||||
gitpython==3.1.31
|
||||
jinja2==3.1.2
|
||||
pulsectl==22.3.2
|
||||
pyudev==0.24.0
|
||||
ruamel.yaml==0.17.21
|
||||
securetar==2022.2.0
|
||||
sentry-sdk==1.15.0
|
||||
voluptuous==0.13.1
|
||||
dbus-fast==1.84.1
|
||||
typing_extensions==4.3.0
|
||||
cryptography==42.0.8
|
||||
debugpy==1.8.1
|
||||
deepmerge==1.1.1
|
||||
dirhash==0.4.0
|
||||
docker==7.1.0
|
||||
faust-cchardet==2.1.19
|
||||
gitpython==3.1.43
|
||||
jinja2==3.1.4
|
||||
orjson==3.9.15
|
||||
pulsectl==24.4.0
|
||||
pyudev==0.24.3
|
||||
PyYAML==6.0.1
|
||||
requests==2.32.3
|
||||
securetar==2024.2.1
|
||||
sentry-sdk==2.5.1
|
||||
setuptools==70.0.0
|
||||
voluptuous==0.14.2
|
||||
dbus-fast==2.21.3
|
||||
typing_extensions==4.12.2
|
||||
zlib-fast==0.2.0
|
||||
|
@@ -1,17 +1,12 @@
|
||||
black==23.1.0
|
||||
codecov==2.1.12
|
||||
coverage==7.1.0
|
||||
flake8-docstrings==1.7.0
|
||||
flake8==6.0.0
|
||||
pre-commit==3.0.4
|
||||
pydocstyle==6.3.0
|
||||
pylint==2.15.10
|
||||
pytest-aiohttp==1.0.4
|
||||
pytest-asyncio==0.18.3
|
||||
pytest-cov==4.0.0
|
||||
pytest-timeout==2.1.0
|
||||
pytest==7.2.1
|
||||
pyupgrade==3.3.1
|
||||
time-machine==2.9.0
|
||||
typing_extensions==4.3.0
|
||||
urllib3==1.26.14
|
||||
coverage==7.5.3
|
||||
pre-commit==3.7.1
|
||||
pylint==3.2.3
|
||||
pytest-aiohttp==1.0.5
|
||||
pytest-asyncio==0.23.6
|
||||
pytest-cov==5.0.0
|
||||
pytest-timeout==2.3.1
|
||||
pytest==8.2.2
|
||||
ruff==0.4.8
|
||||
time-machine==2.14.1
|
||||
typing_extensions==4.12.2
|
||||
urllib3==2.2.1
|
||||
|
@@ -15,7 +15,7 @@ do
|
||||
if [[ "${supervisor_state}" = "running" ]]; then
|
||||
|
||||
# Check API
|
||||
if bashio::supervisor.ping; then
|
||||
if bashio::supervisor.ping > /dev/null; then
|
||||
failed_count=0
|
||||
else
|
||||
bashio::log.warning "Possible issue with Supervisor API health detected"
|
||||
|
@@ -1,4 +0,0 @@
|
||||
-----BEGIN PUBLIC KEY-----
|
||||
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE03LvYuz79GTJx4uKp3w6NrSe5JZI
|
||||
iBtgzzYi0YQYtZO/r+xFpgDJEa0gLHkXtl94fpqrFiN89In83lzaszbZtA==
|
||||
-----END PUBLIC KEY-----
|
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"currentcontext": {
|
||||
"LcHost": "cas.codenotary.com",
|
||||
"LcPort": "443"
|
||||
},
|
||||
"schemaversion": 3,
|
||||
"users": null
|
||||
}
|
setup.cfg (29 lines)
@@ -1,29 +0,0 @@
|
||||
[isort]
|
||||
multi_line_output = 3
|
||||
include_trailing_comma=True
|
||||
force_grid_wrap=0
|
||||
line_length=88
|
||||
indent = " "
|
||||
force_sort_within_sections = true
|
||||
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
|
||||
default_section = THIRDPARTY
|
||||
forced_separate = tests
|
||||
combine_as_imports = true
|
||||
use_parentheses = true
|
||||
known_first_party = supervisor,tests
|
||||
|
||||
[flake8]
|
||||
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
|
||||
doctests = True
|
||||
max-line-length = 88
|
||||
# E501: line too long
|
||||
# W503: Line break occurred before a binary operator
|
||||
# E203: Whitespace before ':'
|
||||
# D202 No blank lines allowed after function docstring
|
||||
# W504 line break after binary operator
|
||||
ignore =
|
||||
E501,
|
||||
W503,
|
||||
E203,
|
||||
D202,
|
||||
W504
|
setup.py (75 lines)
@@ -1,60 +1,27 @@
|
||||
"""Home Assistant Supervisor setup."""
|
||||
from pathlib import Path
|
||||
import re
|
||||
|
||||
from setuptools import setup
|
||||
|
||||
from supervisor.const import SUPERVISOR_VERSION
|
||||
RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")
|
||||
|
||||
SUPERVISOR_DIR = Path(__file__).parent
|
||||
REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt"
|
||||
CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py"
|
||||
|
||||
REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8")
|
||||
CONSTANTS = CONST_FILE.read_text(encoding="utf-8")
|
||||
|
||||
|
||||
def _get_supervisor_version():
|
||||
for line in CONSTANTS.split("\n"):
|
||||
if match := RE_SUPERVISOR_VERSION.match(line):
|
||||
return match.group(1)
|
||||
return "99.9.9dev"
|
||||
|
||||
|
||||
setup(
|
||||
name="Supervisor",
|
||||
version=SUPERVISOR_VERSION,
|
||||
license="BSD License",
|
||||
author="The Home Assistant Authors",
|
||||
author_email="hello@home-assistant.io",
|
||||
url="https://home-assistant.io/",
|
||||
description="Open-source private cloud OS for Home Assistant, based on HassOS",
|
||||
long_description=(
|
||||
"A maintainless private cloud operator system that"
|
||||
"setup a Home-Assistant instance. Based on HassOS"
|
||||
),
|
||||
classifiers=[
|
||||
"Intended Audience :: End Users/Desktop",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Operating System :: OS Independent",
|
||||
"Topic :: Home Automation",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"Topic :: Scientific/Engineering :: Atmospheric Science",
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Intended Audience :: Developers",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
],
|
||||
keywords=["docker", "home-assistant", "api"],
|
||||
zip_safe=False,
|
||||
platforms="any",
|
||||
packages=[
|
||||
"supervisor.addons",
|
||||
"supervisor.api",
|
||||
"supervisor.backups",
|
||||
"supervisor.dbus.network",
|
||||
"supervisor.dbus.network.setting",
|
||||
"supervisor.dbus",
|
||||
"supervisor.discovery.services",
|
||||
"supervisor.discovery",
|
||||
"supervisor.docker",
|
||||
"supervisor.homeassistant",
|
||||
"supervisor.host",
|
||||
"supervisor.jobs",
|
||||
"supervisor.misc",
|
||||
"supervisor.plugins",
|
||||
"supervisor.resolution.checks",
|
||||
"supervisor.resolution.evaluations",
|
||||
"supervisor.resolution.fixups",
|
||||
"supervisor.resolution",
|
||||
"supervisor.security",
|
||||
"supervisor.services.modules",
|
||||
"supervisor.services",
|
||||
"supervisor.store",
|
||||
"supervisor.utils",
|
||||
"supervisor",
|
||||
],
|
||||
include_package_data=True,
|
||||
version=_get_supervisor_version(),
|
||||
install_requires=REQUIREMENTS.split("\n"),
|
||||
)
|
||||
|
@@ -5,7 +5,15 @@ import logging
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
from supervisor import bootstrap
|
||||
import zlib_fast
|
||||
|
||||
# Enable fast zlib before importing supervisor
|
||||
zlib_fast.enable()
|
||||
|
||||
from supervisor import bootstrap # pylint: disable=wrong-import-position # noqa: E402
|
||||
from supervisor.utils.logging import ( # pylint: disable=wrong-import-position # noqa: E402
|
||||
activate_log_queue_handler,
|
||||
)
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -38,6 +46,8 @@ if __name__ == "__main__":
|
||||
executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
|
||||
loop.set_default_executor(executor)
|
||||
|
||||
activate_log_queue_handler()
|
||||
|
||||
_LOGGER.info("Initializing Supervisor setup")
|
||||
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
|
||||
loop.set_debug(coresys.config.debug)
|
||||
|
@@ -1,427 +1 @@
|
||||
"""Init file for Supervisor add-ons."""
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
import logging
|
||||
import tarfile
|
||||
from typing import Union
|
||||
|
||||
from ..const import AddonBoot, AddonStartup, AddonState
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..exceptions import (
|
||||
AddonConfigurationError,
|
||||
AddonsError,
|
||||
AddonsJobError,
|
||||
AddonsNotSupportedError,
|
||||
CoreDNSError,
|
||||
DockerAPIError,
|
||||
DockerError,
|
||||
DockerNotFound,
|
||||
HomeAssistantAPIError,
|
||||
HostAppArmorError,
|
||||
)
|
||||
from ..jobs.decorator import Job, JobCondition
|
||||
from ..resolution.const import ContextType, IssueType, SuggestionType
|
||||
from ..store.addon import AddonStore
|
||||
from ..utils import check_exception_chain
|
||||
from ..utils.sentry import capture_exception
|
||||
from .addon import Addon
|
||||
from .const import ADDON_UPDATE_CONDITIONS
|
||||
from .data import AddonsData
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
AnyAddon = Union[Addon, AddonStore]
|
||||
|
||||
|
||||
class AddonManager(CoreSysAttributes):
|
||||
"""Manage add-ons inside Supervisor."""
|
||||
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize Docker base wrapper."""
|
||||
self.coresys: CoreSys = coresys
|
||||
self.data: AddonsData = AddonsData(coresys)
|
||||
self.local: dict[str, Addon] = {}
|
||||
self.store: dict[str, AddonStore] = {}
|
||||
|
||||
@property
|
||||
def all(self) -> list[AnyAddon]:
|
||||
"""Return a list of all add-ons."""
|
||||
addons: dict[str, AnyAddon] = {**self.store, **self.local}
|
||||
return list(addons.values())
|
||||
|
||||
@property
|
||||
def installed(self) -> list[Addon]:
|
||||
"""Return a list of all installed add-ons."""
|
||||
return list(self.local.values())
|
||||
|
||||
def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
|
||||
"""Return an add-on from slug.
|
||||
|
||||
Prio:
|
||||
1 - Local
|
||||
2 - Store
|
||||
"""
|
||||
if addon_slug in self.local:
|
||||
return self.local[addon_slug]
|
||||
if not local_only:
|
||||
return self.store.get(addon_slug)
|
||||
return None
|
||||
|
||||
def from_token(self, token: str) -> Addon | None:
|
||||
"""Return an add-on from Supervisor token."""
|
||||
for addon in self.installed:
|
||||
if token == addon.supervisor_token:
|
||||
return addon
|
||||
return None
|
||||
|
||||
async def load(self) -> None:
|
||||
"""Start up add-on management."""
|
||||
tasks = []
|
||||
for slug in self.data.system:
|
||||
addon = self.local[slug] = Addon(self.coresys, slug)
|
||||
tasks.append(addon.load())
|
||||
|
||||
# Run initial tasks
|
||||
_LOGGER.info("Found %d installed add-ons", len(tasks))
|
||||
if tasks:
|
||||
await asyncio.wait(tasks)
|
||||
|
||||
# Sync DNS
|
||||
await self.sync_dns()
|
||||
|
||||
async def boot(self, stage: AddonStartup) -> None:
|
||||
"""Boot add-ons with mode auto."""
|
||||
tasks: list[Addon] = []
|
||||
for addon in self.installed:
|
||||
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
|
||||
continue
|
||||
tasks.append(addon)
|
||||
|
||||
# Evaluate add-ons which need to be started
|
||||
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
|
||||
if not tasks:
|
||||
return
|
||||
|
||||
# Start Add-ons sequential
|
||||
# avoid issue on slow IO
|
||||
for addon in tasks:
|
||||
try:
|
||||
await addon.start()
|
||||
except AddonsError as err:
|
||||
# Check if there is a system/user issue
|
||||
if check_exception_chain(
|
||||
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
|
||||
):
|
||||
addon.boot = AddonBoot.MANUAL
|
||||
addon.save_persist()
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
capture_exception(err)
|
||||
else:
|
||||
continue
|
||||
|
||||
_LOGGER.warning("Can't start Add-on %s", addon.slug)
|
||||
|
||||
await asyncio.sleep(self.sys_config.wait_boot)
|
||||
|
||||
async def shutdown(self, stage: AddonStartup) -> None:
|
||||
"""Shutdown addons."""
|
||||
tasks: list[Addon] = []
|
||||
for addon in self.installed:
|
||||
if addon.state != AddonState.STARTED or addon.startup != stage:
|
||||
continue
|
||||
tasks.append(addon)
|
||||
|
||||
# Evaluate add-ons which need to be stopped
|
||||
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
|
||||
if not tasks:
|
||||
return
|
||||
|
||||
# Stop Add-ons sequential
|
||||
# avoid issue on slow IO
|
||||
for addon in tasks:
|
||||
try:
|
||||
await addon.stop()
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
|
||||
capture_exception(err)
|
||||
|
||||
@Job(
|
||||
conditions=ADDON_UPDATE_CONDITIONS,
|
||||
on_condition=AddonsJobError,
|
||||
)
|
||||
async def install(self, slug: str) -> None:
|
||||
"""Install an add-on."""
|
||||
if slug in self.local:
|
||||
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
|
||||
store = self.store.get(slug)
|
||||
|
||||
if not store:
|
||||
raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
|
||||
|
||||
store.validate_availability()
|
||||
|
||||
self.data.install(store)
|
||||
addon = Addon(self.coresys, slug)
|
||||
await addon.load()
|
||||
|
||||
if not addon.path_data.is_dir():
|
||||
_LOGGER.info(
|
||||
"Creating Home Assistant add-on data folder %s", addon.path_data
|
||||
)
|
||||
addon.path_data.mkdir()
|
||||
|
||||
# Setup/Fix AppArmor profile
|
||||
await addon.install_apparmor()
|
||||
|
||||
try:
|
||||
await addon.instance.install(store.version, store.image, arch=addon.arch)
|
||||
except DockerError as err:
|
||||
self.data.uninstall(addon)
|
||||
raise AddonsError() from err
|
||||
else:
|
||||
self.local[slug] = addon
|
||||
|
||||
# Reload ingress tokens
|
||||
if addon.with_ingress:
|
||||
await self.sys_ingress.reload()
|
||||
|
||||
_LOGGER.info("Add-on '%s' successfully installed", slug)
|
||||
|
||||
async def uninstall(self, slug: str) -> None:
|
||||
"""Remove an add-on."""
|
||||
if slug not in self.local:
|
||||
_LOGGER.warning("Add-on %s is not installed", slug)
|
||||
return
|
||||
addon = self.local[slug]
|
||||
|
||||
try:
|
||||
await addon.instance.remove()
|
||||
except DockerError as err:
|
||||
raise AddonsError() from err
|
||||
else:
|
||||
addon.state = AddonState.UNKNOWN
|
||||
|
||||
await addon.unload()
|
||||
|
||||
# Cleanup audio settings
|
||||
if addon.path_pulse.exists():
|
||||
with suppress(OSError):
|
||||
addon.path_pulse.unlink()
|
||||
|
||||
# Cleanup AppArmor profile
|
||||
with suppress(HostAppArmorError):
|
||||
await addon.uninstall_apparmor()
|
||||
|
||||
# Cleanup Ingress panel from sidebar
|
||||
if addon.ingress_panel:
|
||||
addon.ingress_panel = False
|
||||
with suppress(HomeAssistantAPIError):
|
||||
await self.sys_ingress.update_hass_panel(addon)
|
||||
|
||||
# Cleanup Ingress dynamic port assignment
|
||||
if addon.with_ingress:
|
||||
self.sys_create_task(self.sys_ingress.reload())
|
||||
self.sys_ingress.del_dynamic_port(slug)
|
||||
|
||||
# Cleanup discovery data
|
||||
for message in self.sys_discovery.list_messages:
|
||||
if message.addon != addon.slug:
|
||||
continue
|
||||
self.sys_discovery.remove(message)
|
||||
|
||||
# Cleanup services data
|
||||
for service in self.sys_services.list_services:
|
||||
if addon.slug not in service.active:
|
||||
continue
|
||||
service.del_service_data(addon)
|
||||
|
||||
self.data.uninstall(addon)
|
||||
self.local.pop(slug)
|
||||
|
||||
_LOGGER.info("Add-on '%s' successfully removed", slug)
|
||||
|
||||
@Job(
|
||||
conditions=ADDON_UPDATE_CONDITIONS,
|
||||
on_condition=AddonsJobError,
|
||||
)
|
||||
async def update(self, slug: str, backup: bool | None = False) -> None:
|
||||
"""Update add-on."""
|
||||
if slug not in self.local:
|
||||
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
|
||||
addon = self.local[slug]
|
||||
|
||||
if addon.is_detached:
|
||||
raise AddonsError(
|
||||
f"Add-on {slug} is not available inside store", _LOGGER.error
|
||||
)
|
||||
store = self.store[slug]
|
||||
|
||||
if addon.version == store.version:
|
||||
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
|
||||
|
||||
# Check if still available, maybe something has changed
|
||||
store.validate_availability()
|
||||
|
||||
if backup:
|
||||
await self.sys_backups.do_backup_partial(
|
||||
name=f"addon_{addon.slug}_{addon.version}",
|
||||
homeassistant=False,
|
||||
addons=[addon.slug],
|
||||
)
|
||||
|
||||
# Update instance
|
||||
last_state: AddonState = addon.state
|
||||
old_image = addon.image
|
||||
try:
|
||||
await addon.instance.update(store.version, store.image)
|
||||
except DockerError as err:
|
||||
raise AddonsError() from err
|
||||
|
||||
_LOGGER.info("Add-on '%s' successfully updated", slug)
|
||||
self.data.update(store)
|
||||
|
||||
# Cleanup
|
||||
with suppress(DockerError):
|
||||
await addon.instance.cleanup(old_image=old_image)
|
||||
|
||||
# Setup/Fix AppArmor profile
|
||||
await addon.install_apparmor()
|
||||
|
||||
# restore state
|
||||
if last_state == AddonState.STARTED:
|
||||
await addon.start()
|
||||
|
||||
@Job(
|
||||
conditions=[
|
||||
JobCondition.FREE_SPACE,
|
||||
JobCondition.INTERNET_HOST,
|
||||
JobCondition.HEALTHY,
|
||||
],
|
||||
on_condition=AddonsJobError,
|
||||
)
|
||||
async def rebuild(self, slug: str) -> None:
|
||||
"""Perform a rebuild of local build add-on."""
|
||||
if slug not in self.local:
|
||||
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
|
||||
addon = self.local[slug]
|
||||
|
||||
if addon.is_detached:
|
||||
raise AddonsError(
|
||||
f"Add-on {slug} is not available inside store", _LOGGER.error
|
||||
)
|
||||
store = self.store[slug]
|
||||
|
||||
# Check if a rebuild is possible now
|
||||
if addon.version != store.version:
|
||||
raise AddonsError(
|
||||
"Version changed, use Update instead Rebuild", _LOGGER.error
|
||||
)
|
||||
if not addon.need_build:
|
||||
raise AddonsNotSupportedError(
|
||||
"Can't rebuild a image based add-on", _LOGGER.error
|
||||
)
|
||||
|
||||
# remove docker container but not addon config
|
||||
last_state: AddonState = addon.state
|
||||
try:
|
||||
await addon.instance.remove()
|
||||
await addon.instance.install(addon.version)
|
||||
except DockerError as err:
|
||||
raise AddonsError() from err
|
||||
else:
|
||||
self.data.update(store)
|
||||
_LOGGER.info("Add-on '%s' successfully rebuilt", slug)
|
||||
|
||||
# restore state
|
||||
if last_state == AddonState.STARTED:
|
||||
await addon.start()
|
||||
|
||||
@Job(
|
||||
conditions=[
|
||||
JobCondition.FREE_SPACE,
|
||||
JobCondition.INTERNET_HOST,
|
||||
JobCondition.HEALTHY,
|
||||
],
|
||||
on_condition=AddonsJobError,
|
||||
)
|
||||
async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
|
||||
"""Restore state of an add-on."""
|
||||
if slug not in self.local:
|
||||
_LOGGER.debug("Add-on %s is not local available for restore", slug)
|
||||
addon = Addon(self.coresys, slug)
|
||||
else:
|
||||
_LOGGER.debug("Add-on %s is local available for restore", slug)
|
||||
addon = self.local[slug]
|
||||
|
||||
await addon.restore(tar_file)
|
||||
|
||||
# Check if new
|
||||
if slug not in self.local:
|
||||
_LOGGER.info("Detect new Add-on after restore %s", slug)
|
||||
self.local[slug] = addon
|
||||
|
||||
# Update ingress
|
||||
if addon.with_ingress:
|
||||
await self.sys_ingress.reload()
|
||||
with suppress(HomeAssistantAPIError):
|
||||
await self.sys_ingress.update_hass_panel(addon)
|
||||
|
||||
@Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST])
|
||||
async def repair(self) -> None:
|
||||
"""Repair local add-ons."""
|
||||
needs_repair: list[Addon] = []
|
||||
|
||||
# Evaluate Add-ons to repair
|
||||
for addon in self.installed:
|
||||
if await addon.instance.exists():
|
||||
continue
|
||||
needs_repair.append(addon)
|
||||
|
||||
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
|
||||
if not needs_repair:
|
||||
return
|
||||
|
||||
for addon in needs_repair:
|
||||
_LOGGER.info("Repairing for add-on: %s", addon.slug)
|
||||
with suppress(DockerError, KeyError):
|
||||
# Need to pull the image again
|
||||
if not addon.need_build:
|
||||
await addon.instance.install(addon.version, addon.image)
|
||||
continue
|
||||
|
||||
# Need local lookup
|
||||
if addon.need_build and not addon.is_detached:
|
||||
store = self.store[addon.slug]
|
||||
# If this add-on is available for rebuild
|
||||
if addon.version == store.version:
|
||||
await addon.instance.install(addon.version, addon.image)
|
||||
continue
|
||||
|
||||
_LOGGER.error("Can't repair %s", addon.slug)
|
||||
with suppress(AddonsError):
|
||||
await self.uninstall(addon.slug)
|
||||
|
||||
async def sync_dns(self) -> None:
|
||||
"""Sync add-ons DNS names."""
|
||||
# Update hosts
|
||||
for addon in self.installed:
|
||||
try:
|
||||
if not await addon.instance.is_running():
|
||||
continue
|
||||
except DockerError as err:
|
||||
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
|
||||
self.sys_resolution.create_issue(
|
||||
IssueType.CORRUPT_DOCKER,
|
||||
ContextType.ADDON,
|
||||
reference=addon.slug,
|
||||
suggestions=[SuggestionType.EXECUTE_REPAIR],
|
||||
)
|
||||
capture_exception(err)
|
||||
else:
|
||||
self.sys_plugins.dns.add_host(
|
||||
ipv4=addon.ip_address, names=[addon.hostname], write=False
|
||||
)
|
||||
|
||||
# Write hosts files
|
||||
with suppress(CoreDNSError):
|
||||
self.sys_plugins.dns.write_hosts()
|
||||
|
File diff suppressed because it is too large
@@ -102,11 +102,11 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
|
||||
except HassioArchNotFound:
|
||||
return False
|
||||
|
||||
def get_docker_args(self, version: AwesomeVersion):
|
||||
def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
|
||||
"""Create a dict with Docker build arguments."""
|
||||
args = {
|
||||
"path": str(self.addon.path_location),
|
||||
"tag": f"{self.addon.image}:{version!s}",
|
||||
"tag": f"{image or self.addon.image}:{version!s}",
|
||||
"dockerfile": str(self.dockerfile),
|
||||
"pull": True,
|
||||
"forcerm": not self.sys_dev,
|
||||
|
supervisor/addons/configuration.py (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
"""Confgiuration Objects for Addon Config."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class FolderMapping:
|
||||
"""Represent folder mapping configuration."""
|
||||
|
||||
path: str | None
|
||||
read_only: bool
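A brief hedged usage example of the dataclass above (assuming the FolderMapping definition just shown); values are illustrative.

mapping = FolderMapping(path="custom/share", read_only=True)
# slots=True keeps instances compact and rejects unknown attributes,
# so e.g. `mapping.extra = 1` would raise AttributeError.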
|
@@ -1,19 +1,37 @@
|
||||
"""Add-on static data."""
|
||||
from datetime import timedelta
|
||||
from enum import Enum
|
||||
from enum import StrEnum
|
||||
|
||||
from ..jobs.const import JobCondition
|
||||
|
||||
|
||||
class AddonBackupMode(str, Enum):
|
||||
class AddonBackupMode(StrEnum):
|
||||
"""Backup mode of an Add-on."""
|
||||
|
||||
HOT = "hot"
|
||||
COLD = "cold"
|
||||
|
||||
|
||||
class MappingType(StrEnum):
|
||||
"""Mapping type of an Add-on Folder."""
|
||||
|
||||
DATA = "data"
|
||||
CONFIG = "config"
|
||||
SSL = "ssl"
|
||||
ADDONS = "addons"
|
||||
BACKUP = "backup"
|
||||
SHARE = "share"
|
||||
MEDIA = "media"
|
||||
HOMEASSISTANT_CONFIG = "homeassistant_config"
|
||||
ALL_ADDON_CONFIGS = "all_addon_configs"
|
||||
ADDON_CONFIG = "addon_config"
|
||||
|
||||
|
||||
ATTR_BACKUP = "backup"
|
||||
ATTR_BREAKING_VERSIONS = "breaking_versions"
|
||||
ATTR_CODENOTARY = "codenotary"
|
||||
ATTR_READ_ONLY = "read_only"
|
||||
ATTR_PATH = "path"
|
||||
WATCHDOG_RETRY_SECONDS = 10
|
||||
WATCHDOG_MAX_ATTEMPTS = 5
|
||||
WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
|
||||
@@ -26,3 +44,5 @@ ADDON_UPDATE_CONDITIONS = [
|
||||
JobCondition.PLUGINS_UPDATED,
|
||||
JobCondition.SUPERVISOR_UPDATED,
|
||||
]
|
||||
|
||||
RE_SLUG = r"[-_.A-Za-z0-9]+"
|
||||
|
supervisor/addons/manager.py (new file, 379 lines)
@@ -0,0 +1,379 @@
|
||||
"""Supervisor add-on manager."""
|
||||
import asyncio
|
||||
from collections.abc import Awaitable
|
||||
from contextlib import suppress
|
||||
import logging
|
||||
import tarfile
|
||||
from typing import Union
|
||||
|
||||
from ..const import AddonBoot, AddonStartup, AddonState
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..exceptions import (
|
||||
AddonConfigurationError,
|
||||
AddonsError,
|
||||
AddonsJobError,
|
||||
AddonsNotSupportedError,
|
||||
CoreDNSError,
|
||||
DockerAPIError,
|
||||
DockerError,
|
||||
DockerNotFound,
|
||||
HassioError,
|
||||
HomeAssistantAPIError,
|
||||
)
|
||||
from ..jobs.decorator import Job, JobCondition
|
||||
from ..resolution.const import ContextType, IssueType, SuggestionType
|
||||
from ..store.addon import AddonStore
|
||||
from ..utils import check_exception_chain
|
||||
from ..utils.sentry import capture_exception
|
||||
from .addon import Addon
|
||||
from .const import ADDON_UPDATE_CONDITIONS
|
||||
from .data import AddonsData
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
AnyAddon = Union[Addon, AddonStore]
|
||||
|
||||
|
||||
class AddonManager(CoreSysAttributes):
|
||||
"""Manage add-ons inside Supervisor."""
|
||||
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize Docker base wrapper."""
|
||||
self.coresys: CoreSys = coresys
|
||||
self.data: AddonsData = AddonsData(coresys)
|
||||
self.local: dict[str, Addon] = {}
|
||||
self.store: dict[str, AddonStore] = {}
|
||||
|
||||
@property
|
||||
def all(self) -> list[AnyAddon]:
|
||||
"""Return a list of all add-ons."""
|
||||
addons: dict[str, AnyAddon] = {**self.store, **self.local}
|
||||
return list(addons.values())
|
||||
|
||||
@property
|
||||
def installed(self) -> list[Addon]:
|
||||
"""Return a list of all installed add-ons."""
|
||||
return list(self.local.values())
|
||||
|
||||
def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
|
||||
"""Return an add-on from slug.
|
||||
|
||||
Prio:
|
||||
1 - Local
|
||||
2 - Store
|
||||
"""
|
||||
if addon_slug in self.local:
|
||||
return self.local[addon_slug]
|
||||
if not local_only:
|
||||
return self.store.get(addon_slug)
|
||||
return None
|
||||
|
||||
def from_token(self, token: str) -> Addon | None:
|
||||
"""Return an add-on from Supervisor token."""
|
||||
for addon in self.installed:
|
||||
if token == addon.supervisor_token:
|
||||
return addon
|
||||
return None
|
||||
|
||||
async def load(self) -> None:
|
||||
"""Start up add-on management."""
|
||||
# Refresh cache for all store addons
|
||||
tasks: list[Awaitable[None]] = [
|
||||
store.refresh_path_cache() for store in self.store.values()
|
||||
]
|
||||
|
||||
# Load all installed addons
|
||||
for slug in self.data.system:
|
||||
addon = self.local[slug] = Addon(self.coresys, slug)
|
||||
tasks.append(addon.load())
|
||||
|
||||
# Run initial tasks
|
||||
_LOGGER.info("Found %d installed add-ons", len(self.data.system))
|
||||
if tasks:
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
# Sync DNS
|
||||
await self.sync_dns()
|
||||
|
||||
async def boot(self, stage: AddonStartup) -> None:
|
||||
"""Boot add-ons with mode auto."""
|
||||
tasks: list[Addon] = []
|
||||
for addon in self.installed:
|
||||
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
|
||||
continue
|
||||
tasks.append(addon)
|
||||
|
||||
# Evaluate add-ons which need to be started
|
||||
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
|
||||
if not tasks:
|
||||
return
|
||||
|
||||
# Start Add-ons sequential
|
||||
# avoid issue on slow IO
|
||||
# Config.wait_boot is deprecated. Until addons update with healthchecks,
|
||||
# add a sleep task for it to keep the same minimum amount of wait time
|
||||
wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
|
||||
for addon in tasks:
|
||||
try:
|
||||
if start_task := await addon.start():
|
||||
wait_boot.append(start_task)
|
||||
except AddonsError as err:
|
||||
# Check if there is a system/user issue
|
||||
if check_exception_chain(
|
||||
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
|
||||
):
|
||||
addon.boot = AddonBoot.MANUAL
|
||||
addon.save_persist()
|
||||
except HassioError:
|
||||
pass # These are already handled
|
||||
else:
|
||||
continue
|
||||
|
||||
_LOGGER.warning("Can't start Add-on %s", addon.slug)
|
||||
|
||||
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
|
||||
await asyncio.gather(*wait_boot, return_exceptions=True)
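A hedged mini-example, separate from the code above, of the gather pattern it relies on: return_exceptions=True makes a failing startup task come back as a value instead of raising here or cancelling its siblings.

import asyncio

async def main():
    async def ok():
        return "started"

    async def boom():
        raise RuntimeError("addon failed")

    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    # results[0] == "started"; results[1] is the RuntimeError instance, returned rather than raised
    assert isinstance(results[1], RuntimeError)

asyncio.run(main())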
|
||||
|
||||
async def shutdown(self, stage: AddonStartup) -> None:
|
||||
"""Shutdown addons."""
|
||||
tasks: list[Addon] = []
|
||||
for addon in self.installed:
|
||||
if addon.state != AddonState.STARTED or addon.startup != stage:
|
||||
continue
|
||||
tasks.append(addon)
|
||||
|
||||
# Evaluate add-ons which need to be stopped
|
||||
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
|
||||
if not tasks:
|
||||
return
|
||||
|
||||
# Stop Add-ons sequential
|
||||
# avoid issue on slow IO
|
||||
for addon in tasks:
|
||||
try:
|
||||
await addon.stop()
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
|
||||
capture_exception(err)
|
||||
|
||||
@Job(
|
||||
name="addon_manager_install",
|
||||
conditions=ADDON_UPDATE_CONDITIONS,
|
||||
on_condition=AddonsJobError,
|
||||
)
|
||||
async def install(self, slug: str) -> None:
|
||||
"""Install an add-on."""
|
||||
self.sys_jobs.current.reference = slug
|
||||
|
||||
if slug in self.local:
|
||||
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
|
||||
store = self.store.get(slug)
|
||||
|
||||
if not store:
|
||||
raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
|
||||
|
||||
store.validate_availability()
|
||||
|
||||
await Addon(self.coresys, slug).install()
|
||||
|
||||
_LOGGER.info("Add-on '%s' successfully installed", slug)
|
||||
|
||||
async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
|
||||
"""Remove an add-on."""
|
||||
if slug not in self.local:
|
||||
_LOGGER.warning("Add-on %s is not installed", slug)
|
||||
return
|
||||
|
||||
await self.local[slug].uninstall(remove_config=remove_config)
|
||||
|
||||
_LOGGER.info("Add-on '%s' successfully removed", slug)
|
||||
|
||||
@Job(
|
||||
name="addon_manager_update",
|
||||
conditions=ADDON_UPDATE_CONDITIONS,
|
||||
on_condition=AddonsJobError,
|
||||
)
|
||||
async def update(
|
||||
self, slug: str, backup: bool | None = False
|
||||
) -> asyncio.Task | None:
|
||||
"""Update add-on.
|
||||
|
||||
Returns a Task that completes when addon has state 'started' (see addon.start)
|
||||
if the add-on is started after the update. Otherwise nothing is returned.
|
||||
"""
|
||||
self.sys_jobs.current.reference = slug
|
||||
|
||||
if slug not in self.local:
|
||||
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
|
||||
addon = self.local[slug]
|
||||
|
||||
if addon.is_detached:
|
||||
raise AddonsError(
|
||||
f"Add-on {slug} is not available inside store", _LOGGER.error
|
||||
)
|
||||
store = self.store[slug]
|
||||
|
||||
if addon.version == store.version:
|
||||
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
|
||||
|
||||
# Check if still available, maybe something has changed
|
||||
store.validate_availability()
|
||||
|
||||
if backup:
|
||||
await self.sys_backups.do_backup_partial(
|
||||
name=f"addon_{addon.slug}_{addon.version}",
|
||||
homeassistant=False,
|
||||
addons=[addon.slug],
|
||||
)
|
||||
|
||||
return await addon.update()
|
||||
|
||||
@Job(
|
||||
name="addon_manager_rebuild",
|
||||
conditions=[
|
||||
JobCondition.FREE_SPACE,
|
||||
JobCondition.INTERNET_HOST,
|
||||
JobCondition.HEALTHY,
|
||||
],
|
||||
on_condition=AddonsJobError,
|
||||
)
|
||||
async def rebuild(self, slug: str) -> asyncio.Task | None:
|
||||
"""Perform a rebuild of local build add-on.
|
||||
|
||||
Returns a Task that completes when addon has state 'started' (see addon.start)
|
||||
if the add-on is started after the rebuild. Otherwise nothing is returned.
|
||||
"""
|
||||
self.sys_jobs.current.reference = slug
|
||||
|
||||
if slug not in self.local:
|
||||
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
|
||||
addon = self.local[slug]
|
||||
|
||||
if addon.is_detached:
|
||||
raise AddonsError(
|
||||
f"Add-on {slug} is not available inside store", _LOGGER.error
|
||||
)
|
||||
store = self.store[slug]
|
||||
|
||||
# Check if a rebuild is possible now
|
||||
if addon.version != store.version:
|
||||
raise AddonsError(
|
||||
"Version changed, use Update instead Rebuild", _LOGGER.error
|
||||
)
|
||||
if not addon.need_build:
|
||||
raise AddonsNotSupportedError(
|
||||
"Can't rebuild a image based add-on", _LOGGER.error
|
||||
)
|
||||
|
||||
return await addon.rebuild()
|
||||
|
||||
@Job(
|
||||
name="addon_manager_restore",
|
||||
conditions=[
|
||||
JobCondition.FREE_SPACE,
|
||||
JobCondition.INTERNET_HOST,
|
||||
JobCondition.HEALTHY,
|
||||
],
|
||||
on_condition=AddonsJobError,
|
||||
)
|
||||
async def restore(
|
||||
self, slug: str, tar_file: tarfile.TarFile
|
||||
) -> asyncio.Task | None:
|
||||
"""Restore state of an add-on.
|
||||
|
||||
Returns a Task that completes when addon has state 'started' (see addon.start)
|
||||
if the add-on is started after the restore. Otherwise nothing is returned.
|
||||
"""
|
||||
self.sys_jobs.current.reference = slug
|
||||
|
||||
if slug not in self.local:
|
||||
_LOGGER.debug("Add-on %s is not local available for restore", slug)
|
||||
addon = Addon(self.coresys, slug)
|
||||
had_ingress = False
|
||||
else:
|
||||
_LOGGER.debug("Add-on %s is local available for restore", slug)
|
||||
addon = self.local[slug]
|
||||
had_ingress = addon.ingress_panel
|
||||
|
||||
wait_for_start = await addon.restore(tar_file)
|
||||
|
||||
# Check if new
|
||||
if slug not in self.local:
|
||||
_LOGGER.info("Detect new Add-on after restore %s", slug)
|
||||
self.local[slug] = addon
|
||||
|
||||
# Update ingress
|
||||
if had_ingress != addon.ingress_panel:
|
||||
await self.sys_ingress.reload()
|
||||
with suppress(HomeAssistantAPIError):
|
||||
await self.sys_ingress.update_hass_panel(addon)
|
||||
|
||||
return wait_for_start
|
||||
|
||||
@Job(
|
||||
name="addon_manager_repair",
|
||||
conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
|
||||
)
|
||||
async def repair(self) -> None:
|
||||
"""Repair local add-ons."""
|
||||
needs_repair: list[Addon] = []
|
||||
|
||||
# Evaluate Add-ons to repair
|
||||
for addon in self.installed:
|
||||
if await addon.instance.exists():
|
||||
continue
|
||||
needs_repair.append(addon)
|
||||
|
||||
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
|
||||
if not needs_repair:
|
||||
return
|
||||
|
||||
for addon in needs_repair:
|
||||
_LOGGER.info("Repairing for add-on: %s", addon.slug)
|
||||
with suppress(DockerError, KeyError):
|
||||
# Need to pull the image again
|
||||
if not addon.need_build:
|
||||
await addon.instance.install(addon.version, addon.image)
|
||||
continue
|
||||
|
||||
# Need local lookup
|
||||
if addon.need_build and not addon.is_detached:
|
||||
store = self.store[addon.slug]
|
||||
# If this add-on is available for rebuild
|
||||
if addon.version == store.version:
|
||||
await addon.instance.install(addon.version, addon.image)
|
||||
continue
|
||||
|
||||
_LOGGER.error("Can't repair %s", addon.slug)
|
||||
with suppress(AddonsError):
|
||||
await self.uninstall(addon.slug)
|
||||
|
||||
async def sync_dns(self) -> None:
|
||||
"""Sync add-ons DNS names."""
|
||||
# Update hosts
|
||||
add_host_coros: list[Awaitable[None]] = []
|
||||
for addon in self.installed:
|
||||
try:
|
||||
if not await addon.instance.is_running():
|
||||
continue
|
||||
except DockerError as err:
|
||||
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
|
||||
self.sys_resolution.create_issue(
|
||||
IssueType.CORRUPT_DOCKER,
|
||||
ContextType.ADDON,
|
||||
reference=addon.slug,
|
||||
suggestions=[SuggestionType.EXECUTE_REPAIR],
|
||||
)
|
||||
capture_exception(err)
|
||||
else:
|
||||
add_host_coros.append(
|
||||
self.sys_plugins.dns.add_host(
|
||||
ipv4=addon.ip_address, names=[addon.hostname], write=False
|
||||
)
|
||||
)
|
||||
|
||||
await asyncio.gather(*add_host_coros)
|
||||
|
||||
# Write hosts files
|
||||
with suppress(CoreDNSError):
|
||||
await self.sys_plugins.dns.write_hosts()
|
@@ -1,13 +1,17 @@
|
||||
"""Init file for Supervisor add-ons."""
|
||||
from abc import ABC, abstractmethod
|
||||
from collections import defaultdict
|
||||
from collections.abc import Awaitable, Callable
|
||||
from contextlib import suppress
|
||||
from datetime import datetime
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from awesomeversion import AwesomeVersion, AwesomeVersionException
|
||||
|
||||
from supervisor.utils.dt import utc_from_timestamp
|
||||
|
||||
from ..const import (
|
||||
ATTR_ADVANCED,
|
||||
ATTR_APPARMOR,
|
||||
@@ -64,11 +68,13 @@ from ..const import (
|
||||
ATTR_TIMEOUT,
|
||||
ATTR_TMPFS,
|
||||
ATTR_TRANSLATIONS,
|
||||
ATTR_TYPE,
|
||||
ATTR_UART,
|
||||
ATTR_UDEV,
|
||||
ATTR_URL,
|
||||
ATTR_USB,
|
||||
ATTR_VERSION,
|
||||
ATTR_VERSION_TIMESTAMP,
|
||||
ATTR_VIDEO,
|
||||
ATTR_WATCHDOG,
|
||||
ATTR_WEBUI,
|
||||
@@ -79,25 +85,43 @@ from ..const import (
|
||||
AddonStage,
|
||||
AddonStartup,
|
||||
)
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..coresys import CoreSys
|
||||
from ..docker.const import Capabilities
|
||||
from ..exceptions import AddonsNotSupportedError
|
||||
from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode
|
||||
from ..jobs.const import JOB_GROUP_ADDON
|
||||
from ..jobs.job_group import JobGroup
|
||||
from ..utils import version_is_new_enough
|
||||
from .configuration import FolderMapping
|
||||
from .const import (
|
||||
ATTR_BACKUP,
|
||||
ATTR_BREAKING_VERSIONS,
|
||||
ATTR_CODENOTARY,
|
||||
ATTR_PATH,
|
||||
ATTR_READ_ONLY,
|
||||
AddonBackupMode,
|
||||
MappingType,
|
||||
)
|
||||
from .options import AddonOptions, UiOptions
|
||||
from .validate import RE_SERVICE, RE_VOLUME
|
||||
from .validate import RE_SERVICE
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
Data = dict[str, Any]
|
||||
|
||||
|
||||
class AddonModel(CoreSysAttributes, ABC):
|
||||
class AddonModel(JobGroup, ABC):
|
||||
"""Add-on Data layout."""
|
||||
|
||||
def __init__(self, coresys: CoreSys, slug: str):
|
||||
"""Initialize data holder."""
|
||||
self.coresys: CoreSys = coresys
|
||||
super().__init__(
|
||||
coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
|
||||
)
|
||||
self.slug: str = slug
|
||||
self._path_icon_exists: bool = False
|
||||
self._path_logo_exists: bool = False
|
||||
self._path_changelog_exists: bool = False
|
||||
self._path_documentation_exists: bool = False
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
@@ -206,6 +230,11 @@ class AddonModel(CoreSysAttributes, ABC):
|
||||
"""Return latest version of add-on."""
|
||||
return self.data[ATTR_VERSION]
|
||||
|
||||
@property
|
||||
def latest_version_timestamp(self) -> datetime:
|
||||
"""Return when latest version was first seen."""
|
||||
return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
|
||||
|
||||
@property
|
||||
def version(self) -> AwesomeVersion:
|
||||
"""Return version of add-on."""
|
||||
@@ -486,22 +515,22 @@ class AddonModel(CoreSysAttributes, ABC):
|
||||
@property
|
||||
def with_icon(self) -> bool:
|
||||
"""Return True if an icon exists."""
|
||||
return self.path_icon.exists()
|
||||
return self._path_icon_exists
|
||||
|
||||
@property
|
||||
def with_logo(self) -> bool:
|
||||
"""Return True if a logo exists."""
|
||||
return self.path_logo.exists()
|
||||
return self._path_logo_exists
|
||||
|
||||
@property
|
||||
def with_changelog(self) -> bool:
|
||||
"""Return True if a changelog exists."""
|
||||
return self.path_changelog.exists()
|
||||
return self._path_changelog_exists
|
||||
|
||||
@property
|
||||
def with_documentation(self) -> bool:
|
||||
"""Return True if a documentation exists."""
|
||||
return self.path_documentation.exists()
|
||||
return self._path_documentation_exists
|
||||
|
||||
@property
|
||||
def supported_arch(self) -> list[str]:
|
||||
@@ -532,14 +561,13 @@ class AddonModel(CoreSysAttributes, ABC):
|
||||
return ATTR_IMAGE not in self.data
|
||||
|
||||
@property
|
||||
def map_volumes(self) -> dict[str, str]:
|
||||
"""Return a dict of {volume: policy} from add-on."""
|
||||
def map_volumes(self) -> dict[MappingType, FolderMapping]:
|
||||
"""Return a dict of {MappingType: FolderMapping} from add-on."""
|
||||
volumes = {}
|
||||
for volume in self.data[ATTR_MAP]:
|
||||
result = RE_VOLUME.match(volume)
|
||||
if not result:
|
||||
continue
|
||||
volumes[result.group(1)] = result.group(2) or "ro"
|
||||
volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping(
|
||||
volume.get(ATTR_PATH), volume[ATTR_READ_ONLY]
|
||||
)
|
||||
|
||||
return volumes
|
||||
|
||||
@@ -606,6 +634,22 @@ class AddonModel(CoreSysAttributes, ABC):
|
||||
"""Return Signer email address for CAS."""
|
||||
return self.data.get(ATTR_CODENOTARY)
|
||||
|
||||
@property
|
||||
def breaking_versions(self) -> list[AwesomeVersion]:
|
||||
"""Return breaking versions of addon."""
|
||||
return self.data[ATTR_BREAKING_VERSIONS]
|
||||
|
||||
def refresh_path_cache(self) -> Awaitable[None]:
|
||||
"""Refresh cache of existing paths."""
|
||||
|
||||
def check_paths():
|
||||
self._path_icon_exists = self.path_icon.exists()
|
||||
self._path_logo_exists = self.path_logo.exists()
|
||||
self._path_changelog_exists = self.path_changelog.exists()
|
||||
self._path_documentation_exists = self.path_documentation.exists()
|
||||
|
||||
return self.sys_run_in_executor(check_paths)
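A hedged sketch of the intended call pattern: Path.exists() blocks, so the checks run in an executor and a caller can gather many refreshes concurrently, as the manager's load() does. The store_addons iterable below is illustrative.

import asyncio

async def refresh_all(store_addons):
    # One executor job per add-on; awaited together, keeping blocking I/O off the event loop
    await asyncio.gather(*(addon.refresh_path_cache() for addon in store_addons))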
|
||||
|
||||
def validate_availability(self) -> None:
|
||||
"""Validate if addon is available for current system."""
|
||||
return self._validate_availability(self.data, logger=_LOGGER.error)
|
||||
@@ -640,7 +684,9 @@ class AddonModel(CoreSysAttributes, ABC):
|
||||
# Home Assistant
|
||||
version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT)
|
||||
with suppress(AwesomeVersionException, TypeError):
|
||||
if self.sys_homeassistant.version < version:
|
||||
if version and not version_is_new_enough(
|
||||
self.sys_homeassistant.version, version
|
||||
):
|
||||
raise AddonsNotSupportedError(
|
||||
f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater",
|
||||
logger,
|
||||
@@ -664,19 +710,3 @@ class AddonModel(CoreSysAttributes, ABC):
|
||||
|
||||
# local build
|
||||
return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"
|
||||
|
||||
def install(self) -> Awaitable[None]:
|
||||
"""Install this add-on."""
|
||||
return self.sys_addons.install(self.slug)
|
||||
|
||||
def uninstall(self) -> Awaitable[None]:
|
||||
"""Uninstall this add-on."""
|
||||
return self.sys_addons.uninstall(self.slug)
|
||||
|
||||
def update(self, backup: bool | None = False) -> Awaitable[None]:
|
||||
"""Update this add-on."""
|
||||
return self.sys_addons.update(self.slug, backup=backup)
|
||||
|
||||
def rebuild(self) -> Awaitable[None]:
|
||||
"""Rebuild this add-on."""
|
||||
return self.sys_addons.rebuild(self.slug)
|
||||
|
@@ -44,12 +44,15 @@ def rating_security(addon: AddonModel) -> int:
|
||||
any(
|
||||
privilege in addon.privileged
|
||||
for privilege in (
|
||||
Capabilities.NET_ADMIN,
|
||||
Capabilities.SYS_ADMIN,
|
||||
Capabilities.SYS_RAWIO,
|
||||
Capabilities.SYS_PTRACE,
|
||||
Capabilities.SYS_MODULE,
|
||||
Capabilities.BPF,
|
||||
Capabilities.DAC_READ_SEARCH,
|
||||
Capabilities.NET_ADMIN,
|
||||
Capabilities.NET_RAW,
|
||||
Capabilities.PERFMON,
|
||||
Capabilities.SYS_ADMIN,
|
||||
Capabilities.SYS_MODULE,
|
||||
Capabilities.SYS_PTRACE,
|
||||
Capabilities.SYS_RAWIO,
|
||||
)
|
||||
)
|
||||
or addon.with_kernel_modules
|
||||
|
@@ -81,6 +81,7 @@ from ..const import (
|
||||
ATTR_TIMEOUT,
|
||||
ATTR_TMPFS,
|
||||
ATTR_TRANSLATIONS,
|
||||
ATTR_TYPE,
|
||||
ATTR_UART,
|
||||
ATTR_UDEV,
|
||||
ATTR_URL,
|
||||
@@ -98,7 +99,6 @@ from ..const import (
|
||||
AddonStartup,
|
||||
AddonState,
|
||||
)
|
||||
from ..discovery.validate import valid_discovery_service
|
||||
from ..docker.const import Capabilities
|
||||
from ..validate import (
|
||||
docker_image,
|
||||
@@ -109,12 +109,23 @@ from ..validate import (
|
||||
uuid_match,
|
||||
version_tag,
|
||||
)
|
||||
from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode
|
||||
from .const import (
|
||||
ATTR_BACKUP,
|
||||
ATTR_BREAKING_VERSIONS,
|
||||
ATTR_CODENOTARY,
|
||||
ATTR_PATH,
|
||||
ATTR_READ_ONLY,
|
||||
RE_SLUG,
|
||||
AddonBackupMode,
|
||||
MappingType,
|
||||
)
|
||||
from .options import RE_SCHEMA_ELEMENT
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
|
||||
RE_VOLUME = re.compile(
|
||||
r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$"
|
||||
)
|
||||
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")
|
||||
|
||||
|
||||
@@ -130,6 +141,7 @@ RE_MACHINE = re.compile(
|
||||
r"|generic-x86-64"
|
||||
r"|odroid-c2"
|
||||
r"|odroid-c4"
|
||||
r"|odroid-m1"
|
||||
r"|odroid-n2"
|
||||
r"|odroid-xu"
|
||||
r"|qemuarm-64"
|
||||
@@ -142,10 +154,15 @@ RE_MACHINE = re.compile(
|
||||
r"|raspberrypi3"
|
||||
r"|raspberrypi4-64"
|
||||
r"|raspberrypi4"
|
||||
r"|raspberrypi5-64"
|
||||
r"|yellow"
|
||||
r"|green"
|
||||
r"|tinker"
|
||||
r")$"
|
||||
)
|
||||
|
||||
RE_SLUG_FIELD = re.compile(r"^" + RE_SLUG + r"$")
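A hedged illustration of what the slug pattern accepts; RE_SLUG allows letters, digits, dash, underscore, and dot, anchored over the whole field.

import re

RE_SLUG_FIELD_DEMO = re.compile(r"^[-_.A-Za-z0-9]+$")  # mirrors the definition above
assert RE_SLUG_FIELD_DEMO.match("core_mosquitto")    # typical slug
assert RE_SLUG_FIELD_DEMO.match("a0d7b954-adguard")  # digits and dashes are fine
assert not RE_SLUG_FIELD_DEMO.match("bad slug!")     # spaces and '!' are rejected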
|
||||
|
||||
|
||||
def _warn_addon_config(config: dict[str, Any]):
|
||||
"""Warn about miss configs."""
|
||||
@@ -193,9 +210,9 @@ def _migrate_addon_config(protocol=False):
|
||||
name,
|
||||
)
|
||||
if value == "before":
|
||||
config[ATTR_STARTUP] = AddonStartup.SERVICES.value
|
||||
config[ATTR_STARTUP] = AddonStartup.SERVICES
|
||||
elif value == "after":
|
||||
config[ATTR_STARTUP] = AddonStartup.APPLICATION.value
|
||||
config[ATTR_STARTUP] = AddonStartup.APPLICATION
|
||||
|
||||
# UART 2021-01-20
|
||||
if "auto_uart" in config:
|
||||
@@ -241,6 +258,48 @@ def _migrate_addon_config(protocol=False):
|
||||
name,
|
||||
)
|
||||
|
||||
# 2023-11 "map" entries can also be dict to allow path configuration
|
||||
volumes = []
|
||||
for entry in config.get(ATTR_MAP, []):
|
||||
if isinstance(entry, dict):
|
||||
volumes.append(entry)
|
||||
if isinstance(entry, str):
|
||||
result = RE_VOLUME.match(entry)
|
||||
if not result:
|
||||
continue
|
||||
volumes.append(
|
||||
{
|
||||
ATTR_TYPE: result.group(1),
|
||||
ATTR_READ_ONLY: result.group(2) != "rw",
|
||||
}
|
||||
)
|
||||
|
||||
if volumes:
|
||||
config[ATTR_MAP] = volumes
|
||||
|
||||
# 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
|
||||
if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
|
||||
if any(
|
||||
volume
|
||||
and volume[ATTR_TYPE]
|
||||
in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG}
|
||||
for volume in volumes
|
||||
):
|
||||
_LOGGER.warning(
|
||||
"Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s",
|
||||
MappingType.ADDON_CONFIG,
|
||||
MappingType.HOMEASSISTANT_CONFIG,
|
||||
MappingType.CONFIG,
|
||||
name,
|
||||
)
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
"Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s",
|
||||
MappingType.CONFIG,
|
||||
MappingType.HOMEASSISTANT_CONFIG,
|
||||
name,
|
||||
)
|
||||
|
||||
return config
|
||||
|
||||
return _migrate
|
||||
@@ -251,7 +310,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_NAME): str,
|
||||
vol.Required(ATTR_VERSION): version_tag,
|
||||
vol.Required(ATTR_SLUG): str,
|
||||
vol.Required(ATTR_SLUG): vol.Match(RE_SLUG_FIELD),
|
||||
vol.Required(ATTR_DESCRIPTON): str,
|
||||
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
|
||||
vol.Optional(ATTR_MACHINE): vol.All([vol.Match(RE_MACHINE)], vol.Unique()),
|
||||
@@ -289,7 +348,15 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
|
||||
vol.Optional(ATTR_DEVICES): [str],
|
||||
vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
|
||||
vol.Optional(ATTR_MAP, default=list): [
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_TYPE): vol.Coerce(MappingType),
|
||||
vol.Optional(ATTR_READ_ONLY, default=True): bool,
|
||||
vol.Optional(ATTR_PATH): str,
|
||||
}
|
||||
)
|
||||
],
|
||||
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
|
||||
vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)],
|
||||
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
|
||||
@@ -310,7 +377,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
|
||||
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
|
||||
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
|
||||
vol.Optional(ATTR_DISCOVERY): [str],
|
||||
vol.Optional(ATTR_BACKUP_EXCLUDE): [str],
|
||||
vol.Optional(ATTR_BACKUP_PRE): str,
|
||||
vol.Optional(ATTR_BACKUP_POST): str,
|
||||
@@ -341,6 +408,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
|
||||
vol.Coerce(int), vol.Range(min=10, max=300)
|
||||
),
|
||||
vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
||||
|
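With the schema change above, a "map" entry can be either a legacy string such as "config:rw" or a dict that names the mapping type, its writability, and an optional custom target path. A minimal sketch of what the migration in _migrate_addon_config produces for legacy entries (plain key names substituted for the ATTR_* constants; behavior inferred from the diff, so treat it as illustrative):

# Legacy string entries are rewritten into dicts by the migration above.
legacy = ["config:rw", "ssl"]
migrated = [
    {"type": "config", "read_only": False},  # ":rw" suffix -> read_only=False
    {"type": "ssl", "read_only": True},      # no suffix -> read-only by default
]

# New-style entries may also carry a custom container path (ATTR_PATH):
new_style = [{"type": "addon_config", "read_only": False, "path": "/config"}]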
@@ -5,15 +5,18 @@ from pathlib import Path
from typing import Any

from aiohttp import web
from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispatcher

from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
from ..utils.sentry import capture_exception
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
from .backups import APIBackups
from .cli import APICli
from .const import CONTENT_TYPE_TEXT
from .discovery import APIDiscovery
from .dns import APICoreDNS
from .docker import APIDocker
@@ -23,6 +26,7 @@ from .host import APIHost
from .ingress import APIIngress
from .jobs import APIJobs
from .middleware.security import SecurityMiddleware
from .mounts import APIMounts
from .multicast import APIMulticast
from .network import APINetwork
from .observer import APIObserver
@@ -34,7 +38,7 @@ from .security import APISecurity
from .services import APIServices
from .store import APIStore
from .supervisor import APISupervisor
from .utils import api_process
from .utils import api_process, api_process_raw

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -56,19 +60,27 @@ class RestAPI(CoreSysAttributes):
                self.security.block_bad_requests,
                self.security.system_validation,
                self.security.token_validation,
                self.security.core_proxy,
            ],
            handler_args={
                "max_line_size": MAX_LINE_SIZE,
                "max_field_size": MAX_LINE_SIZE,
            },
        )
        attach_fast_url_dispatcher(self.webapp, FastUrlDispatcher())

        # service stuff
        self._runner: web.AppRunner = web.AppRunner(self.webapp)
        self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
        self._site: web.TCPSite | None = None

        # share single host API handler for reuse in logging endpoints
        self._api_host: APIHost | None = None

    async def load(self) -> None:
        """Register REST API Calls."""
        self._api_host = APIHost()
        self._api_host.coresys = self.coresys

        self._register_addons()
        self._register_audio()
        self._register_auth()
@@ -80,27 +92,59 @@ class RestAPI(CoreSysAttributes):
        self._register_hardware()
        self._register_homeassistant()
        self._register_host()
        self._register_root()
        self._register_jobs()
        self._register_ingress()
        self._register_mounts()
        self._register_multicast()
        self._register_network()
        self._register_observer()
        self._register_os()
        self._register_jobs()
        self._register_panel()
        self._register_proxy()
        self._register_resolution()
        self._register_services()
        self._register_supervisor()
        self._register_store()
        self._register_root()
        self._register_security()
        self._register_services()
        self._register_store()
        self._register_supervisor()

        await self.start()

    def _register_advanced_logs(self, path: str, syslog_identifier: str):
        """Register logs endpoint for a given path, returning logs for single syslog identifier."""

        self.webapp.add_routes(
            [
                web.get(
                    f"{path}/logs",
                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
                ),
                web.get(
                    f"{path}/logs/follow",
                    partial(
                        self._api_host.advanced_logs,
                        identifier=syslog_identifier,
                        follow=True,
                    ),
                ),
                web.get(
                    f"{path}/logs/boots/{{bootid}}",
                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
                ),
                web.get(
                    f"{path}/logs/boots/{{bootid}}/follow",
                    partial(
                        self._api_host.advanced_logs,
                        identifier=syslog_identifier,
                        follow=True,
                    ),
                ),
            ]
        )

    def _register_host(self) -> None:
        """Register hostcontrol functions."""
        api_host = APIHost()
        api_host.coresys = self.coresys
        api_host = self._api_host

        self.webapp.add_routes(
            [
@@ -177,12 +221,16 @@ class RestAPI(CoreSysAttributes):
                web.post("/os/config/sync", api_os.config_sync),
                web.post("/os/datadisk/move", api_os.migrate_data),
                web.get("/os/datadisk/list", api_os.list_data),
                web.post("/os/datadisk/wipe", api_os.wipe_data),
                web.post("/os/boot-slot", api_os.set_boot_slot),
            ]
        )

        # Boards endpoints
        self.webapp.add_routes(
            [
                web.get("/os/boards/green", api_os.boards_green_info),
                web.post("/os/boards/green", api_os.boards_green_options),
                web.get("/os/boards/yellow", api_os.boards_yellow_info),
                web.post("/os/boards/yellow", api_os.boards_yellow_options),
                web.get("/os/boards/{board}", api_os.boards_other_info),
@@ -212,6 +260,8 @@ class RestAPI(CoreSysAttributes):
                web.get("/jobs/info", api_jobs.info),
                web.post("/jobs/options", api_jobs.options),
                web.post("/jobs/reset", api_jobs.reset),
                web.get("/jobs/{uuid}", api_jobs.job_info),
                web.delete("/jobs/{uuid}", api_jobs.remove_job),
            ]
        )

@@ -250,11 +300,11 @@ class RestAPI(CoreSysAttributes):
            [
                web.get("/multicast/info", api_multicast.info),
                web.get("/multicast/stats", api_multicast.stats),
                web.get("/multicast/logs", api_multicast.logs),
                web.post("/multicast/update", api_multicast.update),
                web.post("/multicast/restart", api_multicast.restart),
            ]
        )
        self._register_advanced_logs("/multicast", "hassio_multicast")

    def _register_hardware(self) -> None:
        """Register hardware functions."""
@@ -327,6 +377,7 @@ class RestAPI(CoreSysAttributes):
                web.post("/auth", api_auth.auth),
                web.post("/auth/reset", api_auth.reset),
                web.delete("/auth/cache", api_auth.cache),
                web.get("/auth/list", api_auth.list_users),
            ]
        )

@@ -340,7 +391,6 @@ class RestAPI(CoreSysAttributes):
                web.get("/supervisor/ping", api_supervisor.ping),
                web.get("/supervisor/info", api_supervisor.info),
                web.get("/supervisor/stats", api_supervisor.stats),
                web.get("/supervisor/logs", api_supervisor.logs),
                web.post("/supervisor/update", api_supervisor.update),
                web.post("/supervisor/reload", api_supervisor.reload),
                web.post("/supervisor/restart", api_supervisor.restart),
@@ -349,6 +399,38 @@ class RestAPI(CoreSysAttributes):
            ]
        )

        async def get_supervisor_logs(*args, **kwargs):
            try:
                return await self._api_host.advanced_logs_handler(
                    *args, identifier="hassio_supervisor", **kwargs
                )
            except Exception as err:  # pylint: disable=broad-exception-caught
                # Supervisor logs are critical, so catch everything, log the exception
                # and try to return Docker container logs as the fallback
                _LOGGER.exception(
                    "Failed to get supervisor logs using advanced_logs API"
                )
                if not isinstance(err, HostNotSupportedError):
                    # No need to capture HostNotSupportedError to Sentry, the cause
                    # is known and reported to the user using the resolution center.
                    capture_exception(err)
                return await api_supervisor.logs(*args, **kwargs)

        self.webapp.add_routes(
            [
                web.get("/supervisor/logs", get_supervisor_logs),
                web.get(
                    "/supervisor/logs/follow",
                    partial(get_supervisor_logs, follow=True),
                ),
                web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
                web.get(
                    "/supervisor/logs/boots/{bootid}/follow",
                    partial(get_supervisor_logs, follow=True),
                ),
            ]
        )

    def _register_homeassistant(self) -> None:
        """Register Home Assistant functions."""
        api_hass = APIHomeAssistant()
@@ -357,7 +439,6 @@ class RestAPI(CoreSysAttributes):
        self.webapp.add_routes(
            [
                web.get("/core/info", api_hass.info),
                web.get("/core/logs", api_hass.logs),
                web.get("/core/stats", api_hass.stats),
                web.post("/core/options", api_hass.options),
                web.post("/core/update", api_hass.update),
@@ -369,11 +450,12 @@ class RestAPI(CoreSysAttributes):
            ]
        )

        self._register_advanced_logs("/core", "homeassistant")

        # Reroute from legacy
        self.webapp.add_routes(
            [
                web.get("/homeassistant/info", api_hass.info),
                web.get("/homeassistant/logs", api_hass.logs),
                web.get("/homeassistant/stats", api_hass.stats),
                web.post("/homeassistant/options", api_hass.options),
                web.post("/homeassistant/restart", api_hass.restart),
@@ -385,6 +467,8 @@ class RestAPI(CoreSysAttributes):
            ]
        )

        self._register_advanced_logs("/homeassistant", "homeassistant")

    def _register_proxy(self) -> None:
        """Register Home Assistant API Proxy."""
        api_proxy = APIProxy()
@@ -431,13 +515,33 @@ class RestAPI(CoreSysAttributes):
                ),
                web.get("/addons/{addon}/options/config", api_addons.options_config),
                web.post("/addons/{addon}/rebuild", api_addons.rebuild),
                web.get("/addons/{addon}/logs", api_addons.logs),
                web.post("/addons/{addon}/stdin", api_addons.stdin),
                web.post("/addons/{addon}/security", api_addons.security),
                web.get("/addons/{addon}/stats", api_addons.stats),
            ]
        )

        @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
        async def get_addon_logs(request, *args, **kwargs):
            addon = api_addons.get_addon_for_request(request)
            kwargs["identifier"] = f"addon_{addon.slug}"
            return await self._api_host.advanced_logs(request, *args, **kwargs)

        self.webapp.add_routes(
            [
                web.get("/addons/{addon}/logs", get_addon_logs),
                web.get(
                    "/addons/{addon}/logs/follow",
                    partial(get_addon_logs, follow=True),
                ),
                web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
                web.get(
                    "/addons/{addon}/logs/boots/{bootid}/follow",
                    partial(get_addon_logs, follow=True),
                ),
            ]
        )

        # Legacy routing to support requests for not installed addons
        api_store = APIStore()
        api_store.coresys = self.coresys
@@ -482,6 +586,8 @@ class RestAPI(CoreSysAttributes):
                web.get("/backups/info", api_backups.info),
                web.post("/backups/options", api_backups.options),
                web.post("/backups/reload", api_backups.reload),
                web.post("/backups/freeze", api_backups.freeze),
                web.post("/backups/thaw", api_backups.thaw),
                web.post("/backups/new/full", api_backups.backup_full),
                web.post("/backups/new/partial", api_backups.backup_partial),
                web.post("/backups/new/upload", api_backups.upload),
@@ -533,7 +639,6 @@ class RestAPI(CoreSysAttributes):
            [
                web.get("/dns/info", api_dns.info),
                web.get("/dns/stats", api_dns.stats),
                web.get("/dns/logs", api_dns.logs),
                web.post("/dns/update", api_dns.update),
                web.post("/dns/options", api_dns.options),
                web.post("/dns/restart", api_dns.restart),
@@ -541,18 +646,17 @@ class RestAPI(CoreSysAttributes):
            ]
        )

        self._register_advanced_logs("/dns", "hassio_dns")

    def _register_audio(self) -> None:
        """Register Audio functions."""
        api_audio = APIAudio()
        api_audio.coresys = self.coresys
        api_host = APIHost()
        api_host.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/audio/info", api_audio.info),
                web.get("/audio/stats", api_audio.stats),
                web.get("/audio/logs", api_audio.logs),
                web.post("/audio/update", api_audio.update),
                web.post("/audio/restart", api_audio.restart),
                web.post("/audio/reload", api_audio.reload),
@@ -565,6 +669,24 @@ class RestAPI(CoreSysAttributes):
            ]
        )

        self._register_advanced_logs("/audio", "hassio_audio")

    def _register_mounts(self) -> None:
        """Register mounts endpoints."""
        api_mounts = APIMounts()
        api_mounts.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/mounts", api_mounts.info),
                web.post("/mounts/options", api_mounts.options),
                web.post("/mounts", api_mounts.create_mount),
                web.put("/mounts/{mount}", api_mounts.update_mount),
                web.delete("/mounts/{mount}", api_mounts.delete_mount),
                web.post("/mounts/{mount}/reload", api_mounts.reload_mount),
            ]
        )

    def _register_store(self) -> None:
        """Register store endpoints."""
        api_store = APIStore()
@@ -575,7 +697,6 @@ class RestAPI(CoreSysAttributes):
                web.get("/store", api_store.store_info),
                web.get("/store/addons", api_store.addons_list),
                web.get("/store/addons/{addon}", api_store.addons_addon_info),
                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
                web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
                web.get(
@@ -597,6 +718,8 @@ class RestAPI(CoreSysAttributes):
                    "/store/addons/{addon}/update/{version}",
                    api_store.addons_addon_update,
                ),
                # Must be below others since it has a wildcard in resource path
                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                web.post("/store/reload", api_store.reload),
                web.get("/store/repositories", api_store.repositories_list),
                web.get(
@@ -648,9 +771,7 @@ class RestAPI(CoreSysAttributes):
    async def start(self) -> None:
        """Run RESTful API webserver."""
        await self._runner.setup()
        self._site = web.TCPSite(
            self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
        )
        self._site = web.TCPSite(self._runner, host="0.0.0.0", port=80)

        try:
            await self._site.start()
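_register_advanced_logs above leans on functools.partial to pre-bind the syslog identifier and the follow flag, so a single APIHost.advanced_logs coroutine serves every /logs route. A self-contained sketch of the same aiohttp pattern (the handler and routes here are illustrative, not part of this diff):

from functools import partial

from aiohttp import web


async def logs(request: web.Request, identifier: str | None = None, follow: bool = False):
    # aiohttp passes only the request; identifier/follow arrive pre-bound by partial().
    return web.Response(text=f"identifier={identifier} follow={follow}")


app = web.Application()
app.add_routes(
    [
        web.get("/dns/logs", partial(logs, identifier="hassio_dns")),
        web.get("/dns/logs/follow", partial(logs, identifier="hassio_dns", follow=True)),
    ]
)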
@@ -8,8 +8,8 @@ from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..addons import AnyAddon
from ..addons.addon import Addon
from ..addons.manager import AnyAddon
from ..addons.utils import rating_security
from ..const import (
    ATTR_ADDONS,
@@ -106,8 +106,8 @@ from ..exceptions import (
    PwnedSecret,
)
from ..validate import docker_ports
from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate, json_loads
from .const import ATTR_REMOVE_CONFIG, ATTR_SIGNED
from .utils import api_process, api_validate, json_loads

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -126,15 +126,19 @@ SCHEMA_OPTIONS = vol.Schema(
    }
)

# pylint: disable=no-value-for-parameter
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})

SCHEMA_UNINSTALL = vol.Schema(
    {vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
)
# pylint: enable=no-value-for-parameter


class APIAddons(CoreSysAttributes):
    """Handle RESTful API for add-on functions."""

    def _extract_addon(self, request: web.Request) -> Addon:
        """Return addon, throw an exception it it doesn't exist."""
    def get_addon_for_request(self, request: web.Request) -> Addon:
        """Return addon, throw an exception if it doesn't exist."""
        addon_slug: str = request.match_info.get("addon")

        # Lookup itself
@@ -187,7 +191,7 @@ class APIAddons(CoreSysAttributes):

    async def info(self, request: web.Request) -> dict[str, Any]:
        """Return add-on information."""
        addon: AnyAddon = self._extract_addon(request)
        addon: AnyAddon = self.get_addon_for_request(request)

        data = {
            ATTR_NAME: addon.name,
@@ -268,7 +272,7 @@ class APIAddons(CoreSysAttributes):
    @api_process
    async def options(self, request: web.Request) -> None:
        """Store user options for add-on."""
        addon = self._extract_addon(request)
        addon = self.get_addon_for_request(request)

        # Update secrets for validation
        await self.sys_homeassistant.secrets.reload()
@@ -303,7 +307,7 @@ class APIAddons(CoreSysAttributes):
    @api_process
    async def options_validate(self, request: web.Request) -> None:
        """Validate user options for add-on."""
        addon = self._extract_addon(request)
        addon = self.get_addon_for_request(request)
        data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}

        options = await request.json(loads=json_loads) or addon.options
@@ -345,7 +349,7 @@ class APIAddons(CoreSysAttributes):
        slug: str = request.match_info.get("addon")
        if slug != "self":
            raise APIForbidden("This can be only read by the Add-on itself!")
        addon = self._extract_addon(request)
        addon = self.get_addon_for_request(request)

        # Lookup/reload secrets
        await self.sys_homeassistant.secrets.reload()
@@ -357,7 +361,7 @@ class APIAddons(CoreSysAttributes):
    @api_process
    async def security(self, request: web.Request) -> None:
        """Store security options for add-on."""
        addon = self._extract_addon(request)
        addon = self.get_addon_for_request(request)
        body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

        if ATTR_PROTECTED in body:
@@ -369,7 +373,7 @@ class APIAddons(CoreSysAttributes):
    @api_process
    async def stats(self, request: web.Request) -> dict[str, Any]:
        """Return resource information."""
        addon = self._extract_addon(request)
        addon = self.get_addon_for_request(request)

        stats: DockerStats = await addon.stats()

@@ -385,45 +389,47 @@ class APIAddons(CoreSysAttributes):
        }

    @api_process
    def uninstall(self, request: web.Request) -> Awaitable[None]:
    async def uninstall(self, request: web.Request) -> Awaitable[None]:
        """Uninstall add-on."""
        addon = self._extract_addon(request)
        return asyncio.shield(addon.uninstall())
        addon = self.get_addon_for_request(request)
        body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
        return await asyncio.shield(
            self.sys_addons.uninstall(
                addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
            )
        )

    @api_process
    def start(self, request: web.Request) -> Awaitable[None]:
    async def start(self, request: web.Request) -> None:
        """Start add-on."""
        addon = self._extract_addon(request)
        return asyncio.shield(addon.start())
        addon = self.get_addon_for_request(request)
        if start_task := await asyncio.shield(addon.start()):
            await start_task

    @api_process
    def stop(self, request: web.Request) -> Awaitable[None]:
        """Stop add-on."""
        addon = self._extract_addon(request)
        addon = self.get_addon_for_request(request)
        return asyncio.shield(addon.stop())

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
    async def restart(self, request: web.Request) -> None:
        """Restart add-on."""
        addon: Addon = self._extract_addon(request)
        return asyncio.shield(addon.restart())
        addon: Addon = self.get_addon_for_request(request)
        if start_task := await asyncio.shield(addon.restart()):
            await start_task

    @api_process
    def rebuild(self, request: web.Request) -> Awaitable[None]:
    async def rebuild(self, request: web.Request) -> None:
        """Rebuild local build add-on."""
        addon = self._extract_addon(request)
        return asyncio.shield(addon.rebuild())

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return logs from add-on."""
        addon = self._extract_addon(request)
        return addon.logs()
        addon = self.get_addon_for_request(request)
        if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
            await start_task

    @api_process
    async def stdin(self, request: web.Request) -> None:
        """Write to stdin of add-on."""
        addon = self._extract_addon(request)
        addon = self.get_addon_for_request(request)
        if not addon.with_stdin:
            raise APIError(f"STDIN not supported the {addon.slug} add-on")
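Given SCHEMA_UNINSTALL above, the uninstall endpoint now accepts an optional JSON body. A hedged client-side sketch (route shape from the registrations earlier in this compare; the token stands in for SUPERVISOR_TOKEN):

import aiohttp


async def uninstall_addon(slug: str, token: str) -> None:
    # remove_config=True asks the Supervisor to also delete the add-on's
    # folder under /addon_configs; it defaults to False.
    async with aiohttp.ClientSession() as session:
        await session.post(
            f"http://supervisor/addons/{slug}/uninstall",
            json={"remove_config": True},
            headers={"Authorization": f"Bearer {token}"},
        )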
@@ -1,11 +1,11 @@
"""Init file for Supervisor Audio RESTful API."""
import asyncio
from collections.abc import Awaitable
from dataclasses import asdict
import logging
from typing import Any

from aiohttp import web
import attr
import voluptuous as vol

from ..const import (
@@ -35,8 +35,7 @@ from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..host.sound import StreamType
from ..validate import version_tag
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -76,15 +75,11 @@ class APIAudio(CoreSysAttributes):
            ATTR_UPDATE_AVAILABLE: self.sys_plugins.audio.need_update,
            ATTR_HOST: str(self.sys_docker.network.audio),
            ATTR_AUDIO: {
                ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards],
                ATTR_INPUT: [
                    attr.asdict(stream) for stream in self.sys_host.sound.inputs
                ],
                ATTR_OUTPUT: [
                    attr.asdict(stream) for stream in self.sys_host.sound.outputs
                ],
                ATTR_CARD: [asdict(card) for card in self.sys_host.sound.cards],
                ATTR_INPUT: [asdict(stream) for stream in self.sys_host.sound.inputs],
                ATTR_OUTPUT: [asdict(stream) for stream in self.sys_host.sound.outputs],
                ATTR_APPLICATION: [
                    attr.asdict(stream) for stream in self.sys_host.sound.applications
                    asdict(stream) for stream in self.sys_host.sound.applications
                ],
            },
        }
@@ -115,11 +110,6 @@ class APIAudio(CoreSysAttributes):
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_plugins.audio.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return Audio Docker logs."""
        return self.sys_plugins.audio.logs()

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart Audio plugin."""
@@ -1,6 +1,7 @@
"""Init file for Supervisor auth/SSO RESTful API."""
import asyncio
import logging
from typing import Any

from aiohttp import BasicAuth, web
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -8,10 +9,19 @@ from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol

from ..addons.addon import Addon
from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
from ..utils.json import json_loads
from .const import (
    ATTR_GROUP_IDS,
    ATTR_IS_ACTIVE,
    ATTR_IS_OWNER,
    ATTR_LOCAL_ONLY,
    ATTR_USERS,
    CONTENT_TYPE_JSON,
    CONTENT_TYPE_URL,
)
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -67,7 +77,7 @@ class APIAuth(CoreSysAttributes):

        # Json
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
            data = await request.json()
            data = await request.json(loads=json_loads)
            return await self._process_dict(request, addon, data)

        # URL encoded
@@ -89,3 +99,21 @@ class APIAuth(CoreSysAttributes):
    async def cache(self, request: web.Request) -> None:
        """Process cache reset request."""
        self.sys_auth.reset_data()

    @api_process
    async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
        """List users on the Home Assistant instance."""
        return {
            ATTR_USERS: [
                {
                    ATTR_USERNAME: user[ATTR_USERNAME],
                    ATTR_NAME: user[ATTR_NAME],
                    ATTR_IS_OWNER: user[ATTR_IS_OWNER],
                    ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
                    ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
                    ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
                }
                for user in await self.sys_auth.list_users()
                if user[ATTR_USERNAME]
            ]
        }
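list_users above skips system users without a username and exposes only a fixed set of fields. A sketch of the payload the handler returns, before the usual API envelope is applied (values are made up):

users_response = {
    "users": [
        {
            "username": "anna",
            "name": "Anna",
            "is_owner": True,
            "is_active": True,
            "local_only": False,
            "group_ids": ["system-admin"],
        }
    ]
}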
@@ -1,14 +1,18 @@
"""Backups RESTful API."""
import asyncio
from collections.abc import Callable
import errno
import logging
from pathlib import Path
import re
from tempfile import TemporaryDirectory
from typing import Any

from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol

from ..backups.backup import Backup
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
from ..const import (
    ATTR_ADDONS,
@@ -19,6 +23,8 @@ from ..const import (
    ATTR_DAYS_UNTIL_STALE,
    ATTR_FOLDERS,
    ATTR_HOMEASSISTANT,
    ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
    ATTR_LOCATON,
    ATTR_NAME,
    ATTR_PASSWORD,
    ATTR_PROTECTED,
@@ -26,12 +32,18 @@ from ..const import (
    ATTR_SIZE,
    ATTR_SLUG,
    ATTR_SUPERVISOR_VERSION,
    ATTR_TIMEOUT,
    ATTR_TYPE,
    ATTR_VERSION,
    BusEvent,
    CoreState,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from .const import CONTENT_TYPE_TAR
from ..jobs import JobSchedulerOptions
from ..mounts.const import MountUsage
from ..resolution.const import UnhealthyReason
from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -43,22 +55,29 @@ RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
_ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]

# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema(
SCHEMA_RESTORE_FULL = vol.Schema(
    {
        vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
    }
)

SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
    {
        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
    }
)

SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})

SCHEMA_BACKUP_FULL = vol.Schema(
    {
        vol.Optional(ATTR_NAME): str,
        vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
        vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
        vol.Optional(ATTR_LOCATON): vol.Maybe(str),
        vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
    }
)
@@ -76,6 +95,12 @@ SCHEMA_OPTIONS = vol.Schema(
    }
)

SCHEMA_FREEZE = vol.Schema(
    {
        vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
    }
)


class APIBackups(CoreSysAttributes):
    """Handle RESTful API for backups functions."""
@@ -96,6 +121,7 @@ class APIBackups(CoreSysAttributes):
                ATTR_DATE: backup.date,
                ATTR_TYPE: backup.sys_type,
                ATTR_SIZE: backup.size,
                ATTR_LOCATON: backup.location,
                ATTR_PROTECTED: backup.protected,
                ATTR_COMPRESSED: backup.compressed,
                ATTR_CONTENT: {
@@ -137,7 +163,7 @@ class APIBackups(CoreSysAttributes):
        self.sys_backups.save_data()

    @api_process
    async def reload(self, request):
    async def reload(self, _):
        """Reload backup list."""
        await asyncio.shield(self.sys_backups.reload())
        return True
@@ -168,46 +194,140 @@ class APIBackups(CoreSysAttributes):
            ATTR_PROTECTED: backup.protected,
            ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
            ATTR_HOMEASSISTANT: backup.homeassistant_version,
            ATTR_LOCATON: backup.location,
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: backup.repositories,
            ATTR_FOLDERS: backup.folders,
            ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
        }

    def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
        """Change location field to mount if necessary."""
        if not body.get(ATTR_LOCATON):
            return body

        body[ATTR_LOCATON] = self.sys_mounts.get(body[ATTR_LOCATON])
        if body[ATTR_LOCATON].usage != MountUsage.BACKUP:
            raise APIError(
                f"Mount {body[ATTR_LOCATON].name} is not used for backups, cannot backup to there"
            )

        return body

    async def _background_backup_task(
        self, backup_method: Callable, *args, **kwargs
    ) -> tuple[asyncio.Task, str]:
        """Start backup task in background and return task and job ID."""
        event = asyncio.Event()
        job, backup_task = self.sys_jobs.schedule_job(
            backup_method, JobSchedulerOptions(), *args, **kwargs
        )

        async def release_on_freeze(new_state: CoreState):
            if new_state == CoreState.FREEZE:
                event.set()

        # Wait for system to get into freeze state before returning
        # If the backup fails validation it will raise before getting there
        listener = self.sys_bus.register_event(
            BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
        )
        try:
            await asyncio.wait(
                (
                    backup_task,
                    self.sys_create_task(event.wait()),
                ),
                return_when=asyncio.FIRST_COMPLETED,
            )
            return (backup_task, job.uuid)
        finally:
            self.sys_bus.remove_listener(listener)

    @api_process
    async def backup_full(self, request):
        """Create full backup."""
        body = await api_validate(SCHEMA_BACKUP_FULL, request)
        backup = await asyncio.shield(self.sys_backups.do_backup_full(**body))
        background = body.pop(ATTR_BACKGROUND)
        backup_task, job_id = await self._background_backup_task(
            self.sys_backups.do_backup_full, **self._location_to_mount(body)
        )

        if background and not backup_task.done():
            return {ATTR_JOB_ID: job_id}

        backup: Backup = await backup_task
        if backup:
            return {ATTR_SLUG: backup.slug}
        return False
            return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
        raise APIError(
            f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
            job_id=job_id,
        )

    @api_process
    async def backup_partial(self, request):
        """Create a partial backup."""
        body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
        backup = await asyncio.shield(self.sys_backups.do_backup_partial(**body))
        background = body.pop(ATTR_BACKGROUND)
        backup_task, job_id = await self._background_backup_task(
            self.sys_backups.do_backup_partial, **self._location_to_mount(body)
        )

        if background and not backup_task.done():
            return {ATTR_JOB_ID: job_id}

        backup: Backup = await backup_task
        if backup:
            return {ATTR_SLUG: backup.slug}
        return False
            return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
        raise APIError(
            f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
            job_id=job_id,
        )

    @api_process
    async def restore_full(self, request):
        """Full restore of a backup."""
        backup = self._extract_slug(request)
        body = await api_validate(SCHEMA_RESTORE_FULL, request)
        background = body.pop(ATTR_BACKGROUND)
        restore_task, job_id = await self._background_backup_task(
            self.sys_backups.do_restore_full, backup, **body
        )

        return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))
        if background and not restore_task.done() or await restore_task:
            return {ATTR_JOB_ID: job_id}
        raise APIError(
            f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
            job_id=job_id,
        )

    @api_process
    async def restore_partial(self, request):
        """Partial restore a backup."""
        backup = self._extract_slug(request)
        body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
        background = body.pop(ATTR_BACKGROUND)
        restore_task, job_id = await self._background_backup_task(
            self.sys_backups.do_restore_partial, backup, **body
        )

        return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
        if background and not restore_task.done() or await restore_task:
            return {ATTR_JOB_ID: job_id}
        raise APIError(
            f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
            job_id=job_id,
        )

    @api_process
    async def freeze(self, request):
        """Initiate manual freeze for external backup."""
        body = await api_validate(SCHEMA_FREEZE, request)
        await asyncio.shield(self.sys_backups.freeze_all(**body))

    @api_process
    async def thaw(self, request):
        """Begin thaw after manual freeze."""
        await self.sys_backups.thaw_all()

    @api_process
    async def remove(self, request):
@@ -243,6 +363,8 @@ class APIBackups(CoreSysAttributes):
                    backup.write(chunk)

        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            _LOGGER.error("Can't write new backup file: %s", err)
            return False
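The background flag above changes the contract of the backup and restore endpoints: the work is scheduled through sys_jobs.schedule_job, the handler waits only until the system reaches CoreState.FREEZE, and the caller gets a job_id to follow instead of a long-blocking request. A client-side sketch of that flow (the "data"/"done" fields of the jobs API response are assumed here, they are not shown in this diff):

import asyncio

import aiohttp


async def full_backup_in_background(token: str) -> str:
    headers = {"Authorization": f"Bearer {token}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        # Returns as soon as the freeze has started; the backup keeps running.
        async with session.post(
            "http://supervisor/backups/new/full", json={"background": True}
        ) as resp:
            job_id = (await resp.json())["data"]["job_id"]

        # Poll the jobs API until the scheduled backup job reports done.
        while True:
            async with session.get(f"http://supervisor/jobs/{job_id}") as resp:
                job = (await resp.json())["data"]
            if job.get("done"):
                return job_id
            await asyncio.sleep(5)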
@@ -1,11 +1,14 @@
"""Const for API."""

from enum import StrEnum

CONTENT_TYPE_BINARY = "application/octet-stream"
CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_PNG = "image/png"
CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
CONTENT_TYPE_X_LOG = "text/x-log"

COOKIE_INGRESS = "ingress_session"

@@ -13,6 +16,9 @@ ATTR_AGENT_VERSION = "agent_version"
ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_ATTRIBUTES = "attributes"
ATTR_AVAILABLE_UPDATES = "available_updates"
ATTR_BACKGROUND = "background"
ATTR_BOOT_SLOT = "boot_slot"
ATTR_BOOT_SLOTS = "boot_slots"
ATTR_BOOT_TIMESTAMP = "boot_timestamp"
ATTR_BOOTS = "boots"
ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
@@ -23,30 +29,49 @@ ATTR_CONNECTION_BUS = "connection_bus"
ATTR_DATA_DISK = "data_disk"
ATTR_DEVICE = "device"
ATTR_DEV_PATH = "dev_path"
ATTR_DISK_LED = "disk_led"
ATTR_DISKS = "disks"
ATTR_DRIVES = "drives"
ATTR_DT_SYNCHRONIZED = "dt_synchronized"
ATTR_DT_UTC = "dt_utc"
ATTR_EJECTABLE = "ejectable"
ATTR_FALLBACK = "fallback"
ATTR_FILESYSTEMS = "filesystems"
ATTR_HEARTBEAT_LED = "heartbeat_led"
ATTR_GROUP_IDS = "group_ids"
ATTR_IDENTIFIERS = "identifiers"
ATTR_IS_ACTIVE = "is_active"
ATTR_IS_OWNER = "is_owner"
ATTR_JOB_ID = "job_id"
ATTR_JOBS = "jobs"
ATTR_LLMNR = "llmnr"
ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
ATTR_LOCAL_ONLY = "local_only"
ATTR_MDNS = "mdns"
ATTR_MODEL = "model"
ATTR_MOUNTS = "mounts"
ATTR_MOUNT_POINTS = "mount_points"
ATTR_PANEL_PATH = "panel_path"
ATTR_POWER_LED = "power_led"
ATTR_REMOVABLE = "removable"
ATTR_REMOVE_CONFIG = "remove_config"
ATTR_REVISION = "revision"
ATTR_SAFE_MODE = "safe_mode"
ATTR_SEAT = "seat"
ATTR_SIGNED = "signed"
ATTR_STARTUP_TIME = "startup_time"
ATTR_STATUS = "status"
ATTR_SUBSYSTEM = "subsystem"
ATTR_SYSFS = "sysfs"
ATTR_SYSTEM_HEALTH_LED = "system_health_led"
ATTR_TIME_DETECTED = "time_detected"
ATTR_UPDATE_TYPE = "update_type"
ATTR_USAGE = "usage"
ATTR_USE_NTP = "use_ntp"
ATTR_USERS = "users"
ATTR_VENDOR = "vendor"
ATTR_VIRTUALIZATION = "virtualization"


class BootSlot(StrEnum):
    """Boot slots used by HAOS."""

    A = "A"
    B = "B"
@@ -1,6 +1,9 @@
"""Init file for Supervisor network RESTful API."""
import logging

import voluptuous as vol

from ..addons.addon import Addon
from ..const import (
    ATTR_ADDON,
    ATTR_CONFIG,
@@ -9,16 +12,18 @@ from ..const import (
    ATTR_SERVICES,
    ATTR_UUID,
    REQUEST_FROM,
    AddonState,
)
from ..coresys import CoreSysAttributes
from ..discovery.validate import valid_discovery_service
from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate, require_home_assistant

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_DISCOVERY = vol.Schema(
    {
        vol.Required(ATTR_SERVICE): valid_discovery_service,
        vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
        vol.Required(ATTR_SERVICE): str,
        vol.Required(ATTR_CONFIG): dict,
    }
)
@@ -36,19 +41,19 @@ class APIDiscovery(CoreSysAttributes):
    @api_process
    @require_home_assistant
    async def list(self, request):
        """Show register services."""

        """Show registered and available services."""
        # Get available discovery
        discovery = []
        for message in self.sys_discovery.list_messages:
            discovery.append(
                {
                    ATTR_ADDON: message.addon,
                    ATTR_SERVICE: message.service,
                    ATTR_UUID: message.uuid,
                    ATTR_CONFIG: message.config,
                }
            )
        discovery = [
            {
                ATTR_ADDON: message.addon,
                ATTR_SERVICE: message.service,
                ATTR_UUID: message.uuid,
                ATTR_CONFIG: message.config,
            }
            for message in self.sys_discovery.list_messages
            if (addon := self.sys_addons.get(message.addon, local_only=True))
            and addon.state == AddonState.STARTED
        ]

        # Get available services/add-ons
        services = {}
@@ -62,11 +67,19 @@ class APIDiscovery(CoreSysAttributes):
    async def set_discovery(self, request):
        """Write data into a discovery pipeline."""
        body = await api_validate(SCHEMA_DISCOVERY, request)
        addon = request[REQUEST_FROM]
        addon: Addon = request[REQUEST_FROM]
        service = body[ATTR_SERVICE]

        # Access?
        if body[ATTR_SERVICE] not in addon.discovery:
            raise APIForbidden("Can't use discovery!")
            _LOGGER.error(
                "Add-on %s attempted to send discovery for service %s which is not listed in its config. Please report this to the maintainer of the add-on",
                addon.name,
                service,
            )
            raise APIForbidden(
                "Add-ons must list services they provide via discovery in their config!"
            )

        # Process discovery message
        message = self.sys_discovery.send(addon, **body)
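The guard in set_discovery above now logs an error and refuses the message when the calling add-on does not declare the service. In practice the service has to appear in the add-on's validated config before POST /discovery succeeds; a minimal sketch (plain key names instead of ATTR_* constants):

# After validation the add-on's config contains, for example:
addon_config = {
    "name": "Example MQTT broker",
    "discovery": ["mqtt"],  # services this add-on is allowed to announce
}

# set_discovery only accepts body["service"] values present in
# addon.discovery; anything else raises APIForbidden.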
@@ -26,8 +26,8 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import dns_server_list, version_tag
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -105,11 +105,6 @@ class APICoreDNS(CoreSysAttributes):
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_plugins.dns.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return DNS Docker logs."""
        return self.sys_plugins.dns.logs()

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart CoreDNS plugin."""
@@ -16,7 +16,7 @@ from ..const import (
    ATTR_SYSTEM,
)
from ..coresys import CoreSysAttributes
from ..dbus.udisks2 import UDisks2
from ..dbus.udisks2 import UDisks2Manager
from ..dbus.udisks2.block import UDisks2Block
from ..dbus.udisks2.drive import UDisks2Drive
from ..hardware.data import Device
@@ -72,7 +72,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
    }


def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]:
def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
    """Return a dict with information of a disk to be used in the API."""
    return {
        ATTR_VENDOR: drive.vendor,
@@ -12,6 +12,7 @@ from ..const import (
    ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT,
    ATTR_BACKUP,
    ATTR_BACKUPS_EXCLUDE_DATABASE,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_BOOT,
@@ -35,8 +36,8 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import docker_image, network_port, version_tag
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
from .const import ATTR_SAFE_MODE
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -51,6 +52,7 @@ SCHEMA_OPTIONS = vol.Schema(
        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str),
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
        vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE): vol.Boolean(),
    }
)

@@ -61,6 +63,12 @@ SCHEMA_UPDATE = vol.Schema(
    }
)

SCHEMA_RESTART = vol.Schema(
    {
        vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
    }
)


class APIHomeAssistant(CoreSysAttributes):
    """Handle RESTful API for Home Assistant functions."""
@@ -82,6 +90,7 @@ class APIHomeAssistant(CoreSysAttributes):
            ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
            ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
            ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
            ATTR_BACKUPS_EXCLUDE_DATABASE: self.sys_homeassistant.backups_exclude_database,
        }

    @api_process
@@ -91,6 +100,9 @@ class APIHomeAssistant(CoreSysAttributes):

        if ATTR_IMAGE in body:
            self.sys_homeassistant.image = body[ATTR_IMAGE]
            self.sys_homeassistant.override_image = (
                self.sys_homeassistant.image != self.sys_homeassistant.default_image
            )

        if ATTR_BOOT in body:
            self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -113,6 +125,11 @@ class APIHomeAssistant(CoreSysAttributes):
        if ATTR_AUDIO_OUTPUT in body:
            self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]

        if ATTR_BACKUPS_EXCLUDE_DATABASE in body:
            self.sys_homeassistant.backups_exclude_database = body[
                ATTR_BACKUPS_EXCLUDE_DATABASE
            ]

        self.sys_homeassistant.save_data()

    @api_process
@@ -156,19 +173,22 @@ class APIHomeAssistant(CoreSysAttributes):
        return asyncio.shield(self.sys_homeassistant.core.start())

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
    async def restart(self, request: web.Request) -> None:
        """Restart Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.core.restart())
        body = await api_validate(SCHEMA_RESTART, request)

        await asyncio.shield(
            self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
        )

    @api_process
    def rebuild(self, request: web.Request) -> Awaitable[None]:
    async def rebuild(self, request: web.Request) -> None:
        """Rebuild Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.core.rebuild())
        body = await api_validate(SCHEMA_RESTART, request)

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return Home Assistant Docker logs."""
        return self.sys_homeassistant.core.logs()
        await asyncio.shield(
            self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])
        )

    @api_process
    async def check(self, request: web.Request) -> None:
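SCHEMA_RESTART above gives /core/restart and /core/rebuild an optional safe_mode flag that is passed through to core.restart/core.rebuild. A client sketch (token placeholder; endpoint shape from the handlers above):

import aiohttp


async def restart_core_safe(token: str) -> None:
    # safe_mode=True restarts Home Assistant Core in its safe mode;
    # omitting the body keeps the default of False.
    async with aiohttp.ClientSession() as session:
        await session.post(
            "http://supervisor/core/restart",
            json={"safe_mode": True},
            headers={"Authorization": f"Bearer {token}"},
        )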
@@ -1,4 +1,5 @@
"""Init file for Supervisor host RESTful API."""

import asyncio
from contextlib import suppress
import logging
@@ -28,7 +29,14 @@ from ..const import (
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, HostLogError
from ..host.const import PARAM_BOOT_ID, PARAM_FOLLOW, PARAM_SYSLOG_IDENTIFIER
from ..host.const import (
    PARAM_BOOT_ID,
    PARAM_FOLLOW,
    PARAM_SYSLOG_IDENTIFIER,
    LogFormat,
    LogFormatter,
)
from ..utils.systemd_journal import journal_logs_reader
from .const import (
    ATTR_AGENT_VERSION,
    ATTR_APPARMOR_VERSION,
@@ -42,9 +50,11 @@ from .const import (
    ATTR_LLMNR_HOSTNAME,
    ATTR_STARTUP_TIME,
    ATTR_USE_NTP,
    ATTR_VIRTUALIZATION,
    CONTENT_TYPE_TEXT,
    CONTENT_TYPE_X_LOG,
)
from .utils import api_process, api_validate
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -65,6 +75,7 @@ class APIHost(CoreSysAttributes):
            ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
            ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
            ATTR_CHASSIS: self.sys_host.info.chassis,
            ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
            ATTR_CPE: self.sys_host.info.cpe,
            ATTR_DEPLOYMENT: self.sys_host.info.deployment,
            ATTR_DISK_FREE: self.sys_host.info.free_space,
@@ -153,11 +164,11 @@ class APIHost(CoreSysAttributes):
            raise APIError() from err
        return possible_offset

    @api_process
    async def advanced_logs(
    async def advanced_logs_handler(
        self, request: web.Request, identifier: str | None = None, follow: bool = False
    ) -> web.StreamResponse:
        """Return systemd-journald logs."""
        log_formatter = LogFormatter.PLAIN
        params = {}
        if identifier:
            params[PARAM_SYSLOG_IDENTIFIER] = identifier
@@ -165,6 +176,8 @@ class APIHost(CoreSysAttributes):
            params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
        else:
            params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
            # host logs should be always verbose, no matter what Accept header is used
            log_formatter = LogFormatter.VERBOSE

        if BOOTID in request.match_info:
            params[PARAM_BOOT_ID] = await self._get_boot_id(
@@ -175,28 +188,40 @@ class APIHost(CoreSysAttributes):

        if ACCEPT in request.headers and request.headers[ACCEPT] not in [
            CONTENT_TYPE_TEXT,
            CONTENT_TYPE_X_LOG,
            "*/*",
        ]:
            raise APIError(
                "Invalid content type requested. Only text/plain supported for now."
                "Invalid content type requested. Only text/plain and text/x-log "
                "supported for now."
            )

        if request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
            log_formatter = LogFormatter.VERBOSE

        if RANGE in request.headers:
            range_header = request.headers.get(RANGE)
        else:
            range_header = f"entries=:-{DEFAULT_RANGE}:"

        async with self.sys_host.logs.journald_logs(
            params=params, range_header=range_header
            params=params, range_header=range_header, accept=LogFormat.JOURNAL
        ) as resp:
            try:
                response = web.StreamResponse()
                response.content_type = CONTENT_TYPE_TEXT
                await response.prepare(request)
                async for data in resp.content:
                    await response.write(data)
                async for line in journal_logs_reader(resp, log_formatter):
                    await response.write(line.encode("utf-8") + b"\n")
            except ConnectionResetError as ex:
                raise APIError(
                    "Connection reset when trying to fetch data from systemd-journald."
                ) from ex
            return response

    @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
    async def advanced_logs(
        self, request: web.Request, identifier: str | None = None, follow: bool = False
    ) -> web.StreamResponse:
        """Return systemd-journald logs. Wrapped as standard API handler."""
        return await self.advanced_logs_handler(request, identifier, follow)
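In the reworked handler above, the Accept header picks the formatter: text/x-log selects LogFormatter.VERBOSE, text/plain keeps PLAIN, and full host logs (no identifier) are always verbose. A request sketch against the host logs route (header semantics from the diff; the Range value mirrors the entries=:-{DEFAULT_RANGE}: fallback, with 99 as an assumed count):

import aiohttp


async def fetch_verbose_logs(token: str) -> str:
    async with aiohttp.ClientSession() as session:
        async with session.get(
            "http://supervisor/host/logs",
            headers={
                "Authorization": f"Bearer {token}",
                "Accept": "text/x-log",  # VERBOSE formatter; "text/plain" -> PLAIN
                "Range": "entries=:-99:",  # shape of the DEFAULT_RANGE fallback
            },
        ) as resp:
            return await resp.text()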
@@ -21,11 +21,18 @@ from ..const import (
|
||||
ATTR_ICON,
|
||||
ATTR_PANELS,
|
||||
ATTR_SESSION,
|
||||
ATTR_SESSION_DATA_USER_ID,
|
||||
ATTR_TITLE,
|
||||
HEADER_REMOTE_USER_DISPLAY_NAME,
|
||||
HEADER_REMOTE_USER_ID,
|
||||
HEADER_REMOTE_USER_NAME,
|
||||
HEADER_TOKEN,
|
||||
HEADER_TOKEN_OLD,
|
||||
IngressSessionData,
|
||||
IngressSessionDataUser,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import HomeAssistantAPIError
|
||||
from .const import COOKIE_INGRESS
|
||||
from .utils import api_process, api_validate, require_home_assistant
|
||||
|
||||
@@ -33,10 +40,46 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
VALIDATE_SESSION_DATA = vol.Schema({ATTR_SESSION: str})
|
||||
|
||||
"""Expected optional payload of create session request"""
|
||||
SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_SESSION_DATA_USER_ID): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
# from https://github.com/aio-libs/aiohttp/blob/8ae650bee4add9f131d49b96a0a150311ea58cd1/aiohttp/helpers.py#L1059C1-L1079C1
|
||||
def must_be_empty_body(method: str, code: int) -> bool:
|
||||
"""Check if a request must return an empty body."""
|
||||
return (
|
||||
status_code_must_be_empty_body(code)
|
||||
or method_must_be_empty_body(method)
|
||||
or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
|
||||
)
|
||||
|
||||
|
||||
def method_must_be_empty_body(method: str) -> bool:
|
||||
"""Check if a method must return an empty body."""
|
||||
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
|
||||
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
|
||||
return method.upper() == hdrs.METH_HEAD
|
||||
|
||||
|
||||
def status_code_must_be_empty_body(code: int) -> bool:
|
||||
"""Check if a status code must return an empty body."""
|
||||
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
|
||||
return code in {204, 304} or 100 <= code < 200
|
||||
|
||||
|
||||
class APIIngress(CoreSysAttributes):
|
||||
"""Ingress view to handle add-on webui routing."""
|
||||
|
||||
_list_of_users: list[IngressSessionDataUser]
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize APIIngress."""
|
||||
self._list_of_users = []
|
||||
|
||||
def _extract_addon(self, request: web.Request) -> Addon:
|
||||
"""Return addon, throw an exception it it doesn't exist."""
|
||||
token = request.match_info.get("token")
|
||||
@@ -71,7 +114,19 @@ class APIIngress(CoreSysAttributes):
    @require_home_assistant
    async def create_session(self, request: web.Request) -> dict[str, Any]:
        """Create a new session."""
-        session = self.sys_ingress.create_session()
+        schema_ingress_config_session_data = await api_validate(
+            SCHEMA_INGRESS_CREATE_SESSION_DATA, request
+        )
+        data: IngressSessionData | None = None
+
+        if ATTR_SESSION_DATA_USER_ID in schema_ingress_config_session_data:
+            user = await self._find_user_by_id(
+                schema_ingress_config_session_data[ATTR_SESSION_DATA_USER_ID]
+            )
+            if user:
+                data = IngressSessionData(user)
+
+        session = self.sys_ingress.create_session(data)
        return {ATTR_SESSION: session}

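Note: with this hunk, Home Assistant Core can attach a user to the ingress session it creates. A hedged client sketch; the response envelope follows the Supervisor's usual api_process wrapping, but the exact call shape here is illustrative, not taken from this diff:

    import aiohttp

    async def create_ingress_session(token: str, user_id: str | None = None) -> str:
        # Optional body is validated against SCHEMA_INGRESS_CREATE_SESSION_DATA,
        # i.e. {"user_id": "<ha user id>"}; omitting it keeps the old behavior.
        async with aiohttp.ClientSession() as http:
            async with http.post(
                "http://supervisor/ingress/session",
                headers={"Authorization": f"Bearer {token}"},
                json={"user_id": user_id} if user_id else None,
            ) as resp:
                return (await resp.json())["data"]["session"]
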
    @api_process
@@ -85,7 +140,6 @@ class APIIngress(CoreSysAttributes):
            _LOGGER.warning("No valid ingress session %s", data[ATTR_SESSION])
            raise HTTPUnauthorized()

-    @require_home_assistant
    async def handler(
        self, request: web.Request
    ) -> web.Response | web.StreamResponse | web.WebSocketResponse:
@@ -100,13 +154,14 @@ class APIIngress(CoreSysAttributes):
        # Process requests
        addon = self._extract_addon(request)
        path = request.match_info.get("path")
+        session_data = self.sys_ingress.get_session_data(session)
        try:
            # Websocket
            if _is_websocket(request):
-                return await self._handle_websocket(request, addon, path)
+                return await self._handle_websocket(request, addon, path, session_data)

            # Request
-            return await self._handle_request(request, addon, path)
+            return await self._handle_request(request, addon, path, session_data)

        except aiohttp.ClientError as err:
            _LOGGER.error("Ingress error: %s", err)
@@ -114,7 +169,11 @@ class APIIngress(CoreSysAttributes):
            raise HTTPBadGateway()

    async def _handle_websocket(
-        self, request: web.Request, addon: Addon, path: str
+        self,
+        request: web.Request,
+        addon: Addon,
+        path: str,
+        session_data: IngressSessionData | None,
    ) -> web.WebSocketResponse:
        """Ingress route for websocket."""
        if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
@@ -132,7 +191,7 @@ class APIIngress(CoreSysAttributes):

        # Preparing
        url = self._create_url(addon, path)
-        source_header = _init_header(request, addon)
+        source_header = _init_header(request, addon, session_data)

        # Support GET query
        if request.query_string:
@@ -149,8 +208,8 @@ class APIIngress(CoreSysAttributes):
        # Proxy requests
        await asyncio.wait(
            [
-                _websocket_forward(ws_server, ws_client),
-                _websocket_forward(ws_client, ws_server),
+                self.sys_create_task(_websocket_forward(ws_server, ws_client)),
+                self.sys_create_task(_websocket_forward(ws_client, ws_server)),
            ],
            return_when=asyncio.FIRST_COMPLETED,
        )
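Note: the change wraps the two forwarding coroutines in tasks because asyncio.wait() stopped accepting bare coroutines in Python 3.11. A self-contained sketch of the same bidirectional-pump pattern, with queues standing in for the two websocket directions (names hypothetical):

    import asyncio

    async def pump(src: asyncio.Queue, dst: asyncio.Queue) -> None:
        # Toy stand-in for _websocket_forward: copy items until a None sentinel.
        while (item := await src.get()) is not None:
            dst.put_nowait(item)

    async def main() -> None:
        c2s_src, c2s_dst = asyncio.Queue(), asyncio.Queue()
        s2c_src, s2c_dst = asyncio.Queue(), asyncio.Queue()
        c2s_src.put_nowait("hello")
        c2s_src.put_nowait(None)
        # Since Python 3.11 asyncio.wait() refuses bare coroutines, so each
        # pump is wrapped in a Task -- the reason for sys_create_task() above.
        done, pending = await asyncio.wait(
            [asyncio.create_task(pump(c2s_src, c2s_dst)),
             asyncio.create_task(pump(s2c_src, s2c_dst))],
            return_when=asyncio.FIRST_COMPLETED,
        )
        for task in pending:
            task.cancel()
        print(c2s_dst.get_nowait())  # -> "hello"

    asyncio.run(main())
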
@@ -158,11 +217,15 @@ class APIIngress(CoreSysAttributes):
        return ws_server

    async def _handle_request(
-        self, request: web.Request, addon: Addon, path: str
+        self,
+        request: web.Request,
+        addon: Addon,
+        path: str,
+        session_data: IngressSessionData | None,
    ) -> web.Response | web.StreamResponse:
        """Ingress route for request."""
        url = self._create_url(addon, path)
-        source_header = _init_header(request, addon)
+        source_header = _init_header(request, addon, session_data)

        # Passing the raw stream breaks requests for some webservers
        # since we just need it for POST requests really, for all other methods
@@ -185,10 +248,18 @@ class APIIngress(CoreSysAttributes):
            skip_auto_headers={hdrs.CONTENT_TYPE},
        ) as result:
            headers = _response_header(result)
+            # Avoid parsing content_type in simple cases for better performance
+            if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
+                content_type = (maybe_content_type.partition(";"))[0].strip()
+            else:
+                content_type = result.content_type

            # Simple request
            if (
-                hdrs.CONTENT_LENGTH in result.headers
+                # empty body responses should not be streamed,
+                # otherwise aiohttp < 3.9.0 may generate
+                # an invalid "0\r\n\r\n" chunk instead of an empty response.
+                must_be_empty_body(request.method, result.status)
+                or hdrs.CONTENT_LENGTH in result.headers
                and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
            ):
                # Return Response
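Note: the new condition leans on Python operator precedence (and binds tighter than or): buffer the response when it must be body-less, or when a Content-Length header is present and under ~4 MB; everything else is streamed. A quick self-check of that grouping:

    def should_buffer(empty_body: bool, has_length: bool, length: int) -> bool:
        # Mirrors: must_be_empty_body(...) or CONTENT_LENGTH present and < 4_194_000
        return empty_body or has_length and length < 4_194_000

    assert should_buffer(True, False, 0)          # HEAD/204/304: small Response
    assert should_buffer(False, True, 1024)       # small known size: buffered
    assert not should_buffer(False, True, 10**7)  # large known size: streamed
    assert not should_buffer(False, False, 0)     # unknown size: streamed
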
@@ -196,13 +267,13 @@ class APIIngress(CoreSysAttributes):
                return web.Response(
                    headers=headers,
                    status=result.status,
-                    content_type=result.content_type,
+                    content_type=content_type,
                    body=body,
                )

            # Stream response
            response = web.StreamResponse(status=result.status, headers=headers)
-            response.content_type = result.content_type
+            response.content_type = content_type

            try:
                await response.prepare(request)
@@ -218,11 +289,35 @@ class APIIngress(CoreSysAttributes):

        return response

+    async def _find_user_by_id(self, user_id: str) -> IngressSessionDataUser | None:
+        """Find user object by the user's ID."""
+        try:
+            list_of_users = await self.sys_homeassistant.get_users()
+        except (HomeAssistantAPIError, TypeError) as err:
+            _LOGGER.error(
+                "%s error occurred while requesting list of users: %s", type(err), err
+            )
+            return None
+
+        if list_of_users is not None:
+            self._list_of_users = list_of_users
+
+        return next((user for user in self._list_of_users if user.id == user_id), None)
+

-def _init_header(request: web.Request, addon: str) -> CIMultiDict | dict[str, str]:
+def _init_header(
+    request: web.Request, addon: Addon, session_data: IngressSessionData | None
+) -> CIMultiDict | dict[str, str]:
    """Create initial header."""
    headers = {}

+    if session_data is not None:
+        headers[HEADER_REMOTE_USER_ID] = session_data.user.id
+        if session_data.user.username is not None:
+            headers[HEADER_REMOTE_USER_NAME] = session_data.user.username
+        if session_data.user.display_name is not None:
+            headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name
+
    # filter flags
    for name, value in request.headers.items():
        if name in (

@@ -235,6 +330,9 @@ def _init_header(request: web.Request, addon: str) -> CIMultiDict | dict[str, st
            hdrs.SEC_WEBSOCKET_KEY,
            istr(HEADER_TOKEN),
            istr(HEADER_TOKEN_OLD),
+            istr(HEADER_REMOTE_USER_ID),
+            istr(HEADER_REMOTE_USER_NAME),
+            istr(HEADER_REMOTE_USER_DISPLAY_NAME),
        ):
            continue
        headers[name] = value

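Note: with session data attached, the proxied request now tells the add-on who the user is, while the same headers are stripped from the incoming client request (the istr() filter above) so clients cannot spoof them. An illustration of the resulting headers; the concrete header names are assumptions based on the constant names, not verified from this diff:

    # Hypothetical headers an add-on receives when the session carries a user:
    session_user_headers = {
        "X-Remote-User-Id": "abc123",              # HEADER_REMOTE_USER_ID
        "X-Remote-User-Name": "jane",              # HEADER_REMOTE_USER_NAME
        "X-Remote-User-Display-Name": "Jane Doe",  # HEADER_REMOTE_USER_DISPLAY_NAME
    }
    # Any of these arriving from the browser are dropped before proxying.
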
supervisor/api/jobs.py:
@@ -6,7 +6,10 @@ from aiohttp import web
import voluptuous as vol

from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..jobs import SupervisorJob
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .const import ATTR_JOBS
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -19,11 +22,47 @@ SCHEMA_OPTIONS = vol.Schema(
class APIJobs(CoreSysAttributes):
    """Handle RESTful API for OS functions."""

    def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
        """Return current job tree."""
        jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
        for job in self.sys_jobs.jobs:
            if job.internal:
                continue

            if job.parent_id not in jobs_by_parent:
                jobs_by_parent[job.parent_id] = [job]
            else:
                jobs_by_parent[job.parent_id].append(job)

        job_list: list[dict[str, Any]] = []
        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
            [(job_list, start)]
            if start
            else [(job_list, job) for job in jobs_by_parent.get(None, [])]
        )

        while queue:
            (current_list, current_job) = queue.pop(0)
            child_jobs: list[dict[str, Any]] = []

            # We remove parent_id and instead use that info to represent jobs as a tree
            job_dict = current_job.as_dict() | {"child_jobs": child_jobs}
            job_dict.pop("parent_id")
            current_list.append(job_dict)

            if current_job.uuid in jobs_by_parent:
                queue.extend(
                    [(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
                )

        return job_list

    @api_process
    async def info(self, request: web.Request) -> dict[str, Any]:
        """Return JobManager information."""
        return {
            ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions,
            ATTR_JOBS: self._list_jobs(),
        }

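Note: _list_jobs is a breadth-first walk: jobs are first bucketed by parent_id, then the queue carries (destination list, job) pairs so each job dict is appended into its parent's child_jobs list. The same idea as a self-contained sketch (the (uuid, parent_id) tuples are a simplified stand-in for SupervisorJob):

    from collections import defaultdict

    jobs = [("a", None), ("b", "a"), ("c", "a"), ("d", None)]

    by_parent: dict[str | None, list[tuple[str, str | None]]] = defaultdict(list)
    for job in jobs:
        by_parent[job[1]].append(job)

    tree: list[dict] = []
    queue = [(tree, job) for job in by_parent.get(None, [])]
    while queue:
        dest, (uuid, _parent) = queue.pop(0)
        children: list[dict] = []
        dest.append({"uuid": uuid, "child_jobs": children})
        queue.extend((children, job) for job in by_parent.get(uuid, []))

    print(tree)
    # [{'uuid': 'a', 'child_jobs': [{'uuid': 'b', ...}, {'uuid': 'c', ...}]},
    #  {'uuid': 'd', 'child_jobs': []}]
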
    @api_process
@@ -42,3 +81,19 @@ class APIJobs(CoreSysAttributes):
    async def reset(self, request: web.Request) -> None:
        """Reset options for JobManager."""
        self.sys_jobs.reset_data()

+    @api_process
+    async def job_info(self, request: web.Request) -> dict[str, Any]:
+        """Get details of a job by ID."""
+        job = self.sys_jobs.get_job(request.match_info.get("uuid"))
+        return self._list_jobs(job)[0]
+
+    @api_process
+    async def remove_job(self, request: web.Request) -> None:
+        """Remove a completed job."""
+        job = self.sys_jobs.get_job(request.match_info.get("uuid"))
+
+        if not job.done:
+            raise APIError(f"Job {job.uuid} is not done!")
+
+        self.sys_jobs.remove_job(job)

supervisor/api/middleware/security.py:
@@ -6,7 +6,9 @@ from urllib.parse import unquote

from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion

from ...addons.const import RE_SLUG
from ...const import (
    REQUEST_FROM,
    ROLE_ADMIN,

@@ -17,12 +19,24 @@ from ...const import (
    CoreState,
)
from ...coresys import CoreSys, CoreSysAttributes
from ...utils import version_is_new_enough
from ..utils import api_return_error, excract_supervisor_token

_LOGGER: logging.Logger = logging.getLogger(__name__)
_CORE_VERSION: Final = AwesomeVersion("2023.3.4")

# fmt: off

_CORE_FRONTEND_PATHS: Final = (
    r"|/app/.*\.(?:js|gz|json|map|woff2)"
    r"|/(store/)?addons/" + RE_SLUG + r"/(logo|icon)"
)

CORE_FRONTEND: Final = re.compile(
    r"^(?:" + _CORE_FRONTEND_PATHS + r")$"
)


# Block Anytime
BLACKLIST: Final = re.compile(
    r"^(?:"
@@ -39,7 +53,9 @@ NO_SECURITY_CHECK: Final = re.compile(
    r"|/core/api/.*"
    r"|/core/websocket"
    r"|/supervisor/ping"
-    r")$"
+    r"|/ingress/[-_A-Za-z0-9]+/.*"
+    + _CORE_FRONTEND_PATHS
+    + r")$"
)

# Observer allow API calls
@@ -84,9 +100,11 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
    ROLE_MANAGER: re.compile(
        r"^(?:"
        r"|/.+/info"
-        r"|/addons(?:/[^/]+/(?!security).+|/reload)?"
+        r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
        r"|/audio/.+"
+        r"|/auth/cache"
        r"|/available_updates"
+        r"|/backups.*"
        r"|/cli/.+"
        r"|/core/.+"
        r"|/dns/.+"

@@ -96,16 +114,17 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
        r"|/hassos/.+"
        r"|/homeassistant/.+"
        r"|/host/.+"
+        r"|/mounts.*"
        r"|/multicast/.+"
        r"|/network/.+"
        r"|/observer/.+"
-        r"|/os/.+"
+        r"|/os/(?!datadisk/wipe).+"
        r"|/refresh_updates"
        r"|/resolution/.+"
-        r"|/backups.*"
+        r"|/security/.+"
        r"|/snapshots.*"
        r"|/store.*"
        r"|/supervisor/.+"
-        r"|/security/.+"
        r")$"
    ),
    ROLE_ADMIN: re.compile(
@@ -180,7 +199,7 @@ class SecurityMiddleware(CoreSysAttributes):
            CoreState.FREEZE,
        ):
            return api_return_error(
-                message=f"System is not ready with state: {self.sys_core.state.value}"
+                message=f"System is not ready with state: {self.sys_core.state}"
            )

        return await handler(request)

@@ -201,6 +220,7 @@ class SecurityMiddleware(CoreSysAttributes):
        # Ignore security check
        if NO_SECURITY_CHECK.match(request.path):
            _LOGGER.debug("Passthrough %s", request.path)
+            request[REQUEST_FROM] = None
            return await handler(request)

        # Not token
@@ -253,3 +273,44 @@ class SecurityMiddleware(CoreSysAttributes):

        _LOGGER.error("Invalid token for access %s", request.path)
        raise HTTPForbidden()

+    @middleware
+    async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
+        """Validate user from Core API proxy."""
+        if request[REQUEST_FROM] != self.sys_homeassistant or version_is_new_enough(
+            self.sys_homeassistant.version, _CORE_VERSION
+        ):
+            return await handler(request)
+
+        authorization_index: int | None = None
+        content_type_index: int | None = None
+        user_request: bool = False
+        admin_request: bool = False
+        ingress_request: bool = False
+
+        for idx, (key, value) in enumerate(request.raw_headers):
+            if key in (b"Authorization", b"X-Hassio-Key"):
+                authorization_index = idx
+            elif key == b"Content-Type":
+                content_type_index = idx
+            elif key == b"X-Hass-User-ID":
+                user_request = True
+            elif key == b"X-Hass-Is-Admin":
+                admin_request = value == b"1"
+            elif key == b"X-Ingress-Path":
+                ingress_request = True
+
+        if (user_request or admin_request) and not ingress_request:
+            return await handler(request)
+
+        is_proxy_request = (
+            authorization_index is not None
+            and content_type_index is not None
+            and content_type_index - authorization_index == 1
+        )
+
+        if (
+            not CORE_FRONTEND.match(request.path) and is_proxy_request
+        ) or ingress_request:
+            raise HTTPBadRequest()
+        return await handler(request)

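Note: this middleware appears to guard requests proxied through older Core versions (below 2023.3.4). The detection heuristic relies on header order: Core's proxy client appends its auth header immediately followed by Content-Type, so adjacency of the two marks a proxied call. A compressed illustration of that check (header tuples hypothetical):

    raw_headers = [  # order matters, as in aiohttp's request.raw_headers
        (b"Host", b"supervisor"),
        (b"Authorization", b"Bearer <token>"),
        (b"Content-Type", b"application/json"),
    ]
    indexes = {key: idx for idx, (key, _) in enumerate(raw_headers)}
    is_proxy_request = (
        b"Authorization" in indexes
        and b"Content-Type" in indexes
        and indexes[b"Content-Type"] - indexes[b"Authorization"] == 1
    )
    print(is_proxy_request)  # True -> treated as a Core-proxied API call
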
supervisor/api/mounts.py (new file, 124 lines)
@@ -0,0 +1,124 @@
"""Inits file for supervisor mounts REST API."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import web
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import ATTR_NAME, ATTR_STATE
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import APIError
|
||||
from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
|
||||
from ..mounts.mount import Mount
|
||||
from ..mounts.validate import SCHEMA_MOUNT_CONFIG
|
||||
from .const import ATTR_MOUNTS
|
||||
from .utils import api_process, api_validate
|
||||
|
||||
SCHEMA_OPTIONS = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_DEFAULT_BACKUP_MOUNT): vol.Maybe(str),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class APIMounts(CoreSysAttributes):
|
||||
"""Handle REST API for mounting options."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request: web.Request) -> dict[str, Any]:
|
||||
"""Return MountManager info."""
|
||||
return {
|
||||
ATTR_DEFAULT_BACKUP_MOUNT: self.sys_mounts.default_backup_mount.name
|
||||
if self.sys_mounts.default_backup_mount
|
||||
else None,
|
||||
ATTR_MOUNTS: [
|
||||
mount.to_dict() | {ATTR_STATE: mount.state}
|
||||
for mount in self.sys_mounts.mounts
|
||||
],
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def options(self, request: web.Request) -> None:
|
||||
"""Set Mount Manager options."""
|
||||
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||
|
||||
if ATTR_DEFAULT_BACKUP_MOUNT in body:
|
||||
name: str | None = body[ATTR_DEFAULT_BACKUP_MOUNT]
|
||||
if name is None:
|
||||
self.sys_mounts.default_backup_mount = None
|
||||
elif (mount := self.sys_mounts.get(name)).usage != MountUsage.BACKUP:
|
||||
raise APIError(
|
||||
f"Mount {name} is not used for backups, cannot use it as default backup mount"
|
||||
)
|
||||
else:
|
||||
self.sys_mounts.default_backup_mount = mount
|
||||
|
||||
self.sys_mounts.save_data()
|
||||
|
||||
@api_process
|
||||
async def create_mount(self, request: web.Request) -> None:
|
||||
"""Create a new mount in supervisor."""
|
||||
body = await api_validate(SCHEMA_MOUNT_CONFIG, request)
|
||||
|
||||
if body[ATTR_NAME] in self.sys_mounts:
|
||||
raise APIError(f"A mount already exists with name {body[ATTR_NAME]}")
|
||||
|
||||
mount = Mount.from_dict(self.coresys, body)
|
||||
await self.sys_mounts.create_mount(mount)
|
||||
|
||||
# If it's a backup mount, reload backups
|
||||
if mount.usage == MountUsage.BACKUP:
|
||||
self.sys_create_task(self.sys_backups.reload())
|
||||
|
||||
# If there's no default backup mount, set it to the new mount
|
||||
if not self.sys_mounts.default_backup_mount:
|
||||
self.sys_mounts.default_backup_mount = mount
|
||||
|
||||
self.sys_mounts.save_data()
|
||||
|
||||
@api_process
|
||||
async def update_mount(self, request: web.Request) -> None:
|
||||
"""Update an existing mount in supervisor."""
|
||||
name = request.match_info.get("mount")
|
||||
name_schema = vol.Schema(
|
||||
{vol.Optional(ATTR_NAME, default=name): name}, extra=vol.ALLOW_EXTRA
|
||||
)
|
||||
body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)
|
||||
|
||||
if name not in self.sys_mounts:
|
||||
raise APIError(f"No mount exists with name {name}")
|
||||
|
||||
mount = Mount.from_dict(self.coresys, body)
|
||||
await self.sys_mounts.create_mount(mount)
|
||||
|
||||
# If it's a backup mount, reload backups
|
||||
if mount.usage == MountUsage.BACKUP:
|
||||
self.sys_create_task(self.sys_backups.reload())
|
||||
|
||||
# If this mount was the default backup mount and isn't for backups any more, remove it
|
||||
elif self.sys_mounts.default_backup_mount == mount:
|
||||
self.sys_mounts.default_backup_mount = None
|
||||
|
||||
self.sys_mounts.save_data()
|
||||
|
||||
@api_process
|
||||
async def delete_mount(self, request: web.Request) -> None:
|
||||
"""Delete an existing mount in supervisor."""
|
||||
name = request.match_info.get("mount")
|
||||
mount = await self.sys_mounts.remove_mount(name)
|
||||
|
||||
# If it was a backup mount, reload backups
|
||||
if mount.usage == MountUsage.BACKUP:
|
||||
self.sys_create_task(self.sys_backups.reload())
|
||||
|
||||
self.sys_mounts.save_data()
|
||||
|
||||
@api_process
|
||||
async def reload_mount(self, request: web.Request) -> None:
|
||||
"""Reload an existing mount in supervisor."""
|
||||
name = request.match_info.get("mount")
|
||||
await self.sys_mounts.reload_mount(name)
|
||||
|
||||
# If it's a backup mount, reload backups
|
||||
if self.sys_mounts.get(name).usage == MountUsage.BACKUP:
|
||||
self.sys_create_task(self.sys_backups.reload())
|
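Note: a hedged sketch of how these handlers could line up with routes; the paths are inferred from handler names and the "mount" match_info key, since the routing table is outside this excerpt:

    # Hypothetical route map inferred from the handlers above:
    #   GET    /mounts                -> APIMounts.info
    #   POST   /mounts/options        -> APIMounts.options  {"default_backup_mount": "nas"}
    #   POST   /mounts                -> APIMounts.create_mount (SCHEMA_MOUNT_CONFIG body)
    #   PUT    /mounts/{mount}        -> APIMounts.update_mount
    #   DELETE /mounts/{mount}        -> APIMounts.delete_mount
    #   POST   /mounts/{mount}/reload -> APIMounts.reload_mount
    example_create_body = {
        "name": "nas",             # ATTR_NAME, must be unique
        "usage": "backup",         # MountUsage.BACKUP triggers a backups reload
        "type": "cifs",            # remaining keys per SCHEMA_MOUNT_CONFIG
        "server": "192.168.1.10",  # illustrative values only
        "share": "backups",
    }
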
supervisor/api/multicast.py:
@@ -23,8 +23,7 @@ from ..const (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -69,11 +68,6 @@ class APIMulticast(CoreSysAttributes):
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_plugins.multicast.update(version))

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Multicast Docker logs."""
-        return self.sys_plugins.multicast.logs()
-
    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart Multicast plugin."""

supervisor/api/network.py:
@@ -1,11 +1,11 @@
"""REST API for network."""
import asyncio
from collections.abc import Awaitable
+from dataclasses import replace
from ipaddress import ip_address, ip_interface
from typing import Any

from aiohttp import web
-import attr
import voluptuous as vol

from ..const import (

@@ -43,8 +43,7 @@ from ..const import (
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, HostNetworkNotFound
-from ..host.const import AuthMethod, InterfaceType, WifiMode
-from ..host.network import (
+from ..host.configuration import (
    AccessPoint,
    Interface,
    InterfaceMethod,

@@ -52,6 +51,7 @@ from ..host.network (
    VlanConfig,
    WifiConfig,
)
+from ..host.const import AuthMethod, InterfaceType, WifiMode
from .utils import api_process, api_validate

_SCHEMA_IP_CONFIG = vol.Schema(
@@ -121,6 +121,7 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
        ATTR_ENABLED: interface.enabled,
        ATTR_CONNECTED: interface.connected,
        ATTR_PRIMARY: interface.primary,
+        ATTR_MAC: interface.mac,
        ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
        ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
        ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
@@ -196,19 +197,19 @@ class APINetwork(CoreSysAttributes):
        # Apply config
        for key, config in body.items():
            if key == ATTR_IPV4:
-                interface.ipv4 = attr.evolve(
+                interface.ipv4 = replace(
                    interface.ipv4
                    or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
                    **config,
                )
            elif key == ATTR_IPV6:
-                interface.ipv6 = attr.evolve(
+                interface.ipv6 = replace(
                    interface.ipv6
                    or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
                    **config,
                )
            elif key == ATTR_WIFI:
-                interface.wifi = attr.evolve(
+                interface.wifi = replace(
                    interface.wifi
                    or WifiConfig(
                        WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
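Note: the host configuration models evidently moved from attrs to standard dataclasses, so attr.evolve becomes dataclasses.replace; both return a copy with selected fields overridden. A minimal illustration (this IpConfigSketch is a simplified stand-in, not the real class):

    from dataclasses import dataclass, replace

    @dataclass(frozen=True)
    class IpConfigSketch:
        method: str
        address: list[str]

    base = IpConfigSketch("static", [])
    updated = replace(base, address=["192.168.1.2/24"])
    print(updated)  # IpConfigSketch(method='static', address=['192.168.1.2/24'])
    print(base)     # original untouched: IpConfigSketch(method='static', address=[])
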
@@ -276,6 +277,8 @@ class APINetwork(CoreSysAttributes):
        )

        vlan_interface = Interface(
            "",
+            "",
+            "",
            True,
            True,

supervisor/api/os.py:
@@ -2,16 +2,24 @@
import asyncio
from collections.abc import Awaitable
import logging
from pathlib import Path
from typing import Any

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_ACTIVITY_LED,
    ATTR_BOARD,
    ATTR_BOOT,
    ATTR_DEVICES,
    ATTR_DISK_LED,
    ATTR_HEARTBEAT_LED,
    ATTR_ID,
    ATTR_NAME,
    ATTR_POWER_LED,
    ATTR_SERIAL,
    ATTR_SIZE,
    ATTR_STATE,
    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
@@ -21,20 +29,27 @@ from ..exceptions import BoardInvalidError
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..validate import version_tag
from .const import (
+    ATTR_BOOT_SLOT,
+    ATTR_BOOT_SLOTS,
    ATTR_DATA_DISK,
+    ATTR_DEV_PATH,
    ATTR_DEVICE,
-    ATTR_DISK_LED,
-    ATTR_HEARTBEAT_LED,
-    ATTR_POWER_LED,
+    ATTR_DISKS,
+    ATTR_MODEL,
+    ATTR_STATUS,
+    ATTR_SYSTEM_HEALTH_LED,
+    ATTR_VENDOR,
+    BootSlot,
)
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

-SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
-SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): vol.All(str, vol.Coerce(Path))})

# pylint: disable=no-value-for-parameter
+SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
+SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
+SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})

SCHEMA_YELLOW_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_DISK_LED): vol.Boolean(),

@@ -42,6 +57,14 @@ SCHEMA_YELLOW_OPTIONS = vol.Schema(
        vol.Optional(ATTR_POWER_LED): vol.Boolean(),
    }
)
+SCHEMA_GREEN_OPTIONS = vol.Schema(
+    {
+        vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(),
+        vol.Optional(ATTR_POWER_LED): vol.Boolean(),
+        vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
+    }
+)
# pylint: enable=no-value-for-parameter


class APIOS(CoreSysAttributes):
@@ -56,7 +79,16 @@ class APIOS(CoreSysAttributes):
            ATTR_UPDATE_AVAILABLE: self.sys_os.need_update,
            ATTR_BOARD: self.sys_os.board,
            ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
-            ATTR_DATA_DISK: self.sys_os.datadisk.disk_used,
+            ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
+            ATTR_BOOT_SLOTS: {
+                slot.bootname: {
+                    ATTR_STATE: slot.state,
+                    ATTR_STATUS: slot.boot_status,
+                    ATTR_VERSION: slot.bundle_version,
+                }
+                for slot in self.sys_os.slots
+                if slot.bootname
+            },
        }

    @api_process
@@ -79,13 +111,65 @@ class APIOS(CoreSysAttributes):

        await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))

+    @api_process
+    def wipe_data(self, request: web.Request) -> Awaitable[None]:
+        """Trigger data disk wipe on Host."""
+        return asyncio.shield(self.sys_os.datadisk.wipe_disk())
+
+    @api_process
+    async def set_boot_slot(self, request: web.Request) -> None:
+        """Change the active boot slot and reboot into it."""
+        body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
+        await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
+
    @api_process
    async def list_data(self, request: web.Request) -> dict[str, Any]:
        """Return possible data targets."""
        return {
-            ATTR_DEVICES: self.sys_os.datadisk.available_disks,
+            ATTR_DEVICES: [disk.id for disk in self.sys_os.datadisk.available_disks],
+            ATTR_DISKS: [
+                {
+                    ATTR_NAME: disk.name,
+                    ATTR_VENDOR: disk.vendor,
+                    ATTR_MODEL: disk.model,
+                    ATTR_SERIAL: disk.serial,
+                    ATTR_SIZE: disk.size,
+                    ATTR_ID: disk.id,
+                    ATTR_DEV_PATH: disk.device_path.as_posix(),
+                }
+                for disk in self.sys_os.datadisk.available_disks
+            ],
        }

+    @api_process
+    async def boards_green_info(self, request: web.Request) -> dict[str, Any]:
+        """Get green board settings."""
+        return {
+            ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led,
+            ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led,
+            ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led,
+        }
+
+    @api_process
+    async def boards_green_options(self, request: web.Request) -> None:
+        """Update green board settings."""
+        body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
+
+        if ATTR_ACTIVITY_LED in body:
+            await self.sys_dbus.agent.board.green.set_activity_led(
+                body[ATTR_ACTIVITY_LED]
+            )
+
+        if ATTR_POWER_LED in body:
+            await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])
+
+        if ATTR_SYSTEM_HEALTH_LED in body:
+            await self.sys_dbus.agent.board.green.set_user_led(
+                body[ATTR_SYSTEM_HEALTH_LED]
+            )
+
+        self.sys_dbus.agent.board.green.save_data()
+
    @api_process
    async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
        """Get yellow board settings."""
@@ -101,14 +185,17 @@ class APIOS(CoreSysAttributes):
        body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)

        if ATTR_DISK_LED in body:
-            self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED]
+            await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])

        if ATTR_HEARTBEAT_LED in body:
-            self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED]
+            await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
+                body[ATTR_HEARTBEAT_LED]
+            )

        if ATTR_POWER_LED in body:
-            self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]
+            await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])

        self.sys_dbus.agent.board.yellow.save_data()
        self.sys_resolution.create_issue(
            IssueType.REBOOT_REQUIRED,
            ContextType.SYSTEM,

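Note: the LED writes change from fire-and-forget property assignment to awaited D-Bus setter calls, so a failed write now surfaces in the API response instead of being lost in the background. A generic sketch of that refactor pattern (class and method names hypothetical):

    import asyncio

    class BoardSketch:
        """Stand-in for a D-Bus proxy: a property setter cannot be awaited."""

        async def set_disk_led(self, value: bool) -> None:
            # Imagine a D-Bus Set call here; exceptions now propagate
            # to the awaiting API handler instead of a detached task.
            await asyncio.sleep(0)
            self._disk_led = value

    async def main() -> None:
        board = BoardSketch()
        await board.set_disk_led(True)  # handler can await and report errors

    asyncio.run(main())
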
supervisor/api/panel/entrypoint.js:
@@ -1,16 +1 @@
-
-function loadES5() {
-  var el = document.createElement('script');
-  el.src = '/api/hassio/app/frontend_es5/entrypoint.5c6aba93.js';
-  document.body.appendChild(el);
-}
-if (/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent)) {
-  loadES5();
-} else {
-  try {
-    new Function("import('/api/hassio/app/frontend_latest/entrypoint.499355be.js')")();
-  } catch (err) {
-    loadES5();
-  }
-}
-
+!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()
(Several accompanying frontend bundles changed; binary files and over-long minified diffs not shown.)
@@ -1 +0,0 @@
-/*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/facebook/regenerator/blob/main/LICENSE */

@@ -1 +0,0 @@
-/*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/facebook/regenerator/blob/main/LICENSE */
supervisor/api/panel/frontend_es5/1036-G1AUvfK_ULU.js (new file, diff suppressed: minified)
supervisor/api/panel/frontend_es5/1036-G1AUvfK_ULU.js.gz (new binary file)
supervisor/api/panel/frontend_es5/1047-g7fFLS9eP4I.js (new file)
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1047],{32594:function(e,t,r){r.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},75054:function(e,t,r){r.r(t),r.d(t,{HaTimeDuration:function(){return f}});var n,a=r(88962),i=r(33368),o=r(71650),d=r(82390),u=r(69205),l=r(70906),s=r(91808),c=r(68144),v=r(79932),f=(r(47289),(0,s.Z)([(0,v.Mo)("ha-selector-duration")],(function(e,t){var r=function(t){(0,u.Z)(n,t);var r=(0,l.Z)(n);function n(){var t;(0,o.Z)(this,n);for(var a=arguments.length,i=new Array(a),u=0;u<a;u++)i[u]=arguments[u];return t=r.call.apply(r,[this].concat(i)),e((0,d.Z)(t)),t}return(0,i.Z)(n)}(t);return{F:r,d:[{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"value",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){var e;return(0,c.dy)(n||(n=(0,a.Z)([' <ha-duration-input .label="','" .helper="','" .data="','" .disabled="','" .required="','" ?enableDay="','"></ha-duration-input> '])),this.label,this.helper,this.value,this.disabled,this.required,null===(e=this.selector.duration)||void 0===e?void 0:e.enable_day)}}]}}),c.oi))}}]);
//# sourceMappingURL=1047-g7fFLS9eP4I.js.map

supervisor/api/panel/frontend_es5/1047-g7fFLS9eP4I.js.gz (new binary file)

supervisor/api/panel/frontend_es5/1047-g7fFLS9eP4I.js.map (new file)
@@ -0,0 +1 @@
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}

supervisor/api/panel/frontend_es5/1074-djfpWNdWsA8.js (new file, diff suppressed: minified)
supervisor/api/panel/frontend_es5/1074-djfpWNdWsA8.js.gz (new binary file)
supervisor/api/panel/frontend_es5/1116-xNyDWQHsExg.js (new file, diff suppressed: minified)
supervisor/api/panel/frontend_es5/1116-xNyDWQHsExg.js.gz (new binary file)

@@ -1 +0,0 @@
-/*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/facebook/regenerator/blob/main/LICENSE */
supervisor/api/panel/frontend_es5/1193--qnpEuA6qSY.js (new file, diff suppressed: minified)
supervisor/api/panel/frontend_es5/1193--qnpEuA6qSY.js.gz (new binary file)
supervisor/api/panel/frontend_es5/1265-yCkoy0FMl6o.js (new file, diff suppressed: minified)
supervisor/api/panel/frontend_es5/1265-yCkoy0FMl6o.js.gz (new binary file)
supervisor/api/panel/frontend_es5/1281-On4tZThCfZs.js (new file, diff suppressed: minified)
supervisor/api/panel/frontend_es5/1281-On4tZThCfZs.js.gz (new binary file)

supervisor/api/panel/frontend_es5/1402-6WKUruvoXtM.js (new file)
@@ -0,0 +1,2 @@
!function(){"use strict";var n,t,e={14595:function(n,t,e){e(58556);var r,i,o=e(93217),u=e(422),a=e(62173),s=function(n,t,e){if("input"===n){if("type"===t&&"checkbox"===e||"checked"===t||"disabled"===t)return;return""}},c={renderMarkdown:function(n,t){var e,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign(Object.assign({},(0,a.getDefaultWhiteList)()),{},{input:["type","disabled","checked"],"ha-icon":["icon"],"ha-svg-icon":["path"],"ha-alert":["alert-type","title"]})),o.allowSvg?(i||(i=Object.assign(Object.assign({},r),{},{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=i):e=r,(0,a.filterXSS)((0,u.TU)(n,t),{whiteList:e,onTagAttr:s})}};(0,o.Jj)(c)}},r={};function i(n){var t=r[n];if(void 0!==t)return t.exports;var o=r[n]={exports:{}};return e[n](o,o.exports,i),o.exports}i.m=e,i.x=function(){var n=i.O(void 0,[9191,215],(function(){return i(14595)}));return n=i.O(n)},n=[],i.O=function(t,e,r,o){if(!e){var u=1/0;for(f=0;f<n.length;f++){e=n[f][0],r=n[f][1],o=n[f][2];for(var a=!0,s=0;s<e.length;s++)(!1&o||u>=o)&&Object.keys(i.O).every((function(n){return i.O[n](e[s])}))?e.splice(s--,1):(a=!1,o<u&&(u=o));if(a){n.splice(f--,1);var c=r();void 0!==c&&(t=c)}}return t}o=o||0;for(var f=n.length;f>0&&n[f-1][2]>o;f--)n[f]=n[f-1];n[f]=[e,r,o]},i.n=function(n){var t=n&&n.__esModule?function(){return n.default}:function(){return n};return i.d(t,{a:t}),t},i.d=function(n,t){for(var e in t)i.o(t,e)&&!i.o(n,e)&&Object.defineProperty(n,e,{enumerable:!0,get:t[e]})},i.f={},i.e=function(n){return Promise.all(Object.keys(i.f).reduce((function(t,e){return i.f[e](n,t),t}),[]))},i.u=function(n){return n+"-"+{215:"FPZmDYZTPdk",9191:"37260H-osZ4"}[n]+".js"},i.o=function(n,t){return Object.prototype.hasOwnProperty.call(n,t)},i.p="/api/hassio/app/frontend_es5/",function(){var n={1402:1};i.f.i=function(t,e){n[t]||importScripts(i.p+i.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=t.push.bind(t);t.push=function(t){var r=t[0],o=t[1],u=t[2];for(var a in o)i.o(o,a)&&(i.m[a]=o[a]);for(u&&u(i);r.length;)n[r.pop()]=1;e(t)}}(),t=i.x,i.x=function(){return Promise.all([i.e(9191),i.e(215)]).then(t)};i.x()}();
//# sourceMappingURL=1402-6WKUruvoXtM.js.map

supervisor/api/panel/frontend_es5/1402-6WKUruvoXtM.js.gz (new binary file)

(Some files were not shown because too many files have changed in this diff.)