Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-10-25 03:29:32 +00:00)
Compare commits: 2023.08.2 ... trigger-sy (595 commits)
@@ -4,37 +4,45 @@
   "containerEnv": {
     "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
   },
+  "remoteEnv": {
+    "PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
+  },
   "appPort": ["9123:8123", "7357:4357"],
-  "postCreateCommand": "bash devcontainer_bootstrap",
+  "postCreateCommand": "bash devcontainer_setup",
+  "postStartCommand": "bash devcontainer_bootstrap",
   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
-  "extensions": [
-    "ms-python.python",
-    "ms-python.vscode-pylance",
-    "visualstudioexptteam.vscodeintellicode",
-    "esbenp.prettier-vscode"
-  ],
-  "mounts": ["type=volume,target=/var/lib/docker"],
-  "settings": {
-    "terminal.integrated.profiles.linux": {
-      "zsh": {
-        "path": "/usr/bin/zsh"
-      }
-    },
-    "terminal.integrated.defaultProfile.linux": "zsh",
-    "editor.formatOnPaste": false,
-    "editor.formatOnSave": true,
-    "editor.formatOnType": true,
-    "files.trimTrailingWhitespace": true,
-    "python.pythonPath": "/usr/local/bin/python3",
-    "python.linting.pylintEnabled": true,
-    "python.linting.enabled": true,
-    "python.formatting.provider": "black",
-    "python.formatting.blackArgs": ["--target-version", "py310"],
-    "python.formatting.blackPath": "/usr/local/bin/black",
-    "python.linting.banditPath": "/usr/local/bin/bandit",
-    "python.linting.flake8Path": "/usr/local/bin/flake8",
-    "python.linting.mypyPath": "/usr/local/bin/mypy",
-    "python.linting.pylintPath": "/usr/local/bin/pylint",
-    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
-  }
+  "customizations": {
+    "vscode": {
+      "extensions": [
+        "charliermarsh.ruff",
+        "ms-python.pylint",
+        "ms-python.vscode-pylance",
+        "visualstudioexptteam.vscodeintellicode",
+        "redhat.vscode-yaml",
+        "esbenp.prettier-vscode",
+        "GitHub.vscode-pull-request-github"
+      ],
+      "settings": {
+        "python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
+        "python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
+        "python.terminal.activateEnvInCurrentTerminal": true,
+        "python.testing.pytestArgs": ["--no-cov"],
+        "pylint.importStrategy": "fromEnvironment",
+        "editor.formatOnPaste": false,
+        "editor.formatOnSave": true,
+        "editor.formatOnType": true,
+        "files.trimTrailingWhitespace": true,
+        "terminal.integrated.profiles.linux": {
+          "zsh": {
+            "path": "/usr/bin/zsh"
+          }
+        },
+        "terminal.integrated.defaultProfile.linux": "zsh",
+        "[python]": {
+          "editor.defaultFormatter": "charliermarsh.ruff"
+        }
+      }
+    }
+  },
+  "mounts": ["type=volume,target=/var/lib/docker"]
 }
.github/PULL_REQUEST_TEMPLATE.md (vendored): 9 changed lines

@@ -38,6 +38,7 @@
 - This PR is related to issue:
 - Link to documentation pull request:
 - Link to cli pull request:
+- Link to client library pull request:

 ## Checklist

@@ -52,12 +53,14 @@
 - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
-- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
+- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
 - [ ] Tests have been added to verify that the new code works.

-If API endpoints of add-on configuration are added/changed:
+If API endpoints or add-on configuration are added/changed:

 - [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
+- [ ] [CLI][cli-repository] updated (if necessary)
+- [ ] [Client library][client-library-repository] updated (if necessary)

 <!--
 Thank you for contributing <3
@@ -67,3 +70,5 @@ If API endpoints of add-on configuration are added/changed:

 [dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
 [docs-repository]: https://github.com/home-assistant/developers.home-assistant
+[cli-repository]: https://github.com/home-assistant/cli
+[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/
.github/workflows/builder.yml (vendored): 38 changed lines

@@ -33,7 +33,7 @@ on:
       - setup.py

 env:
-  DEFAULT_PYTHON: "3.11"
+  DEFAULT_PYTHON: "3.12"
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor

@@ -53,7 +53,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
         with:
           fetch-depth: 0

@@ -70,13 +70,13 @@ jobs:
       - name: Get changed files
         id: changed_files
         if: steps.version.outputs.publish == 'false'
-        uses: jitterbit/get-changed-files@v1
+        uses: masesgroup/retrieve-changed-files@v3.0.0

       - name: Check if requirements files changed
         id: requirements
         run: |
-          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then
-            echo "::set-output name=changed::true"
+          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
+            echo "changed=true" >> "$GITHUB_OUTPUT"
           fi

   build:
@@ -92,7 +92,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
         with:
           fetch-depth: 0

@@ -106,13 +106,13 @@ jobs:

       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2023.04.0
+        uses: home-assistant/wheels@2024.07.1
         with:
-          abi: cp311
+          abi: cp312
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
-          apk: "libffi-dev;openssl-dev"
+          apk: "libffi-dev;openssl-dev;yaml-dev"
           skip-binary: aiohttp
           env-file: true
           requirements: "requirements.txt"
@@ -125,20 +125,20 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Install Cosign
         if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@v3.1.1
+        uses: sigstore/cosign-installer@v3.7.0
         with:
-          cosign-release: "v2.0.2"
+          cosign-release: "v2.4.0"

       - name: Install dirhash and calc hash
         if: needs.init.outputs.publish == 'true'
         run: |
-          pip3 install dirhash
+          pip3 install setuptools dirhash
           dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
           echo "${dir_hash}" > rootfs/supervisor.sha256

@@ -149,7 +149,7 @@ jobs:

       - name: Login to GitHub Container Registry
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v2.2.0
+        uses: docker/login-action@v3.3.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@ jobs:
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

       - name: Build supervisor
-        uses: home-assistant/builder@2023.08.0
+        uses: home-assistant/builder@2024.08.2
         with:
           args: |
             $BUILD_ARGS \
@@ -178,7 +178,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1

       - name: Initialize git
         if: needs.init.outputs.publish == 'true'
@@ -203,11 +203,11 @@ jobs:
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1

       - name: Build the Supervisor
         if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2023.08.0
+        uses: home-assistant/builder@2024.08.2
         with:
           args: |
             --test \
@@ -324,7 +324,7 @@ jobs:
           if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
             exit 1
           fi
-          echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"
+          echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"

       - name: Uninstall SSH add-on
         run: |
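Note on the output changes above: the `::set-output` workflow command was deprecated by GitHub in late 2022 in favor of writing key=value pairs to the `$GITHUB_OUTPUT` file, which is what these hunks migrate to. A minimal sketch of the two styles (the `changed` output name and `requirements` step id come from the hunk above):

    # old style, deprecated by the Actions runner:
    echo "::set-output name=changed::true"

    # new style: append key=value to the runner-provided file:
    echo "changed=true" >> "$GITHUB_OUTPUT"

    # downstream steps keep reading it the same way:
    #   ${{ steps.requirements.outputs.changed }}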
.github/workflows/ci.yaml (vendored): 253 changed lines

@@ -8,8 +8,8 @@ on:
   pull_request: ~

 env:
-  DEFAULT_PYTHON: "3.11"
-  PRE_COMMIT_HOME: ~/.cache/pre-commit
+  DEFAULT_PYTHON: "3.12"
+  PRE_COMMIT_CACHE: ~/.cache/pre-commit

 concurrency:
   group: "${{ github.workflow }}-${{ github.ref }}"
@@ -25,15 +25,15 @@ jobs:
     name: Prepare Python dependencies
     steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
      - name: Set up Python
        id: python
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
        with:
          path: venv
          key: |
@@ -47,9 +47,10 @@
          pip install -r requirements.txt -r requirements_tests.txt
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          lookup-only: true
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
@@ -60,21 +61,21 @@
          . venv/bin/activate
          pre-commit install-hooks

-  lint-black:
-    name: Check black
+  lint-ruff-format:
+    name: Check ruff-format
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
        with:
          path: venv
          key: |
@@ -84,10 +85,67 @@
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
-      - name: Run black
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.1.1
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff-format
        run: |
          . venv/bin/activate
-          black --target-version py38 --check supervisor tests setup.py
+          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github
+
+  lint-ruff:
+    name: Check ruff
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.2.1
+      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
+        uses: actions/setup-python@v5.2.0
+        id: python
+        with:
+          python-version: ${{ needs.prepare.outputs.python-version }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v4.1.1
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.1.1
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff
+        run: |
+          . venv/bin/activate
+          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github

   lint-dockerfile:
     name: Check Dockerfile
@@ -95,7 +153,7 @@
    needs: prepare
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -110,15 +168,15 @@
    needs: prepare
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
        with:
          path: venv
          key: |
@@ -130,9 +188,9 @@
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
@@ -148,94 +206,21 @@
          . venv/bin/activate
          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files

-  lint-flake8:
-    name: Check flake8
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.1
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Register flake8 problem matcher
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/flake8.json"
-      - name: Run flake8
-        run: |
-          . venv/bin/activate
-          flake8 supervisor tests
-
-  lint-isort:
-    name: Check isort
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.1
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v3.3.1
-        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run isort
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
-
   lint-json:
     name: Check JSON
     runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
        with:
          path: venv
          key: |
@@ -247,9 +232,9 @@
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
+          path: ${{ env.PRE_COMMIT_CACHE }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
@@ -271,15 +256,15 @@
    needs: prepare
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
        with:
          path: venv
          key: |
@@ -297,66 +282,25 @@
          . venv/bin/activate
          pylint supervisor tests

-  lint-pyupgrade:
-    name: Check pyupgrade
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.1
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v3.3.1
-        with:
-          path: ${{ env.PRE_COMMIT_HOME }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run pyupgrade
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
-
   pytest:
     runs-on: ubuntu-latest
     needs: prepare
     name: Run tests Python ${{ needs.prepare.outputs.python-version }}
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.1.1
+        uses: sigstore/cosign-installer@v3.7.0
        with:
-          cosign-release: "v2.0.2"
+          cosign-release: "v2.4.0"
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
        with:
          path: venv
          key: |
@@ -369,7 +313,7 @@
      - name: Install additional system dependencies
        run: |
          sudo apt-get update
-          sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11
+          sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
      - name: Register Python problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/python.json"
@@ -391,10 +335,11 @@
            -o console_output_style=count \
            tests
      - name: Upload coverage artifact
-        uses: actions/upload-artifact@v3.1.2
+        uses: actions/upload-artifact@v4.4.3
        with:
          name: coverage-${{ matrix.python-version }}
          path: .coverage
+          include-hidden-files: true

   coverage:
     name: Process test coverage
@@ -402,15 +347,15 @@
    needs: ["pytest", "prepare"]
    steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v4.7.0
+        uses: actions/setup-python@v5.2.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v3.3.1
+        uses: actions/cache@v4.1.1
        with:
          path: venv
          key: |
@@ -421,7 +366,7 @@
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Download all coverage artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4.1.8
      - name: Combine coverage results
        run: |
          . venv/bin/activate
@@ -429,4 +374,4 @@
          coverage report
          coverage xml
      - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v3.1.4
+        uses: codecov/codecov-action@v4.6.0
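Net effect of the ci.yaml rewrite: the four separate lint jobs (black, flake8, isort, pyupgrade) collapse into the two ruff jobs, and every action is pinned to a newer release. The CI lint checks can be reproduced locally through the same pre-commit hooks the jobs invoke — a minimal sketch, assuming the project virtualenv lives at venv/ as in the workflow:

    . venv/bin/activate
    pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
    pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure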
.github/workflows/lock.yml (vendored): 2 changed lines

@@ -9,7 +9,7 @@ jobs:
   lock:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v4.0.1
+      - uses: dessant/lock-threads@v5.0.1
        with:
          github-token: ${{ github.token }}
          issue-inactive-days: "30"
.github/workflows/matchers/flake8.json (vendored, deleted): 30 lines

@@ -1,30 +0,0 @@
-{
-  "problemMatcher": [
-    {
-      "owner": "flake8-error",
-      "severity": "error",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    },
-    {
-      "owner": "flake8-warning",
-      "severity": "warning",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    }
-  ]
-}
.github/workflows/release-drafter.yml (vendored): 6 changed lines

@@ -11,7 +11,7 @@ jobs:
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
        with:
          fetch-depth: 0

@@ -33,10 +33,10 @@ jobs:

          echo Current version: $latest
          echo New target version: $datepre.$newpost
-          echo "::set-output name=version::$datepre.$newpost"
+          echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"

      - name: Run Release Drafter
-        uses: release-drafter/release-drafter@v5.24.0
+        uses: release-drafter/release-drafter@v6.0.0
        with:
          tag: ${{ steps.version.outputs.version }}
          name: ${{ steps.version.outputs.version }}
.github/workflows/sentry.yaml (vendored): 4 changed lines

@@ -10,9 +10,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v3.5.3
+        uses: actions/checkout@v4.2.1
      - name: Sentry Release
-        uses: getsentry/action-release@v1.4.1
+        uses: getsentry/action-release@v1.7.0
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
.github/workflows/stale.yml (vendored): 2 changed lines

@@ -9,7 +9,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v8.0.0
+      - uses: actions/stale@v9.0.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 30
@@ -3,4 +3,5 @@ ignored:
   - DL3006
   - DL3013
   - DL3018
+  - DL3042
   - SC2155
@@ -1,34 +1,15 @@
 repos:
-  - repo: https://github.com/psf/black
-    rev: 23.1.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.5.7
     hooks:
-      - id: black
+      - id: ruff
         args:
-          - --safe
-          - --quiet
-          - --target-version
-          - py310
+          - --fix
+      - id: ruff-format
         files: ^((supervisor|tests)/.+)?[^/]+\.py$
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
-    hooks:
-      - id: flake8
-        additional_dependencies:
-          - flake8-docstrings==1.7.0
-          - pydocstyle==6.3.0
-        files: ^(supervisor|script|tests)/.+\.py$
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.5.0
     hooks:
       - id: check-executables-have-shebangs
         stages: [manual]
       - id: check-json
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.12.0
-    hooks:
-      - id: isort
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.4.0
-    hooks:
-      - id: pyupgrade
-        args: [--py310-plus]
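With this hook swap, ruff becomes the single tool behind linting (previously flake8), import sorting (isort), syntax upgrades (pyupgrade), and formatting (black). Outside pre-commit, the equivalent direct commands are the same ones the new VS Code tasks below run:

    ruff check --fix supervisor tests   # lint with autofix
    ruff format supervisor tests        # format in place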
.vscode/tasks.json (vendored): 18 changed lines

@@ -58,9 +58,23 @@
       "problemMatcher": []
     },
     {
-      "label": "Flake8",
+      "label": "Ruff Check",
       "type": "shell",
-      "command": "flake8 supervisor tests",
+      "command": "ruff check --fix supervisor tests",
+      "group": {
+        "kind": "test",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Ruff Format",
+      "type": "shell",
+      "command": "ruff format supervisor tests",
       "group": {
         "kind": "test",
         "isDefault": true
Dockerfile: 19 changed lines

@@ -4,7 +4,8 @@ FROM ${BUILD_FROM}
 ENV \
     S6_SERVICES_GRACETIME=10000 \
     SUPERVISOR_API=http://localhost \
-    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1
+    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
+    UV_SYSTEM_PYTHON=true

 ARG \
     COSIGN_VERSION \
@@ -15,6 +16,7 @@ WORKDIR /usr/src
 RUN \
     set -x \
     && apk add --no-cache \
+        findutils \
         eudev \
         eudev-libs \
         git \
@@ -22,23 +24,26 @@ RUN \
         libpulse \
         musl \
         openssl \
+        yaml \
     \
     && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
-    && chmod a+x /usr/bin/cosign
+    && chmod a+x /usr/bin/cosign \
+    && pip3 install uv==0.2.21

 # Install requirements
 COPY requirements.txt .
 RUN \
-    export MAKEFLAGS="-j$(nproc)" \
-    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
-        "https://wheels.home-assistant.io/musllinux/" \
-        -r ./requirements.txt \
+    if [ "${BUILD_ARCH}" = "i386" ]; then \
+        linux32 uv pip install --no-build -r requirements.txt; \
+    else \
+        uv pip install --no-build -r requirements.txt; \
+    fi \
     && rm -f requirements.txt

 # Install Home Assistant Supervisor
 COPY . supervisor
 RUN \
-    pip3 install --no-cache-dir -e ./supervisor \
+    pip3 install -e ./supervisor \
     && python3 -m compileall ./supervisor/supervisor
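The requirements layer now installs through uv rather than pip: `UV_SYSTEM_PYTHON=true` (set in the ENV block above) points `uv pip` at the system interpreter, `--no-build` refuses to compile sdists so only prebuilt musllinux wheels enter the image, and the `linux32` wrapper gives the i386 build a 32-bit personality. A minimal sketch of the same flow outside the image, using the version pinned in the hunk above:

    pip3 install uv==0.2.21
    export UV_SYSTEM_PYTHON=true   # install into the system interpreter, no venv required
    uv pip install --no-build -r requirements.txt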
@@ -30,3 +30,5 @@ Releases are done in 3 stages (channels) with this structure:

 [development]: https://developers.home-assistant.io/docs/supervisor/development
 [stable]: https://github.com/home-assistant/version/blob/master/stable.json
+
+[](https://www.openhomefoundation.org/)
build.yaml: 12 changed lines

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-hassio-supervisor
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.11-alpine3.16
-  armhf: ghcr.io/home-assistant/armhf-base-python:3.11-alpine3.16
-  armv7: ghcr.io/home-assistant/armv7-base-python:3.11-alpine3.16
-  amd64: ghcr.io/home-assistant/amd64-base-python:3.11-alpine3.16
-  i386: ghcr.io/home-assistant/i386-base-python:3.11-alpine3.16
+  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.20
+  armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.20
+  armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.20
+  amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.20
+  i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.20
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
@@ -12,7 +12,7 @@ cosign:
   base_identity: https://github.com/home-assistant/docker-base/.*
   identity: https://github.com/home-assistant/supervisor/.*
 args:
-  COSIGN_VERSION: 2.0.2
+  COSIGN_VERSION: 2.4.0
 labels:
   io.hass.type: supervisor
   org.opencontainers.image.title: Home Assistant Supervisor
pylintrc (deleted): 45 lines

@@ -1,45 +0,0 @@
-[MASTER]
-reports=no
-jobs=2
-
-good-names=id,i,j,k,ex,Run,_,fp,T,os
-
-extension-pkg-whitelist=
-    ciso8601
-
-# Reasons disabled:
-# format - handled by black
-# locally-disabled - it spams too much
-# duplicate-code - unavoidable
-# cyclic-import - doesn't test if both import on load
-# abstract-class-not-used - is flaky, should not show up but does
-# unused-argument - generic callbacks and setup methods create a lot of warnings
-# too-many-* - are not enforced for the sake of readability
-# too-few-* - same as too-many-*
-# abstract-method - with intro of async there are always methods missing
-disable=
-    format,
-    abstract-method,
-    cyclic-import,
-    duplicate-code,
-    locally-disabled,
-    no-else-return,
-    not-context-manager,
-    too-few-public-methods,
-    too-many-arguments,
-    too-many-branches,
-    too-many-instance-attributes,
-    too-many-lines,
-    too-many-locals,
-    too-many-public-methods,
-    too-many-return-statements,
-    too-many-statements,
-    unused-argument,
-    consider-using-with
-
-[EXCEPTIONS]
-overgeneral-exceptions=builtins.Exception
-
-
-[TYPECHECK]
-ignored-modules = distutils
pyproject.toml (373 changes, new file)
```diff
@@ -0,0 +1,373 @@
+[build-system]
+requires = ["setuptools~=68.0.0", "wheel~=0.40.0"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "Supervisor"
+dynamic = ["version", "dependencies"]
+license = { text = "Apache-2.0" }
+description = "Open-source private cloud os for Home-Assistant based on HassOS"
+readme = "README.md"
+authors = [
+    { name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
+]
+keywords = ["docker", "home-assistant", "api"]
+requires-python = ">=3.12.0"
+
+[project.urls]
+"Homepage" = "https://www.home-assistant.io/"
+"Source Code" = "https://github.com/home-assistant/supervisor"
+"Bug Reports" = "https://github.com/home-assistant/supervisor/issues"
+"Docs: Dev" = "https://developers.home-assistant.io/"
+"Discord" = "https://www.home-assistant.io/join-chat/"
+"Forum" = "https://community.home-assistant.io/"
+
+[tool.setuptools]
+platforms = ["any"]
+zip-safe = false
+include-package-data = true
+
+[tool.setuptools.packages.find]
+include = ["supervisor*"]
+
+[tool.pylint.MAIN]
+py-version = "3.12"
+# Use a conservative default here; 2 should speed up most setups and not hurt
+# any too bad. Override on command line as appropriate.
+jobs = 2
+persistent = false
+extension-pkg-allow-list = ["ciso8601"]
+
+[tool.pylint.BASIC]
+class-const-naming-style = "any"
+good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]
+
+[tool.pylint."MESSAGES CONTROL"]
+# Reasons disabled:
+# format - handled by ruff
+# abstract-method - with intro of async there are always methods missing
+# cyclic-import - doesn't test if both import on load
+# duplicate-code - unavoidable
+# locally-disabled - it spams too much
+# too-many-* - are not enforced for the sake of readability
+# too-few-* - same as too-many-*
+# unused-argument - generic callbacks and setup methods create a lot of warnings
+disable = [
+    "format",
+    "abstract-method",
+    "cyclic-import",
+    "duplicate-code",
+    "locally-disabled",
+    "no-else-return",
+    "not-context-manager",
+    "too-few-public-methods",
+    "too-many-arguments",
+    "too-many-branches",
+    "too-many-instance-attributes",
+    "too-many-lines",
+    "too-many-locals",
+    "too-many-public-methods",
+    "too-many-return-statements",
+    "too-many-statements",
+    "unused-argument",
+    "consider-using-with",
+
+    # Handled by ruff
+    # Ref: <https://github.com/astral-sh/ruff/issues/970>
+    "await-outside-async", # PLE1142
+    "bad-str-strip-call", # PLE1310
+    "bad-string-format-type", # PLE1307
+    "bidirectional-unicode", # PLE2502
+    "continue-in-finally", # PLE0116
+    "duplicate-bases", # PLE0241
+    "format-needs-mapping", # F502
+    "function-redefined", # F811
+    # Needed because ruff does not understand type of __all__ generated by a function
+    # "invalid-all-format", # PLE0605
+    "invalid-all-object", # PLE0604
+    "invalid-character-backspace", # PLE2510
+    "invalid-character-esc", # PLE2513
+    "invalid-character-nul", # PLE2514
+    "invalid-character-sub", # PLE2512
+    "invalid-character-zero-width-space", # PLE2515
+    "logging-too-few-args", # PLE1206
+    "logging-too-many-args", # PLE1205
+    "missing-format-string-key", # F524
+    "mixed-format-string", # F506
+    "no-method-argument", # N805
+    "no-self-argument", # N805
+    "nonexistent-operator", # B002
+    "nonlocal-without-binding", # PLE0117
+    "not-in-loop", # F701, F702
+    "notimplemented-raised", # F901
+    "return-in-init", # PLE0101
+    "return-outside-function", # F706
+    "syntax-error", # E999
+    "too-few-format-args", # F524
+    "too-many-format-args", # F522
+    "too-many-star-expressions", # F622
+    "truncated-format-string", # F501
+    "undefined-all-variable", # F822
+    "undefined-variable", # F821
+    "used-prior-global-declaration", # PLE0118
+    "yield-inside-async-function", # PLE1700
+    "yield-outside-function", # F704
+    "anomalous-backslash-in-string", # W605
+    "assert-on-string-literal", # PLW0129
+    "assert-on-tuple", # F631
+    "bad-format-string", # W1302, F
+    "bad-format-string-key", # W1300, F
+    "bare-except", # E722
+    "binary-op-exception", # PLW0711
+    "cell-var-from-loop", # B023
+    # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
+    "duplicate-except", # B014
+    "duplicate-key", # F601
+    "duplicate-string-formatting-argument", # F
+    "duplicate-value", # F
+    "eval-used", # PGH001
+    "exec-used", # S102
+    # "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
+    "f-string-without-interpolation", # F541
+    "forgotten-debug-statement", # T100
+    "format-string-without-interpolation", # F
+    # "global-statement", # PLW0603, ruff catches new occurrences, needs more work
+    "global-variable-not-assigned", # PLW0602
+    "implicit-str-concat", # ISC001
+    "import-self", # PLW0406
+    "inconsistent-quotes", # Q000
+    "invalid-envvar-default", # PLW1508
+    "keyword-arg-before-vararg", # B026
+    "logging-format-interpolation", # G
+    "logging-fstring-interpolation", # G
+    "logging-not-lazy", # G
+    "misplaced-future", # F404
+    "named-expr-without-context", # PLW0131
+    "nested-min-max", # PLW3301
+    # "pointless-statement", # B018, ruff catches new occurrences, needs more work
+    "raise-missing-from", # TRY200
+    # "redefined-builtin", # A001, ruff is way more stricter, needs work
+    "try-except-raise", # TRY302
+    "unused-argument", # ARG001, we don't use it
+    "unused-format-string-argument", #F507
+    "unused-format-string-key", # F504
+    "unused-import", # F401
+    "unused-variable", # F841
+    "useless-else-on-loop", # PLW0120
+    "wildcard-import", # F403
+    "bad-classmethod-argument", # N804
+    "consider-iterating-dictionary", # SIM118
+    "empty-docstring", # D419
+    "invalid-name", # N815
+    "line-too-long", # E501, disabled globally
+    "missing-class-docstring", # D101
+    "missing-final-newline", # W292
+    "missing-function-docstring", # D103
+    "missing-module-docstring", # D100
+    "multiple-imports", #E401
+    "singleton-comparison", # E711, E712
+    "subprocess-run-check", # PLW1510
+    "superfluous-parens", # UP034
+    "ungrouped-imports", # I001
+    "unidiomatic-typecheck", # E721
+    "unnecessary-direct-lambda-call", # PLC3002
+    "unnecessary-lambda-assignment", # PLC3001
+    "unneeded-not", # SIM208
+    "useless-import-alias", # PLC0414
+    "wrong-import-order", # I001
+    "wrong-import-position", # E402
+    "comparison-of-constants", # PLR0133
+    "comparison-with-itself", # PLR0124
+    # "consider-alternative-union-syntax", # UP007, typing extension
+    "consider-merging-isinstance", # PLR1701
+    # "consider-using-alias", # UP006, typing extension
+    "consider-using-dict-comprehension", # C402
+    "consider-using-generator", # C417
+    "consider-using-get", # SIM401
+    "consider-using-set-comprehension", # C401
+    "consider-using-sys-exit", # PLR1722
+    "consider-using-ternary", # SIM108
+    "literal-comparison", # F632
+    "property-with-parameters", # PLR0206
+    "super-with-arguments", # UP008
+    "too-many-branches", # PLR0912
+    "too-many-return-statements", # PLR0911
+    "too-many-statements", # PLR0915
+    "trailing-comma-tuple", # COM818
+    "unnecessary-comprehension", # C416
+    "use-a-generator", # C417
+    "use-dict-literal", # C406
+    "use-list-literal", # C405
+    "useless-object-inheritance", # UP004
+    "useless-return", # PLR1711
+    # "no-self-use", # PLR6301 # Optional plugin, not enabled
+]
+
+[tool.pylint.REPORTS]
+score = false
+
+[tool.pylint.TYPECHECK]
+ignored-modules = ["distutils"]
+
+[tool.pylint.FORMAT]
+expected-line-ending-format = "LF"
+
+[tool.pylint.EXCEPTIONS]
+overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
+
+[tool.pylint.DESIGN]
+max-positional-arguments = 10
+
+[tool.pytest.ini_options]
+testpaths = ["tests"]
+norecursedirs = [".git"]
+log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
+log_date_format = "%Y-%m-%d %H:%M:%S"
+asyncio_mode = "auto"
+filterwarnings = [
+    "error",
+    "ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash",
+    "ignore::pytest.PytestUnraisableExceptionWarning",
+]
+
+[tool.ruff]
+lint.select = [
+    "B002", # Python does not support the unary prefix increment
+    "B007", # Loop control variable {name} not used within loop body
+    "B014", # Exception handler with duplicate exception
+    "B023", # Function definition does not bind loop variable {name}
+    "B026", # Star-arg unpacking after a keyword argument is strongly discouraged
+    "B904", # Use raise from to specify exception cause
+    "C", # complexity
+    "COM818", # Trailing comma on bare tuple prohibited
+    "D", # docstrings
+    "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
+    "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
+    "E", # pycodestyle
+    "F", # pyflakes/autoflake
+    "G", # flake8-logging-format
+    "I", # isort
+    "ICN001", # import conventions; {name} should be imported as {asname}
+    "N804", # First argument of a class method should be named cls
+    "N805", # First argument of a method should be named self
+    "N815", # Variable {name} in class scope should not be mixedCase
+    "PGH004", # Use specific rule codes when using noqa
+    "PLC0414", # Useless import alias. Import alias does not rename original package.
+    "PLC", # pylint
+    "PLE", # pylint
+    "PLR", # pylint
+    "PLW", # pylint
+    "Q000", # Double quotes found but single quotes preferred
+    "RUF006", # Store a reference to the return value of asyncio.create_task
+    "S102", # Use of exec detected
+    "S103", # bad-file-permissions
+    "S108", # hardcoded-temp-file
+    "S306", # suspicious-mktemp-usage
+    "S307", # suspicious-eval-usage
+    "S313", # suspicious-xmlc-element-tree-usage
+    "S314", # suspicious-xml-element-tree-usage
+    "S315", # suspicious-xml-expat-reader-usage
+    "S316", # suspicious-xml-expat-builder-usage
+    "S317", # suspicious-xml-sax-usage
+    "S318", # suspicious-xml-mini-dom-usage
+    "S319", # suspicious-xml-pull-dom-usage
+    "S320", # suspicious-xmle-tree-usage
+    "S601", # paramiko-call
+    "S602", # subprocess-popen-with-shell-equals-true
+    "S604", # call-with-shell-equals-true
+    "S608", # hardcoded-sql-expression
+    "S609", # unix-command-wildcard-injection
+    "SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
+    "SIM117", # Merge with-statements that use the same scope
+    "SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
+    "SIM201", # Use {left} != {right} instead of not {left} == {right}
+    "SIM208", # Use {expr} instead of not (not {expr})
+    "SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
+    "SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
+    "SIM401", # Use get from dict with default instead of an if block
+    "T100", # Trace found: {name} used
+    "T20", # flake8-print
+    "TID251", # Banned imports
+    "TRY004", # Prefer TypeError exception for invalid type
+    "TRY302", # Remove exception handler; error is immediately re-raised
+    "UP", # pyupgrade
+    "W", # pycodestyle
+]
+
+lint.ignore = [
+    "D202", # No blank lines allowed after function docstring
+    "D203", # 1 blank line required before class docstring
+    "D213", # Multi-line docstring summary should start at the second line
+    "D406", # Section name should end with a newline
+    "D407", # Section name underlining
+    "E501", # line too long
+    "E731", # do not assign a lambda expression, use a def
+
+    # Ignore ignored, as the rule is now back in preview/nursery, which cannot
+    # be ignored anymore without warnings.
+    # https://github.com/astral-sh/ruff/issues/7491
+    # "PLC1901", # Lots of false positives
+
+    # False positives https://github.com/astral-sh/ruff/issues/5386
+    "PLC0208", # Use a sequence type instead of a `set` when iterating over values
+    "PLR0911", # Too many return statements ({returns} > {max_returns})
+    "PLR0912", # Too many branches ({branches} > {max_branches})
+    "PLR0913", # Too many arguments to function call ({c_args} > {max_args})
+    "PLR0915", # Too many statements ({statements} > {max_statements})
+    "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
+    "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
+    "UP006", # keep type annotation style as is
+    "UP007", # keep type annotation style as is
+    # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
+    "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
+
+    # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
+    "W191",
+    "E111",
+    "E114",
+    "E117",
+    "D206",
+    "D300",
+    "Q000",
+    "Q001",
+    "Q002",
+    "Q003",
+    "COM812",
+    "COM819",
+    "ISC001",
+    "ISC002",
+
+    # Disabled because ruff does not understand type of __all__ generated by a function
+    "PLE0605",
+]
+
+[tool.ruff.lint.flake8-import-conventions.extend-aliases]
+voluptuous = "vol"
+
+[tool.ruff.lint.flake8-pytest-style]
+fixture-parentheses = false
+
+[tool.ruff.lint.flake8-tidy-imports.banned-api]
+"pytz".msg = "use zoneinfo instead"
+
+[tool.ruff.lint.isort]
+force-sort-within-sections = true
+section-order = [
+    "future",
+    "standard-library",
+    "third-party",
+    "first-party",
+    "local-folder",
+]
+forced-separate = ["tests"]
+known-first-party = ["supervisor", "tests"]
+combine-as-imports = true
+split-on-trailing-comma = false
+
+[tool.ruff.lint.per-file-ignores]
+
+# DBus Service Mocks must use typing and names understood by dbus-fast
+"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
+
+[tool.ruff.lint.mccabe]
+max-complexity = 25
```
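For illustration only (not part of the diff): with `force-sort-within-sections = true` and `combine-as-imports = true` above, plain `import` statements and `from ... import` statements sort together alphabetically by module name within each section, and aliased imports from one module stay on a single statement. A minimal sketch, assuming the repo's own dependencies for the third-party group:

```python
# Standard library: "import logging" sorts between the "from" imports,
# because ordering is by module name, not by statement form.
from contextlib import suppress
import logging
from pathlib import Path

# Third party: combine-as-imports keeps multiple names in one statement.
from awesomeversion import AwesomeVersion, AwesomeVersionException
import voluptuous as vol

with suppress(AwesomeVersionException):
    print(AwesomeVersion("2024.10.0") > AwesomeVersion("2023.8.2"))  # True

schema = vol.Schema({vol.Required("name"): str})
print(schema({"name": Path("demo").name}), logging.__name__)
```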
pytest.ini (deleted)
```diff
@@ -1,2 +0,0 @@
-[pytest]
-asyncio_mode = auto
```
requirements.txt
```diff
@@ -1,26 +1,29 @@
-aiodns==3.0.0
+aiodns==3.2.0
-aiohttp==3.8.5
+aiohttp==3.10.10
-async_timeout==4.0.3
 atomicwrites-homeassistant==1.4.1
-attrs==23.1.0
+attrs==24.2.0
-awesomeversion==23.8.0
+awesomeversion==24.6.0
-brotli==1.0.9
+brotli==1.1.0
-ciso8601==2.3.0
+ciso8601==2.3.1
-colorlog==6.7.0
+colorlog==6.8.2
-cpe==1.2.1
+cpe==1.3.1
-cryptography==41.0.3
+cryptography==43.0.1
-debugpy==1.6.7
+debugpy==1.8.7
-deepmerge==1.1.0
+deepmerge==2.0
-dirhash==0.2.1
+dirhash==0.5.0
-docker==6.1.3
+docker==7.1.0
-faust-cchardet==2.1.18
+faust-cchardet==2.1.19
-gitpython==3.1.32
+gitpython==3.1.43
-jinja2==3.1.2
+jinja2==3.1.4
-pulsectl==23.5.2
-pyudev==0.24.1
-ruamel.yaml==0.17.21
-securetar==2023.3.0
-sentry-sdk==1.29.2
-voluptuous==0.13.1
-dbus-fast==1.93.0
-typing_extensions==4.7.1
+orjson==3.10.7
+pulsectl==24.8.0
+pyudev==0.24.3
+PyYAML==6.0.2
+requests==2.32.3
+securetar==2024.2.1
+sentry-sdk==2.16.0
+setuptools==75.1.0
+voluptuous==0.15.2
+dbus-fast==2.24.3
+typing_extensions==4.12.2
+zlib-fast==0.2.0
```
requirements_tests.txt
```diff
@@ -1,16 +1,12 @@
-black==23.7.0
-coverage==7.3.0
-flake8-docstrings==1.7.0
-flake8==6.1.0
-pre-commit==3.3.3
-pydocstyle==6.3.0
-pylint==2.17.5
-pytest-aiohttp==1.0.4
-pytest-asyncio==0.18.3
-pytest-cov==4.1.0
-pytest-timeout==2.1.0
-pytest==7.4.0
-pyupgrade==3.10.1
-time-machine==2.12.0
-typing_extensions==4.7.1
-urllib3==2.0.4
+coverage==7.6.3
+pre-commit==4.0.1
+pylint==3.3.1
+pytest-aiohttp==1.0.5
+pytest-asyncio==0.23.6
+pytest-cov==5.0.0
+pytest-timeout==2.3.1
+pytest==8.3.3
+ruff==0.6.9
+time-machine==2.16.0
+typing_extensions==4.12.2
+urllib3==2.2.3
```
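Not part of the diff: the test requirements drop black, flake8, pydocstyle and pyupgrade in favor of ruff. A minimal sketch of driving the pinned toolchain from Python, assuming the `ruff` binary from requirements_tests.txt is on PATH (the target directories here are illustrative):

```python
import subprocess

# Lint: rule selection comes from [tool.ruff] lint.select in pyproject.toml
subprocess.run(["ruff", "check", "supervisor", "tests"], check=True)

# Format check: the role previously filled by black
subprocess.run(["ruff", "format", "--check", "supervisor", "tests"], check=True)
```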
```diff
@@ -15,7 +15,7 @@ do
     if [[ "${supervisor_state}" = "running" ]]; then
 
         # Check API
-        if bashio::supervisor.ping; then
+        if bashio::supervisor.ping > /dev/null; then
             failed_count=0
         else
             bashio::log.warning "Maybe found an issue on API healthy"
```
setup.cfg (31 changes, deleted)
```diff
@@ -1,31 +0,0 @@
-[isort]
-multi_line_output = 3
-include_trailing_comma=True
-force_grid_wrap=0
-line_length=88
-indent = "    "
-force_sort_within_sections = true
-sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
-default_section = THIRDPARTY
-forced_separate = tests
-combine_as_imports = true
-use_parentheses = true
-known_first_party = supervisor,tests
-
-[flake8]
-exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
-doctests = True
-max-line-length = 88
-# E501: line too long
-# W503: Line break occurred before a binary operator
-# E203: Whitespace before ':'
-# D202 No blank lines allowed after function docstring
-# W504 line break after binary operator
-ignore =
-    E501,
-    W503,
-    E203,
-    D202,
-    W504
-per-file-ignores =
-    tests/dbus_service_mocks/*.py: F821,F722
```
setup.py (76 changes)
```diff
@@ -1,60 +1,28 @@
 """Home Assistant Supervisor setup."""
 
+from pathlib import Path
+import re
+
 from setuptools import setup
 
-from supervisor.const import SUPERVISOR_VERSION
+RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")
+
+SUPERVISOR_DIR = Path(__file__).parent
+REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt"
+CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py"
+
+REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8")
+CONSTANTS = CONST_FILE.read_text(encoding="utf-8")
+
+
+def _get_supervisor_version():
+    for line in CONSTANTS.split("/n"):
+        if match := RE_SUPERVISOR_VERSION.match(line):
+            return match.group(1)
+    return "99.9.9dev"
+
 
 setup(
-    name="Supervisor",
-    version=SUPERVISOR_VERSION,
-    license="BSD License",
-    author="The Home Assistant Authors",
-    author_email="hello@home-assistant.io",
-    url="https://home-assistant.io/",
-    description=("Open-source private cloud os for Home-Assistant" " based on HassOS"),
-    long_description=(
-        "A maintainless private cloud operator system that"
-        "setup a Home-Assistant instance. Based on HassOS"
-    ),
-    classifiers=[
-        "Intended Audience :: End Users/Desktop",
-        "Intended Audience :: Developers",
-        "License :: OSI Approved :: Apache Software License",
-        "Operating System :: OS Independent",
-        "Topic :: Home Automation",
-        "Topic :: Software Development :: Libraries :: Python Modules",
-        "Topic :: Scientific/Engineering :: Atmospheric Science",
-        "Development Status :: 5 - Production/Stable",
-        "Intended Audience :: Developers",
-        "Programming Language :: Python :: 3.8",
-    ],
-    keywords=["docker", "home-assistant", "api"],
-    zip_safe=False,
-    platforms="any",
-    packages=[
-        "supervisor.addons",
-        "supervisor.api",
-        "supervisor.backups",
-        "supervisor.dbus.network",
-        "supervisor.dbus.network.setting",
-        "supervisor.dbus",
-        "supervisor.discovery.services",
-        "supervisor.discovery",
-        "supervisor.docker",
-        "supervisor.homeassistant",
-        "supervisor.host",
-        "supervisor.jobs",
-        "supervisor.misc",
-        "supervisor.plugins",
-        "supervisor.resolution.checks",
-        "supervisor.resolution.evaluations",
-        "supervisor.resolution.fixups",
-        "supervisor.resolution",
-        "supervisor.security",
-        "supervisor.services.modules",
-        "supervisor.services",
-        "supervisor.store",
-        "supervisor.utils",
-        "supervisor",
-    ],
-    include_package_data=True,
+    version=_get_supervisor_version(),
+    dependencies=REQUIREMENTS.split("/n"),
 )
```
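Not part of the diff: a standalone sketch of the regex-based version lookup above, with hypothetical `CONSTANTS` content. Note the new setup.py splits on the literal two-character string `"/n"` rather than a newline, so at build time the scan typically sees the whole file as one chunk and returns the `99.9.9dev` fallback; the sketch uses `splitlines()` to show the per-line behavior:

```python
import re

RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")

# Hypothetical stand-in for supervisor/const.py
CONSTANTS = '"""Constants file for Supervisor."""\nSUPERVISOR_VERSION = "2024.10.0"\n'


def get_version(constants: str) -> str:
    """Return the first SUPERVISOR_VERSION assignment, else a dev fallback."""
    for line in constants.splitlines():
        if match := RE_SUPERVISOR_VERSION.match(line):
            return match.group(1)
    return "99.9.9dev"


print(get_version(CONSTANTS))  # prints "2024.10.0" with the quotes included
```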
supervisor/__main__.py
```diff
@@ -1,11 +1,20 @@
 """Main file for Supervisor."""
 import asyncio
 from concurrent.futures import ThreadPoolExecutor
 import logging
 from pathlib import Path
 import sys
 
-from supervisor import bootstrap
+import zlib_fast
+
+# Enable fast zlib before importing supervisor
+zlib_fast.enable()
+
+from supervisor import bootstrap  # pylint: disable=wrong-import-position # noqa: E402
+from supervisor.utils.logging import (  # pylint: disable=wrong-import-position # noqa: E402
+    activate_log_queue_handler,
+)
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -38,6 +47,8 @@ if __name__ == "__main__":
     executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
     loop.set_default_executor(executor)
 
+    activate_log_queue_handler()
+
     _LOGGER.info("Initializing Supervisor setup")
     coresys = loop.run_until_complete(bootstrap.initialize_coresys())
     loop.set_debug(coresys.config.debug)
```
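`activate_log_queue_handler` is a Supervisor-internal helper; a minimal sketch of the standard-library pattern it resembles (moving log emission off the calling thread via a queue so handler I/O cannot block the event loop), assuming nothing beyond the stdlib:

```python
import logging
from logging.handlers import QueueHandler, QueueListener
import queue

logging.basicConfig(level=logging.INFO)
log_queue: queue.SimpleQueue = queue.SimpleQueue()
root = logging.getLogger()

# Existing handlers keep doing the (potentially blocking) I/O,
# but now on the listener's thread instead of the caller's.
listener = QueueListener(log_queue, *root.handlers, respect_handler_level=True)
root.handlers = [QueueHandler(log_queue)]
listener.start()

logging.getLogger(__name__).info("logged via the queue")
listener.stop()
```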
supervisor/addons/__init__.py
```diff
@@ -1,481 +1 @@
 """Init file for Supervisor add-ons."""
-import asyncio
-from collections.abc import Awaitable
-from contextlib import suppress
-import logging
-import tarfile
-from typing import Union
-
-from ..const import AddonBoot, AddonStartup, AddonState
-from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import (
-    AddonConfigurationError,
-    AddonsError,
-    AddonsJobError,
-    AddonsNotSupportedError,
-    CoreDNSError,
-    DockerAPIError,
-    DockerError,
-    DockerNotFound,
-    HomeAssistantAPIError,
-    HostAppArmorError,
-)
-from ..jobs.decorator import Job, JobCondition
-from ..resolution.const import ContextType, IssueType, SuggestionType
-from ..store.addon import AddonStore
-from ..utils import check_exception_chain
-from ..utils.sentry import capture_exception
-from .addon import Addon
-from .const import ADDON_UPDATE_CONDITIONS
-from .data import AddonsData
-
-_LOGGER: logging.Logger = logging.getLogger(__name__)
-
-AnyAddon = Union[Addon, AddonStore]
-
-
-class AddonManager(CoreSysAttributes):
-    """Manage add-ons inside Supervisor."""
-
-    def __init__(self, coresys: CoreSys):
-        """Initialize Docker base wrapper."""
-        self.coresys: CoreSys = coresys
-        self.data: AddonsData = AddonsData(coresys)
-        self.local: dict[str, Addon] = {}
-        self.store: dict[str, AddonStore] = {}
-
-    @property
-    def all(self) -> list[AnyAddon]:
-        """Return a list of all add-ons."""
-        addons: dict[str, AnyAddon] = {**self.store, **self.local}
-        return list(addons.values())
-
-    @property
-    def installed(self) -> list[Addon]:
-        """Return a list of all installed add-ons."""
-        return list(self.local.values())
-
-    def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
-        """Return an add-on from slug.
-
-        Prio:
-          1 - Local
-          2 - Store
-        """
-        if addon_slug in self.local:
-            return self.local[addon_slug]
-        if not local_only:
-            return self.store.get(addon_slug)
-        return None
-
-    def from_token(self, token: str) -> Addon | None:
-        """Return an add-on from Supervisor token."""
-        for addon in self.installed:
-            if token == addon.supervisor_token:
-                return addon
-        return None
-
-    async def load(self) -> None:
-        """Start up add-on management."""
-        tasks = []
-        for slug in self.data.system:
-            addon = self.local[slug] = Addon(self.coresys, slug)
-            tasks.append(self.sys_create_task(addon.load()))
-
-        # Run initial tasks
-        _LOGGER.info("Found %d installed add-ons", len(tasks))
-        if tasks:
-            await asyncio.wait(tasks)
-
-        # Sync DNS
-        await self.sync_dns()
-
-    async def boot(self, stage: AddonStartup) -> None:
-        """Boot add-ons with mode auto."""
-        tasks: list[Addon] = []
-        for addon in self.installed:
-            if addon.boot != AddonBoot.AUTO or addon.startup != stage:
-                continue
-            tasks.append(addon)
-
-        # Evaluate add-ons which need to be started
-        _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
-        if not tasks:
-            return
-
-        # Start Add-ons sequential
-        # avoid issue on slow IO
-        # Config.wait_boot is deprecated. Until addons update with healthchecks,
-        # add a sleep task for it to keep the same minimum amount of wait time
-        wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
-        for addon in tasks:
-            try:
-                if start_task := await addon.start():
-                    wait_boot.append(start_task)
-            except AddonsError as err:
-                # Check if there is an system/user issue
-                if check_exception_chain(
-                    err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
-                ):
-                    addon.boot = AddonBoot.MANUAL
-                    addon.save_persist()
-            except Exception as err:  # pylint: disable=broad-except
-                capture_exception(err)
-            else:
-                continue
-
-            _LOGGER.warning("Can't start Add-on %s", addon.slug)
-
-        # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
-        await asyncio.gather(*wait_boot, return_exceptions=True)
-
-    async def shutdown(self, stage: AddonStartup) -> None:
-        """Shutdown addons."""
-        tasks: list[Addon] = []
-        for addon in self.installed:
-            if addon.state != AddonState.STARTED or addon.startup != stage:
-                continue
-            tasks.append(addon)
-
-        # Evaluate add-ons which need to be stopped
-        _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
-        if not tasks:
-            return
-
-        # Stop Add-ons sequential
-        # avoid issue on slow IO
-        for addon in tasks:
-            try:
-                await addon.stop()
-            except Exception as err:  # pylint: disable=broad-except
-                _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
-                capture_exception(err)
-
-    @Job(
-        name="addon_manager_install",
-        conditions=ADDON_UPDATE_CONDITIONS,
-        on_condition=AddonsJobError,
-    )
-    async def install(self, slug: str) -> None:
-        """Install an add-on."""
-        if job := self.sys_jobs.get_job():
-            job.reference = slug
-
-        if slug in self.local:
-            raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
-        store = self.store.get(slug)
-
-        if not store:
-            raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
-
-        store.validate_availability()
-
-        self.data.install(store)
-        addon = Addon(self.coresys, slug)
-        await addon.load()
-
-        if not addon.path_data.is_dir():
-            _LOGGER.info(
-                "Creating Home Assistant add-on data folder %s", addon.path_data
-            )
-            addon.path_data.mkdir()
-
-        # Setup/Fix AppArmor profile
-        await addon.install_apparmor()
-
-        try:
-            await addon.instance.install(store.version, store.image, arch=addon.arch)
-        except DockerError as err:
-            self.data.uninstall(addon)
-            raise AddonsError() from err
-
-        self.local[slug] = addon
-
-        # Reload ingress tokens
-        if addon.with_ingress:
-            await self.sys_ingress.reload()
-
-        _LOGGER.info("Add-on '%s' successfully installed", slug)
-
-    async def uninstall(self, slug: str) -> None:
-        """Remove an add-on."""
-        if slug not in self.local:
-            _LOGGER.warning("Add-on %s is not installed", slug)
-            return
-        addon = self.local[slug]
-
-        try:
-            await addon.instance.remove()
-        except DockerError as err:
-            raise AddonsError() from err
-
-        addon.state = AddonState.UNKNOWN
-
-        await addon.unload()
-
-        # Cleanup audio settings
-        if addon.path_pulse.exists():
-            with suppress(OSError):
-                addon.path_pulse.unlink()
-
-        # Cleanup AppArmor profile
-        with suppress(HostAppArmorError):
-            await addon.uninstall_apparmor()
-
-        # Cleanup Ingress panel from sidebar
-        if addon.ingress_panel:
-            addon.ingress_panel = False
-            with suppress(HomeAssistantAPIError):
-                await self.sys_ingress.update_hass_panel(addon)
-
-        # Cleanup Ingress dynamic port assignment
-        if addon.with_ingress:
-            self.sys_create_task(self.sys_ingress.reload())
-            self.sys_ingress.del_dynamic_port(slug)
-
-        # Cleanup discovery data
-        for message in self.sys_discovery.list_messages:
-            if message.addon != addon.slug:
-                continue
-            self.sys_discovery.remove(message)
-
-        # Cleanup services data
-        for service in self.sys_services.list_services:
-            if addon.slug not in service.active:
-                continue
-            service.del_service_data(addon)
-
-        self.data.uninstall(addon)
-        self.local.pop(slug)
-
-        _LOGGER.info("Add-on '%s' successfully removed", slug)
-
-    @Job(
-        name="addon_manager_update",
-        conditions=ADDON_UPDATE_CONDITIONS,
-        on_condition=AddonsJobError,
-    )
-    async def update(
-        self, slug: str, backup: bool | None = False
-    ) -> Awaitable[None] | None:
-        """Update add-on.
-
-        Returns a coroutine that completes when addon has state 'started' (see addon.start)
-        if addon is started after update. Else nothing is returned.
-        """
-        if job := self.sys_jobs.get_job():
-            job.reference = slug
-
-        if slug not in self.local:
-            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
-        addon = self.local[slug]
-
-        if addon.is_detached:
-            raise AddonsError(
-                f"Add-on {slug} is not available inside store", _LOGGER.error
-            )
-        store = self.store[slug]
-
-        if addon.version == store.version:
-            raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
-
-        # Check if available, Maybe something have changed
-        store.validate_availability()
-
-        if backup:
-            await self.sys_backups.do_backup_partial(
-                name=f"addon_{addon.slug}_{addon.version}",
-                homeassistant=False,
-                addons=[addon.slug],
-            )
-
-        # Update instance
-        last_state: AddonState = addon.state
-        old_image = addon.image
-        try:
-            await addon.instance.update(store.version, store.image)
-        except DockerError as err:
-            raise AddonsError() from err
-
-        _LOGGER.info("Add-on '%s' successfully updated", slug)
-        self.data.update(store)
-
-        # Cleanup
-        with suppress(DockerError):
-            await addon.instance.cleanup(old_image=old_image)
-
-        # Setup/Fix AppArmor profile
-        await addon.install_apparmor()
-
-        # restore state
-        return (
-            await addon.start()
-            if last_state in [AddonState.STARTED, AddonState.STARTUP]
-            else None
-        )
-
-    @Job(
-        name="addon_manager_rebuild",
-        conditions=[
-            JobCondition.FREE_SPACE,
-            JobCondition.INTERNET_HOST,
-            JobCondition.HEALTHY,
-        ],
-        on_condition=AddonsJobError,
-    )
-    async def rebuild(self, slug: str) -> Awaitable[None] | None:
-        """Perform a rebuild of local build add-on.
-
-        Returns a coroutine that completes when addon has state 'started' (see addon.start)
-        if addon is started after rebuild. Else nothing is returned.
-        """
-        if job := self.sys_jobs.get_job():
-            job.reference = slug
-
-        if slug not in self.local:
-            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
-        addon = self.local[slug]
-
-        if addon.is_detached:
-            raise AddonsError(
-                f"Add-on {slug} is not available inside store", _LOGGER.error
-            )
-        store = self.store[slug]
-
-        # Check if a rebuild is possible now
-        if addon.version != store.version:
-            raise AddonsError(
-                "Version changed, use Update instead Rebuild", _LOGGER.error
-            )
-        if not addon.need_build:
-            raise AddonsNotSupportedError(
-                "Can't rebuild a image based add-on", _LOGGER.error
-            )
-
-        # remove docker container but not addon config
-        last_state: AddonState = addon.state
-        try:
-            await addon.instance.remove()
-            await addon.instance.install(addon.version)
-        except DockerError as err:
-            raise AddonsError() from err
-
-        self.data.update(store)
-        _LOGGER.info("Add-on '%s' successfully rebuilt", slug)
-
-        # restore state
-        return (
-            await addon.start()
-            if last_state in [AddonState.STARTED, AddonState.STARTUP]
-            else None
-        )
-
-    @Job(
-        name="addon_manager_restore",
-        conditions=[
-            JobCondition.FREE_SPACE,
-            JobCondition.INTERNET_HOST,
-            JobCondition.HEALTHY,
-        ],
-        on_condition=AddonsJobError,
-    )
-    async def restore(
-        self, slug: str, tar_file: tarfile.TarFile
-    ) -> Awaitable[None] | None:
-        """Restore state of an add-on.
-
-        Returns a coroutine that completes when addon has state 'started' (see addon.start)
-        if addon is started after restore. Else nothing is returned.
-        """
-        if job := self.sys_jobs.get_job():
-            job.reference = slug
-
-        if slug not in self.local:
-            _LOGGER.debug("Add-on %s is not local available for restore", slug)
-            addon = Addon(self.coresys, slug)
-        else:
-            _LOGGER.debug("Add-on %s is local available for restore", slug)
-            addon = self.local[slug]
-
-        wait_for_start = await addon.restore(tar_file)
-
-        # Check if new
-        if slug not in self.local:
-            _LOGGER.info("Detect new Add-on after restore %s", slug)
-            self.local[slug] = addon
-
-        # Update ingress
-        if addon.with_ingress:
-            await self.sys_ingress.reload()
-            with suppress(HomeAssistantAPIError):
-                await self.sys_ingress.update_hass_panel(addon)
-
-        return wait_for_start
-
-    @Job(
-        name="addon_manager_repair",
-        conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
-    )
-    async def repair(self) -> None:
-        """Repair local add-ons."""
-        needs_repair: list[Addon] = []
-
-        # Evaluate Add-ons to repair
-        for addon in self.installed:
-            if await addon.instance.exists():
-                continue
-            needs_repair.append(addon)
-
-        _LOGGER.info("Found %d add-ons to repair", len(needs_repair))
-        if not needs_repair:
-            return
-
-        for addon in needs_repair:
-            _LOGGER.info("Repairing for add-on: %s", addon.slug)
-            with suppress(DockerError, KeyError):
-                # Need pull a image again
-                if not addon.need_build:
-                    await addon.instance.install(addon.version, addon.image)
-                    continue
-
-                # Need local lookup
-                if addon.need_build and not addon.is_detached:
-                    store = self.store[addon.slug]
-                    # If this add-on is available for rebuild
-                    if addon.version == store.version:
-                        await addon.instance.install(addon.version, addon.image)
-                        continue
-
-            _LOGGER.error("Can't repair %s", addon.slug)
-            with suppress(AddonsError):
-                await self.uninstall(addon.slug)
-
-    async def sync_dns(self) -> None:
-        """Sync add-ons DNS names."""
-        # Update hosts
-        add_host_coros: list[Awaitable[None]] = []
-        for addon in self.installed:
-            try:
-                if not await addon.instance.is_running():
-                    continue
-            except DockerError as err:
-                _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
-                self.sys_resolution.create_issue(
-                    IssueType.CORRUPT_DOCKER,
-                    ContextType.ADDON,
-                    reference=addon.slug,
-                    suggestions=[SuggestionType.EXECUTE_REPAIR],
-                )
-                capture_exception(err)
-            else:
-                add_host_coros.append(
-                    self.sys_plugins.dns.add_host(
-                        ipv4=addon.ip_address, names=[addon.hostname], write=False
-                    )
-                )
-
-        await asyncio.gather(*add_host_coros)
-
-        # Write hosts files
-        with suppress(CoreDNSError):
-            await self.sys_plugins.dns.write_hosts()
```
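`check_exception_chain`, used in `boot()` above, is a Supervisor-internal utility; a hedged approximation of the idea (not the exact implementation), walking implicit `__context__` links to see whether any exception in the chain matches a given type or tuple of types:

```python
def check_exception_chain(err: BaseException, object_type) -> bool:
    """Return True if err or anything in its context chain matches object_type."""
    if isinstance(err, object_type):
        return True
    if err.__context__ is None:
        return False
    return check_exception_chain(err.__context__, object_type)


try:
    try:
        raise KeyError("inner")
    except KeyError as inner:
        raise RuntimeError("outer") from inner
except RuntimeError as outer:
    print(check_exception_chain(outer, (KeyError, ValueError)))  # True
```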
|||||||
@@ -1,8 +1,11 @@
|
|||||||
"""Init file for Supervisor add-ons."""
|
"""Init file for Supervisor add-ons."""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
from collections.abc import Awaitable
|
from collections.abc import Awaitable
|
||||||
from contextlib import suppress
|
from contextlib import suppress
|
||||||
from copy import deepcopy
|
from copy import deepcopy
|
||||||
|
from datetime import datetime
|
||||||
|
import errno
|
||||||
from ipaddress import IPv4Address
|
from ipaddress import IPv4Address
|
||||||
import logging
|
import logging
|
||||||
from pathlib import Path, PurePath
|
from pathlib import Path, PurePath
|
||||||
@@ -14,11 +17,14 @@ from tempfile import TemporaryDirectory
|
|||||||
from typing import Any, Final
|
from typing import Any, Final
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
|
from awesomeversion import AwesomeVersionCompareException
|
||||||
from deepmerge import Merger
|
from deepmerge import Merger
|
||||||
from securetar import atomic_contents_add, secure_path
|
from securetar import atomic_contents_add, secure_path
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
from voluptuous.humanize import humanize_error
|
from voluptuous.humanize import humanize_error
|
||||||
|
|
||||||
|
from supervisor.utils.dt import utc_from_timestamp
|
||||||
|
|
||||||
from ..bus import EventListener
|
from ..bus import EventListener
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_ACCESS_TOKEN,
|
ATTR_ACCESS_TOKEN,
|
||||||
@@ -41,13 +47,17 @@ from ..const import (
|
|||||||
ATTR_SLUG,
|
ATTR_SLUG,
|
||||||
ATTR_STATE,
|
ATTR_STATE,
|
||||||
ATTR_SYSTEM,
|
ATTR_SYSTEM,
|
||||||
|
ATTR_SYSTEM_MANAGED,
|
||||||
|
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
|
||||||
ATTR_TYPE,
|
ATTR_TYPE,
|
||||||
ATTR_USER,
|
ATTR_USER,
|
||||||
ATTR_UUID,
|
ATTR_UUID,
|
||||||
ATTR_VERSION,
|
ATTR_VERSION,
|
||||||
|
ATTR_VERSION_TIMESTAMP,
|
||||||
ATTR_WATCHDOG,
|
ATTR_WATCHDOG,
|
||||||
DNS_SUFFIX,
|
DNS_SUFFIX,
|
||||||
AddonBoot,
|
AddonBoot,
|
||||||
|
AddonBootConfig,
|
||||||
AddonStartup,
|
AddonStartup,
|
||||||
AddonState,
|
AddonState,
|
||||||
BusEvent,
|
BusEvent,
|
||||||
@@ -64,12 +74,15 @@ from ..exceptions import (
|
|||||||
AddonsNotSupportedError,
|
AddonsNotSupportedError,
|
||||||
ConfigurationFileError,
|
ConfigurationFileError,
|
||||||
DockerError,
|
DockerError,
|
||||||
|
HomeAssistantAPIError,
|
||||||
HostAppArmorError,
|
HostAppArmorError,
|
||||||
)
|
)
|
||||||
from ..hardware.data import Device
|
from ..hardware.data import Device
|
||||||
from ..homeassistant.const import WSEvent, WSType
|
from ..homeassistant.const import WSEvent, WSType
|
||||||
from ..jobs.const import JobExecutionLimit
|
from ..jobs.const import JobExecutionLimit
|
||||||
from ..jobs.decorator import Job
|
from ..jobs.decorator import Job
|
||||||
|
from ..resolution.const import UnhealthyReason
|
||||||
|
from ..store.addon import AddonStore
|
||||||
from ..utils import check_port
|
from ..utils import check_port
|
||||||
from ..utils.apparmor import adjust_profile
|
from ..utils.apparmor import adjust_profile
|
||||||
from ..utils.json import read_json_file, write_json_file
|
from ..utils.json import read_json_file, write_json_file
|
||||||
@@ -80,6 +93,7 @@ from .const import (
|
|||||||
WATCHDOG_THROTTLE_MAX_CALLS,
|
WATCHDOG_THROTTLE_MAX_CALLS,
|
||||||
WATCHDOG_THROTTLE_PERIOD,
|
WATCHDOG_THROTTLE_PERIOD,
|
||||||
AddonBackupMode,
|
AddonBackupMode,
|
||||||
|
MappingType,
|
||||||
)
|
)
|
||||||
from .model import AddonModel, Data
|
from .model import AddonModel, Data
|
||||||
from .options import AddonOptions
|
from .options import AddonOptions
|
||||||
@@ -170,6 +184,9 @@ class Addon(AddonModel):
|
|||||||
|
|
||||||
async def load(self) -> None:
|
async def load(self) -> None:
|
||||||
"""Async initialize of object."""
|
"""Async initialize of object."""
|
||||||
|
if self.is_detached:
|
||||||
|
await super().refresh_path_cache()
|
||||||
|
|
||||||
self._listeners.append(
|
self._listeners.append(
|
||||||
self.sys_bus.register_event(
|
self.sys_bus.register_event(
|
||||||
BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed
|
BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed
|
||||||
@@ -181,9 +198,21 @@ class Addon(AddonModel):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
with suppress(DockerError):
|
await self._check_ingress_port()
|
||||||
|
default_image = self._image(self.data)
|
||||||
|
try:
|
||||||
await self.instance.attach(version=self.version)
|
await self.instance.attach(version=self.version)
|
||||||
|
|
||||||
|
# Ensure we are using correct image for this system
|
||||||
|
await self.instance.check_image(self.version, default_image, self.arch)
|
||||||
|
except DockerError:
|
||||||
|
_LOGGER.info("No %s addon Docker image %s found", self.slug, self.image)
|
||||||
|
with suppress(DockerError):
|
||||||
|
await self.instance.install(self.version, default_image, arch=self.arch)
|
||||||
|
|
||||||
|
self.persist[ATTR_IMAGE] = default_image
|
||||||
|
self.save_persist()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def ip_address(self) -> IPv4Address:
|
def ip_address(self) -> IPv4Address:
|
||||||
"""Return IP of add-on instance."""
|
"""Return IP of add-on instance."""
|
||||||
@@ -199,6 +228,11 @@ class Addon(AddonModel):
|
|||||||
"""Return add-on data from store."""
|
"""Return add-on data from store."""
|
||||||
return self.sys_store.data.addons.get(self.slug, self.data)
|
return self.sys_store.data.addons.get(self.slug, self.data)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def addon_store(self) -> AddonStore | None:
|
||||||
|
"""Return store representation of addon."""
|
||||||
|
return self.sys_addons.store.get(self.slug)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def persist(self) -> Data:
|
def persist(self) -> Data:
|
||||||
"""Return add-on data/config."""
|
"""Return add-on data/config."""
|
||||||
@@ -214,6 +248,34 @@ class Addon(AddonModel):
|
|||||||
"""Return True if add-on is detached."""
|
"""Return True if add-on is detached."""
|
||||||
return self.slug not in self.sys_store.data.addons
|
return self.slug not in self.sys_store.data.addons
|
||||||
|
|
||||||
|
@property
|
||||||
|
def with_icon(self) -> bool:
|
||||||
|
"""Return True if an icon exists."""
|
||||||
|
if self.is_detached:
|
||||||
|
return super().with_icon
|
||||||
|
return self.addon_store.with_icon
|
||||||
|
|
||||||
|
@property
|
||||||
|
def with_logo(self) -> bool:
|
||||||
|
"""Return True if a logo exists."""
|
||||||
|
if self.is_detached:
|
||||||
|
return super().with_logo
|
||||||
|
return self.addon_store.with_logo
|
||||||
|
|
||||||
|
@property
|
||||||
|
def with_changelog(self) -> bool:
|
||||||
|
"""Return True if a changelog exists."""
|
||||||
|
if self.is_detached:
|
||||||
|
return super().with_changelog
|
||||||
|
return self.addon_store.with_changelog
|
||||||
|
|
||||||
|
@property
|
||||||
|
def with_documentation(self) -> bool:
|
||||||
|
"""Return True if a documentation exists."""
|
||||||
|
if self.is_detached:
|
||||||
|
return super().with_documentation
|
||||||
|
return self.addon_store.with_documentation
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def available(self) -> bool:
|
def available(self) -> bool:
|
||||||
"""Return True if this add-on is available on this platform."""
|
"""Return True if this add-on is available on this platform."""
|
||||||
@@ -250,7 +312,9 @@ class Addon(AddonModel):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def boot(self) -> AddonBoot:
|
def boot(self) -> AddonBoot:
|
||||||
"""Return boot config with prio local settings."""
|
"""Return boot config with prio local settings unless config is forced."""
|
||||||
|
if self.boot_config == AddonBootConfig.MANUAL_ONLY:
|
||||||
|
return super().boot
|
||||||
return self.persist.get(ATTR_BOOT, super().boot)
|
return self.persist.get(ATTR_BOOT, super().boot)
|
||||||
|
|
||||||
@boot.setter
|
@boot.setter
|
||||||
@@ -268,6 +332,28 @@ class Addon(AddonModel):
|
|||||||
"""Set auto update."""
|
"""Set auto update."""
|
||||||
self.persist[ATTR_AUTO_UPDATE] = value
|
self.persist[ATTR_AUTO_UPDATE] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def auto_update_available(self) -> bool:
|
||||||
|
"""Return if it is safe to auto update addon."""
|
||||||
|
if not self.need_update or not self.auto_update:
|
||||||
|
return False
|
||||||
|
|
||||||
|
for version in self.breaking_versions:
|
||||||
|
try:
|
||||||
|
# Must update to latest so if true update crosses a breaking version
|
||||||
|
if self.version < version:
|
||||||
|
return False
|
||||||
|
except AwesomeVersionCompareException:
|
||||||
|
# If version scheme changed, we may get compare exception
|
||||||
|
# If latest version >= breaking version then assume update will
|
||||||
|
# cross it as the version scheme changes
|
||||||
|
# If both versions have compare exception, ignore as its in the past
|
||||||
|
with suppress(AwesomeVersionCompareException):
|
||||||
|
if self.latest_version >= version:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def watchdog(self) -> bool:
|
def watchdog(self) -> bool:
|
||||||
"""Return True if watchdog is enable."""
|
"""Return True if watchdog is enable."""
|
||||||
@@ -283,6 +369,37 @@ class Addon(AddonModel):
|
|||||||
else:
|
else:
|
||||||
self.persist[ATTR_WATCHDOG] = value
|
self.persist[ATTR_WATCHDOG] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def system_managed(self) -> bool:
|
||||||
|
"""Return True if addon is managed by Home Assistant."""
|
||||||
|
return self.persist[ATTR_SYSTEM_MANAGED]
|
||||||
|
|
||||||
|
@system_managed.setter
|
||||||
|
def system_managed(self, value: bool) -> None:
|
||||||
|
"""Set system managed enable/disable."""
|
||||||
|
if not value and self.system_managed_config_entry:
|
||||||
|
self.system_managed_config_entry = None
|
||||||
|
|
||||||
|
self.persist[ATTR_SYSTEM_MANAGED] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def system_managed_config_entry(self) -> str | None:
|
||||||
|
"""Return id of config entry managing this addon (if any)."""
|
||||||
|
if not self.system_managed:
|
||||||
|
return None
|
||||||
|
return self.persist.get(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY)
|
||||||
|
|
||||||
|
@system_managed_config_entry.setter
|
||||||
|
def system_managed_config_entry(self, value: str | None) -> None:
|
||||||
|
"""Set ID of config entry managing this addon."""
|
||||||
|
if not self.system_managed:
|
||||||
|
_LOGGER.warning(
|
||||||
|
"Ignoring system managed config entry for %s because it is not system managed",
|
||||||
|
self.slug,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.persist[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] = value
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def uuid(self) -> str:
|
def uuid(self) -> str:
|
||||||
"""Return an API token for this add-on."""
|
"""Return an API token for this add-on."""
|
||||||
@@ -310,6 +427,11 @@ class Addon(AddonModel):
|
|||||||
"""Return version of add-on."""
|
"""Return version of add-on."""
|
||||||
return self.data_store[ATTR_VERSION]
|
return self.data_store[ATTR_VERSION]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def latest_version_timestamp(self) -> datetime:
|
||||||
|
"""Return when latest version was first seen."""
|
||||||
|
return utc_from_timestamp(self.data_store[ATTR_VERSION_TIMESTAMP])
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def protected(self) -> bool:
|
def protected(self) -> bool:
|
||||||
"""Return if add-on is in protected mode."""
|
"""Return if add-on is in protected mode."""
|
||||||
@@ -387,7 +509,7 @@ class Addon(AddonModel):
|
|||||||
|
|
||||||
port = self.data[ATTR_INGRESS_PORT]
|
port = self.data[ATTR_INGRESS_PORT]
|
||||||
if port == 0:
|
if port == 0:
|
||||||
return self.sys_ingress.get_dynamic_port(self.slug)
|
raise RuntimeError(f"No port set for add-on {self.slug}")
|
||||||
return port
|
return port
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -453,6 +575,21 @@ class Addon(AddonModel):
|
|||||||
"""Return add-on data path external for Docker."""
|
"""Return add-on data path external for Docker."""
|
||||||
return PurePath(self.sys_config.path_extern_addons_data, self.slug)
|
return PurePath(self.sys_config.path_extern_addons_data, self.slug)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def addon_config_used(self) -> bool:
|
||||||
|
"""Add-on is using its public config folder."""
|
||||||
|
return MappingType.ADDON_CONFIG in self.map_volumes
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_config(self) -> Path:
|
||||||
|
"""Return add-on config path inside Supervisor."""
|
||||||
|
return Path(self.sys_config.path_addon_configs, self.slug)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_extern_config(self) -> PurePath:
|
||||||
|
"""Return add-on config path external for Docker."""
|
||||||
|
return PurePath(self.sys_config.path_extern_addon_configs, self.slug)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path_options(self) -> Path:
|
def path_options(self) -> Path:
|
||||||
"""Return path to add-on options."""
|
"""Return path to add-on options."""
|
||||||
@@ -516,7 +653,7 @@ class Addon(AddonModel):
|
|||||||
|
|
||||||
# TCP monitoring
|
# TCP monitoring
|
||||||
if s_prefix == "tcp":
|
if s_prefix == "tcp":
|
||||||
return await self.sys_run_in_executor(check_port, self.ip_address, port)
|
return await check_port(self.ip_address, port)
|
||||||
|
|
||||||
# lookup the correct protocol from config
|
# lookup the correct protocol from config
|
||||||
if t_proto:
|
if t_proto:
|
||||||
@@ -532,7 +669,7 @@ class Addon(AddonModel):
             ) as req:
                 if req.status < 300:
                     return True
-        except (asyncio.TimeoutError, aiohttp.ClientError):
+        except (TimeoutError, aiohttp.ClientError):
             pass
 
         return False
@@ -559,6 +696,11 @@ class Addon(AddonModel):
 
             raise AddonConfigurationError()
 
+    @Job(
+        name="addon_unload",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
     async def unload(self) -> None:
         """Unload add-on and remove data."""
         if self._startup_task:
@@ -570,11 +712,192 @@ class Addon(AddonModel):
         for listener in self._listeners:
             self.sys_bus.remove_listener(listener)
 
-        if not self.path_data.is_dir():
+        if self.path_data.is_dir():
+            _LOGGER.info("Removing add-on data folder %s", self.path_data)
+            await remove_data(self.path_data)
+
+    async def _check_ingress_port(self):
+        """Assign a ingress port if dynamic port selection is used."""
+        if not self.with_ingress:
             return
 
-        _LOGGER.info("Removing add-on data folder %s", self.path_data)
-        await remove_data(self.path_data)
+        if self.data[ATTR_INGRESS_PORT] == 0:
+            self.data[ATTR_INGRESS_PORT] = await self.sys_ingress.get_dynamic_port(
+                self.slug
+            )
+
+    @Job(
+        name="addon_install",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
+    async def install(self) -> None:
+        """Install and setup this addon."""
+        self.sys_addons.data.install(self.addon_store)
+        await self.load()
+
+        if not self.path_data.is_dir():
+            _LOGGER.info(
+                "Creating Home Assistant add-on data folder %s", self.path_data
+            )
+            self.path_data.mkdir()
+
+        # Setup/Fix AppArmor profile
+        await self.install_apparmor()
+
+        # Install image
+        try:
+            await self.instance.install(
+                self.latest_version, self.addon_store.image, arch=self.arch
+            )
+        except DockerError as err:
+            self.sys_addons.data.uninstall(self)
+            raise AddonsError() from err
+
+        # Add to addon manager
+        self.sys_addons.local[self.slug] = self
+
+        # Reload ingress tokens
+        if self.with_ingress:
+            await self.sys_ingress.reload()
+
+    @Job(
+        name="addon_uninstall",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
+    async def uninstall(
+        self, *, remove_config: bool, remove_image: bool = True
+    ) -> None:
+        """Uninstall and cleanup this addon."""
+        try:
+            await self.instance.remove(remove_image=remove_image)
+        except DockerError as err:
+            raise AddonsError() from err
+
+        self.state = AddonState.UNKNOWN
+
+        await self.unload()
+
+        # Remove config if present and requested
+        if self.addon_config_used and remove_config:
+            await remove_data(self.path_config)
+
+        # Cleanup audio settings
+        if self.path_pulse.exists():
+            with suppress(OSError):
+                self.path_pulse.unlink()
+
+        # Cleanup AppArmor profile
+        with suppress(HostAppArmorError):
+            await self.uninstall_apparmor()
+
+        # Cleanup Ingress panel from sidebar
+        if self.ingress_panel:
+            self.ingress_panel = False
+            with suppress(HomeAssistantAPIError):
+                await self.sys_ingress.update_hass_panel(self)
+
+        # Cleanup Ingress dynamic port assignment
+        if self.with_ingress:
+            self.sys_create_task(self.sys_ingress.reload())
+            self.sys_ingress.del_dynamic_port(self.slug)
+
+        # Cleanup discovery data
+        for message in self.sys_discovery.list_messages:
+            if message.addon != self.slug:
+                continue
+            self.sys_discovery.remove(message)
+
+        # Cleanup services data
+        for service in self.sys_services.list_services:
+            if self.slug not in service.active:
+                continue
+            service.del_service_data(self)
+
+        # Remove from addon manager
+        self.sys_addons.data.uninstall(self)
+        self.sys_addons.local.pop(self.slug)
+
+    @Job(
+        name="addon_update",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
+    async def update(self) -> asyncio.Task | None:
+        """Update this addon to latest version.
+
+        Returns a Task that completes when addon has state 'started' (see start)
+        if it was running. Else nothing is returned.
+        """
+        old_image = self.image
+        # Cache data to prevent races with other updates to global
+        store = self.addon_store.clone()
+
+        try:
+            await self.instance.update(store.version, store.image, arch=self.arch)
+        except DockerError as err:
+            raise AddonsError() from err
+
+        # Stop the addon if running
+        if (last_state := self.state) in {AddonState.STARTED, AddonState.STARTUP}:
+            await self.stop()
+
+        try:
+            _LOGGER.info("Add-on '%s' successfully updated", self.slug)
+            self.sys_addons.data.update(store)
+            await self._check_ingress_port()
+
+            # Cleanup
+            with suppress(DockerError):
+                await self.instance.cleanup(
+                    old_image=old_image, image=store.image, version=store.version
+                )
+
+            # Setup/Fix AppArmor profile
+            await self.install_apparmor()
+
+        finally:
+            # restore state. Return Task for caller if no exception
+            out = (
+                await self.start()
+                if last_state in {AddonState.STARTED, AddonState.STARTUP}
+                else None
+            )
+        return out
+
+    @Job(
+        name="addon_rebuild",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
+    async def rebuild(self) -> asyncio.Task | None:
+        """Rebuild this addons container and image.
+
+        Returns a Task that completes when addon has state 'started' (see start)
+        if it was running. Else nothing is returned.
+        """
+        last_state: AddonState = self.state
+        try:
+            # remove docker container but not addon config
+            try:
+                await self.instance.remove()
+                await self.instance.install(self.version)
+            except DockerError as err:
+                raise AddonsError() from err
+
+            self.sys_addons.data.update(self.addon_store)
+            await self._check_ingress_port()
+            _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)
+
+        finally:
+            # restore state
+            out = (
+                await self.start()
+                if last_state in [AddonState.STARTED, AddonState.STARTUP]
+                else None
+            )
+        return out
+
     def write_pulse(self) -> None:
         """Write asound config to file and return True on success."""
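
The @Job decorators added throughout this file serialize lifecycle operations per add-on group. Below is a minimal sketch of the GROUP_ONCE idea using only the standard library — Job and JobExecutionLimit are Supervisor internals, so an asyncio.Lock stands in for them here; this is an illustration of the concurrency contract, not the real implementation.

import asyncio
import functools

def group_once(method):
    """Allow only one decorated operation per instance at a time."""
    @functools.wraps(method)
    async def wrapper(self, *args, **kwargs):
        if self._job_lock.locked():
            raise RuntimeError(f"{method.__name__} already running")
        async with self._job_lock:
            return await method(self, *args, **kwargs)
    return wrapper

class ToyAddon:
    def __init__(self) -> None:
        self._job_lock = asyncio.Lock()

    @group_once
    async def update(self) -> None:
        await asyncio.sleep(0.1)  # stand-in for pulling the new image

async def main() -> None:
    addon = ToyAddon()
    task = asyncio.create_task(addon.update())
    await asyncio.sleep(0)  # let the first update acquire the lock
    try:
        await addon.update()  # second concurrent call is rejected
    except RuntimeError as err:
        print(err)
    await task

asyncio.run(main())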
@@ -590,6 +913,8 @@ class Addon(AddonModel):
         try:
             self.path_pulse.write_text(pulse_config, encoding="utf-8")
         except OSError as err:
+            if err.errno == errno.EBADMSG:
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
             _LOGGER.error(
                 "Add-on %s can't write pulse/client.config: %s", self.slug, err
             )
@@ -660,9 +985,9 @@ class Addon(AddonModel):
         try:
             self._startup_task = self.sys_create_task(self._startup_event.wait())
             await asyncio.wait_for(self._startup_task, STARTUP_TIMEOUT)
-        except asyncio.TimeoutError:
+        except TimeoutError:
             _LOGGER.warning(
-                "Timeout while waiting for addon %s to start, took more then %s seconds",
+                "Timeout while waiting for addon %s to start, took more than %s seconds",
                 self.name,
                 STARTUP_TIMEOUT,
             )
@@ -671,16 +996,21 @@ class Addon(AddonModel):
         finally:
             self._startup_task = None
 
-    async def start(self) -> Awaitable[None]:
+    @Job(
+        name="addon_start",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
+    async def start(self) -> asyncio.Task:
         """Set options and start add-on.
 
-        Returns a coroutine that completes when addon has state 'started'.
+        Returns a Task that completes when addon has state 'started'.
         For addons with a healthcheck, that is when they become healthy or unhealthy.
         Addons without a healthcheck have state 'started' immediately.
         """
         if await self.instance.is_running():
             _LOGGER.warning("%s is already running!", self.slug)
-            return self._wait_for_startup()
+            return self.sys_create_task(self._wait_for_startup())
 
         # Access Token
         self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
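
start() now hands back an asyncio.Task instead of a bare coroutine, so callers can schedule the startup wait and decide separately whether to block on it. A self-contained toy of that contract follows — the class and timings are illustrative stand-ins, not Supervisor code.

import asyncio

class ToyAddon:
    def __init__(self) -> None:
        self._startup_event = asyncio.Event()

    async def _wait_for_startup(self) -> None:
        await self._startup_event.wait()

    async def start(self) -> asyncio.Task:
        self._startup_event.clear()
        # ... container start would happen here; pretend it settles shortly ...
        asyncio.get_running_loop().call_later(0.05, self._startup_event.set)
        return asyncio.create_task(self._wait_for_startup())

async def main() -> None:
    addon = ToyAddon()
    wait_for_start = await addon.start()  # returns quickly with a Task
    await wait_for_start                  # optionally block until 'started'
    print("started")

asyncio.run(main())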
@@ -693,6 +1023,18 @@ class Addon(AddonModel):
         if self.with_audio:
             self.write_pulse()
 
+        def _check_addon_config_dir():
+            if self.path_config.is_dir():
+                return
+
+            _LOGGER.info(
+                "Creating Home Assistant add-on config folder %s", self.path_config
+            )
+            self.path_config.mkdir()
+
+        if self.addon_config_used:
+            await self.sys_run_in_executor(_check_addon_config_dir)
+
         # Start Add-on
         self._startup_event.clear()
         try:
@@ -701,8 +1043,13 @@ class Addon(AddonModel):
             self.state = AddonState.ERROR
             raise AddonsError() from err
 
-        return self._wait_for_startup()
+        return self.sys_create_task(self._wait_for_startup())
 
+    @Job(
+        name="addon_stop",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
     async def stop(self) -> None:
         """Stop add-on."""
         self._manual_stop = True
@@ -712,10 +1059,15 @@ class Addon(AddonModel):
             self.state = AddonState.ERROR
             raise AddonsError() from err
 
-    async def restart(self) -> Awaitable[None]:
+    @Job(
+        name="addon_restart",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
+    async def restart(self) -> asyncio.Task:
         """Restart add-on.
 
-        Returns a coroutine that completes when addon has state 'started' (see start).
+        Returns a Task that completes when addon has state 'started' (see start).
         """
         with suppress(AddonsError):
             await self.stop()
@@ -742,6 +1094,11 @@ class Addon(AddonModel):
         except DockerError as err:
             raise AddonsError() from err
 
+    @Job(
+        name="addon_write_stdin",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
     async def write_stdin(self, data) -> None:
         """Write data to add-on stdin."""
         if not self.with_stdin:
@@ -771,14 +1128,59 @@ class Addon(AddonModel):
                 _LOGGER.error,
             ) from err
 
-    async def backup(self, tar_file: tarfile.TarFile) -> Awaitable[None] | None:
+    @Job(
+        name="addon_begin_backup",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
+    async def begin_backup(self) -> bool:
+        """Execute pre commands or stop addon if necessary.
+
+        Returns value of `is_running`. Caller should not call `end_backup` if return is false.
+        """
+        if not await self.is_running():
+            return False
+
+        if self.backup_mode == AddonBackupMode.COLD:
+            _LOGGER.info("Shutdown add-on %s for cold backup", self.slug)
+            await self.stop()
+
+        elif self.backup_pre is not None:
+            await self._backup_command(self.backup_pre)
+
+        return True
+
+    @Job(
+        name="addon_end_backup",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
+    async def end_backup(self) -> asyncio.Task | None:
+        """Execute post commands or restart addon if necessary.
+
+        Returns a Task that completes when addon has state 'started' (see start)
+        for cold backup. Else nothing is returned.
+        """
+        if self.backup_mode is AddonBackupMode.COLD:
+            _LOGGER.info("Starting add-on %s again", self.slug)
+            return await self.start()
+
+        if self.backup_post is not None:
+            await self._backup_command(self.backup_post)
+        return None
+
+    @Job(
+        name="addon_backup",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
+    async def backup(self, tar_file: tarfile.TarFile) -> asyncio.Task | None:
         """Backup state of an add-on.
 
-        Returns a coroutine that completes when addon has state 'started' (see start)
+        Returns a Task that completes when addon has state 'started' (see start)
         for cold backup. Else nothing is returned.
        """
         wait_for_start: Awaitable[None] | None = None
-        is_running = await self.is_running()
 
         with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
             temp_path = Path(temp)
@@ -830,19 +1232,16 @@ class Addon(AddonModel):
                         arcname="data",
                     )
 
-            if (
-                is_running
-                and self.backup_mode == AddonBackupMode.HOT
-                and self.backup_pre is not None
-            ):
-                await self._backup_command(self.backup_pre)
-            elif is_running and self.backup_mode == AddonBackupMode.COLD:
-                _LOGGER.info("Shutdown add-on %s for cold backup", self.slug)
-                try:
-                    await self.instance.stop()
-                except DockerError as err:
-                    raise AddonsError() from err
+                    # Backup config
+                    if self.addon_config_used:
+                        atomic_contents_add(
+                            backup,
+                            self.path_config,
+                            excludes=self.backup_exclude,
+                            arcname="config",
+                        )
 
+            is_running = await self.begin_backup()
             try:
                 _LOGGER.info("Building backup for add-on %s", self.slug)
                 await self.sys_run_in_executor(_write_tarfile)
@@ -851,23 +1250,21 @@ class Addon(AddonModel):
                     f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
                 ) from err
             finally:
-                if (
-                    is_running
-                    and self.backup_mode == AddonBackupMode.HOT
-                    and self.backup_post is not None
-                ):
-                    await self._backup_command(self.backup_post)
-                elif is_running and self.backup_mode is AddonBackupMode.COLD:
-                    _LOGGER.info("Starting add-on %s again", self.slug)
-                    wait_for_start = await self.start()
+                if is_running:
+                    wait_for_start = await self.end_backup()
 
         _LOGGER.info("Finish backup for addon %s", self.slug)
         return wait_for_start
 
-    async def restore(self, tar_file: tarfile.TarFile) -> Awaitable[None] | None:
+    @Job(
+        name="addon_restore",
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=AddonsJobError,
+    )
+    async def restore(self, tar_file: tarfile.TarFile) -> asyncio.Task | None:
         """Restore state of an add-on.
 
-        Returns a coroutine that completes when addon has state 'started' (see start)
+        Returns a Task that completes when addon has state 'started' (see start)
         if addon is started after restore. Else nothing is returned.
         """
         wait_for_start: Awaitable[None] | None = None
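
The hot/cold logic that used to live inline in backup() is now split into the begin_backup()/end_backup() pair above, so other callers can bracket their own work with the same stop/restart semantics. A runnable toy of the pairing — the class and enum here are simplified stand-ins, not the real Addon (StrEnum requires Python 3.11+):

import asyncio
from enum import StrEnum

class BackupMode(StrEnum):
    HOT = "hot"
    COLD = "cold"

class ToyAddon:
    def __init__(self, running: bool, mode: BackupMode) -> None:
        self.running = running
        self.mode = mode

    async def begin_backup(self) -> bool:
        """Report whether the add-on was running; stop it for a cold backup."""
        if not self.running:
            return False
        if self.mode == BackupMode.COLD:
            self.running = False
        return True

    async def end_backup(self) -> None:
        """Undo begin_backup(): restart after a cold backup."""
        if self.mode == BackupMode.COLD:
            self.running = True

async def main() -> None:
    addon = ToyAddon(running=True, mode=BackupMode.COLD)
    was_running = await addon.begin_backup()
    try:
        pass  # write the tar file here
    finally:
        if was_running:  # caller must skip end_backup() otherwise
            await addon.end_backup()
    print(addon.running)  # True: restarted after the cold backup

asyncio.run(main())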
@@ -876,7 +1273,11 @@ class Addon(AddonModel):
         def _extract_tarfile():
             """Extract tar backup."""
             with tar_file as backup:
-                backup.extractall(path=Path(temp), members=secure_path(backup))
+                backup.extractall(
+                    path=Path(temp),
+                    members=secure_path(backup),
+                    filter="fully_trusted",
+                )
 
         try:
             await self.sys_run_in_executor(_extract_tarfile)
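
The extractall() call gains an explicit filter argument. This matches the standard-library tarfile extraction-filter API (PEP 706): from Python 3.12 on, omitting the filter emits a DeprecationWarning, and "fully_trusted" preserves the old behaviour for archives the application wrote itself, while "data" is the safer choice for untrusted input. A small standalone demonstration:

import io
import tarfile
import tempfile

# Build a tiny archive in memory, then extract it with an explicit filter.
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:
    payload = b"hello"
    info = tarfile.TarInfo(name="data/hello.txt")
    info.size = len(payload)
    tar.addfile(info, io.BytesIO(payload))

buf.seek(0)
with tempfile.TemporaryDirectory() as dest, tarfile.open(fileobj=buf) as tar:
    tar.extractall(path=dest, filter="data")  # "data" rejects unsafe members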
@@ -916,64 +1317,79 @@ class Addon(AddonModel):
 
             # Stop it first if its running
             if await self.instance.is_running():
-                with suppress(DockerError):
-                    await self.instance.stop()
+                await self.stop()
 
-            # Check version / restore image
-            version = data[ATTR_VERSION]
-            if not await self.instance.exists():
-                _LOGGER.info("Restore/Install of image for addon %s", self.slug)
-
-                image_file = Path(temp, "image.tar")
-                if image_file.is_file():
-                    with suppress(DockerError):
-                        await self.instance.import_image(image_file)
-                else:
-                    with suppress(DockerError):
-                        await self.instance.install(version, restore_image)
-                        await self.instance.cleanup()
-            elif self.instance.version != version or self.legacy:
-                _LOGGER.info("Restore/Update of image for addon %s", self.slug)
-                with suppress(DockerError):
-                    await self.instance.update(version, restore_image)
-
-            # Restore data
-            def _restore_data():
-                """Restore data."""
-                temp_data = Path(temp, "data")
-                if temp_data.is_dir():
-                    shutil.copytree(temp_data, self.path_data, symlinks=True)
-                else:
-                    self.path_data.mkdir()
-
-            _LOGGER.info("Restoring data for addon %s", self.slug)
-            if self.path_data.is_dir():
-                await remove_data(self.path_data)
             try:
-                await self.sys_run_in_executor(_restore_data)
-            except shutil.Error as err:
-                raise AddonsError(
-                    f"Can't restore origin data: {err}", _LOGGER.error
-                ) from err
+                # Check version / restore image
+                version = data[ATTR_VERSION]
+                if not await self.instance.exists():
+                    _LOGGER.info("Restore/Install of image for addon %s", self.slug)
+
+                    image_file = Path(temp, "image.tar")
+                    if image_file.is_file():
+                        with suppress(DockerError):
+                            await self.instance.import_image(image_file)
+                    else:
+                        with suppress(DockerError):
+                            await self.instance.install(
+                                version, restore_image, self.arch
+                            )
+                            await self.instance.cleanup()
+                elif self.instance.version != version or self.legacy:
+                    _LOGGER.info("Restore/Update of image for addon %s", self.slug)
+                    with suppress(DockerError):
+                        await self.instance.update(version, restore_image, self.arch)
+                await self._check_ingress_port()
+
+                # Restore data and config
+                def _restore_data():
+                    """Restore data and config."""
+                    temp_data = Path(temp, "data")
+                    if temp_data.is_dir():
+                        shutil.copytree(temp_data, self.path_data, symlinks=True)
+                    else:
+                        self.path_data.mkdir()
+
+                    temp_config = Path(temp, "config")
+                    if temp_config.is_dir():
+                        shutil.copytree(temp_config, self.path_config, symlinks=True)
+                    elif self.addon_config_used:
+                        self.path_config.mkdir()
+
+                _LOGGER.info("Restoring data and config for addon %s", self.slug)
+                if self.path_data.is_dir():
+                    await remove_data(self.path_data)
+                if self.path_config.is_dir():
+                    await remove_data(self.path_config)
 
-            # Restore AppArmor
-            profile_file = Path(temp, "apparmor.txt")
-            if profile_file.exists():
                 try:
-                    await self.sys_host.apparmor.load_profile(self.slug, profile_file)
-                except HostAppArmorError as err:
-                    _LOGGER.error(
-                        "Can't restore AppArmor profile for add-on %s", self.slug
-                    )
-                    raise AddonsError() from err
+                    await self.sys_run_in_executor(_restore_data)
+                except shutil.Error as err:
+                    raise AddonsError(
+                        f"Can't restore origin data: {err}", _LOGGER.error
+                    ) from err
 
-            # Is add-on loaded
-            if not self.loaded:
-                await self.load()
+                # Restore AppArmor
+                profile_file = Path(temp, "apparmor.txt")
+                if profile_file.exists():
+                    try:
+                        await self.sys_host.apparmor.load_profile(
+                            self.slug, profile_file
+                        )
+                    except HostAppArmorError as err:
+                        _LOGGER.error(
+                            "Can't restore AppArmor profile for add-on %s", self.slug
+                        )
+                        raise AddonsError() from err
 
-            # Run add-on
-            if data[ATTR_STATE] == AddonState.STARTED:
-                wait_for_start = await self.start()
+            finally:
+                # Is add-on loaded
+                if not self.loaded:
+                    await self.load()
+
+                # Run add-on
+                if data[ATTR_STATE] == AddonState.STARTED:
+                    wait_for_start = await self.start()
 
         _LOGGER.info("Finished restore for add-on %s", self.slug)
         return wait_for_start
@@ -1000,7 +1416,7 @@ class Addon(AddonModel):
             _LOGGER.warning(
                 "Watchdog found addon %s is %s, restarting...",
                 self.name,
-                state.value,
+                state,
             )
             try:
                 if state == ContainerState.FAILED:
@@ -1064,3 +1480,9 @@ class Addon(AddonModel):
                     ContainerState.UNHEALTHY,
                 ]:
                     await self._restart_after_problem(event.state)
+
+    def refresh_path_cache(self) -> Awaitable[None]:
+        """Refresh cache of existing paths."""
+        if self.is_detached:
+            return super().refresh_path_cache()
+        return self.addon_store.refresh_path_cache()
supervisor/addons/build.py
@@ -1,4 +1,5 @@
 """Supervisor add-on build environment."""
+
 from __future__ import annotations
 
 from functools import cached_property
@@ -102,11 +103,11 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         except HassioArchNotFound:
             return False
 
-    def get_docker_args(self, version: AwesomeVersion):
+    def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
         """Create a dict with Docker build arguments."""
         args = {
             "path": str(self.addon.path_location),
-            "tag": f"{self.addon.image}:{version!s}",
+            "tag": f"{image or self.addon.image}:{version!s}",
             "dockerfile": str(self.dockerfile),
             "pull": True,
             "forcerm": not self.sys_dev,
supervisor/addons/configuration.py (new file, 11 lines)
@@ -0,0 +1,11 @@
+"""Confgiuration Objects for Addon Config."""
+
+from dataclasses import dataclass
+
+
+@dataclass(slots=True)
+class FolderMapping:
+    """Represent folder mapping configuration."""
+
+    path: str | None
+    read_only: bool
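
FolderMapping pairs an optional target path with a read-only flag. A short usage sketch, assuming the migrated dict form of a "map" entry shown in validate.py further below (the entry literal is invented for illustration):

from dataclasses import dataclass

@dataclass(slots=True)
class FolderMapping:
    """Mirror of the configuration object above, for illustration."""

    path: str | None
    read_only: bool

# A "map" entry in its migrated dict form:
entry = {"type": "addon_config", "read_only": False}
mapping = FolderMapping(entry.get("path"), entry["read_only"])
print(mapping)  # FolderMapping(path=None, read_only=False)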
supervisor/addons/const.py
@@ -1,19 +1,38 @@
 """Add-on static data."""
+
 from datetime import timedelta
-from enum import Enum
+from enum import StrEnum
 
 from ..jobs.const import JobCondition
 
 
-class AddonBackupMode(str, Enum):
+class AddonBackupMode(StrEnum):
     """Backup mode of an Add-on."""
 
     HOT = "hot"
     COLD = "cold"
 
 
+class MappingType(StrEnum):
+    """Mapping type of an Add-on Folder."""
+
+    DATA = "data"
+    CONFIG = "config"
+    SSL = "ssl"
+    ADDONS = "addons"
+    BACKUP = "backup"
+    SHARE = "share"
+    MEDIA = "media"
+    HOMEASSISTANT_CONFIG = "homeassistant_config"
+    ALL_ADDON_CONFIGS = "all_addon_configs"
+    ADDON_CONFIG = "addon_config"
+
+
 ATTR_BACKUP = "backup"
+ATTR_BREAKING_VERSIONS = "breaking_versions"
 ATTR_CODENOTARY = "codenotary"
+ATTR_READ_ONLY = "read_only"
+ATTR_PATH = "path"
 WATCHDOG_RETRY_SECONDS = 10
 WATCHDOG_MAX_ATTEMPTS = 5
 WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
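
The move from `class AddonBackupMode(str, Enum)` to StrEnum (Python 3.11+) keeps string behaviour while simplifying the declaration: members compare and format as plain strings, which is also a plausible motivation for the `.value` drops elsewhere in this diff. A quick standalone demonstration:

from enum import StrEnum  # Python 3.11+

class AddonBackupMode(StrEnum):
    HOT = "hot"
    COLD = "cold"

# StrEnum members are real strings: no .value needed for comparison or %s/f-string formatting.
assert AddonBackupMode.COLD == "cold"
print(f"backup mode: {AddonBackupMode.COLD}")  # -> backup mode: cold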
supervisor/addons/data.py
@@ -1,4 +1,5 @@
 """Init file for Supervisor add-on data."""
+
 from copy import deepcopy
 from typing import Any
 
supervisor/addons/manager.py (new file, 388 lines)
@@ -0,0 +1,388 @@
+"""Supervisor add-on manager."""
+
+import asyncio
+from collections.abc import Awaitable
+from contextlib import suppress
+import logging
+import tarfile
+from typing import Union
+
+from ..const import AddonBoot, AddonStartup, AddonState
+from ..coresys import CoreSys, CoreSysAttributes
+from ..exceptions import (
+    AddonConfigurationError,
+    AddonsError,
+    AddonsJobError,
+    AddonsNotSupportedError,
+    CoreDNSError,
+    DockerAPIError,
+    DockerError,
+    DockerNotFound,
+    HassioError,
+    HomeAssistantAPIError,
+)
+from ..jobs.decorator import Job, JobCondition
+from ..resolution.const import ContextType, IssueType, SuggestionType
+from ..store.addon import AddonStore
+from ..utils import check_exception_chain
+from ..utils.sentry import capture_exception
+from .addon import Addon
+from .const import ADDON_UPDATE_CONDITIONS
+from .data import AddonsData
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+AnyAddon = Union[Addon, AddonStore]
+
+
+class AddonManager(CoreSysAttributes):
+    """Manage add-ons inside Supervisor."""
+
+    def __init__(self, coresys: CoreSys):
+        """Initialize Docker base wrapper."""
+        self.coresys: CoreSys = coresys
+        self.data: AddonsData = AddonsData(coresys)
+        self.local: dict[str, Addon] = {}
+        self.store: dict[str, AddonStore] = {}
+
+    @property
+    def all(self) -> list[AnyAddon]:
+        """Return a list of all add-ons."""
+        addons: dict[str, AnyAddon] = {**self.store, **self.local}
+        return list(addons.values())
+
+    @property
+    def installed(self) -> list[Addon]:
+        """Return a list of all installed add-ons."""
+        return list(self.local.values())
+
+    def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
+        """Return an add-on from slug.
+
+        Prio:
+          1 - Local
+          2 - Store
+        """
+        if addon_slug in self.local:
+            return self.local[addon_slug]
+        if not local_only:
+            return self.store.get(addon_slug)
+        return None
+
+    def from_token(self, token: str) -> Addon | None:
+        """Return an add-on from Supervisor token."""
+        for addon in self.installed:
+            if token == addon.supervisor_token:
+                return addon
+        return None
+
+    async def load(self) -> None:
+        """Start up add-on management."""
+        # Refresh cache for all store addons
+        tasks: list[Awaitable[None]] = [
+            store.refresh_path_cache() for store in self.store.values()
+        ]
+
+        # Load all installed addons
+        for slug in self.data.system:
+            addon = self.local[slug] = Addon(self.coresys, slug)
+            tasks.append(addon.load())
+
+        # Run initial tasks
+        _LOGGER.info("Found %d installed add-ons", len(self.data.system))
+        if tasks:
+            await asyncio.gather(*tasks)
+
+        # Sync DNS
+        await self.sync_dns()
+
+    async def boot(self, stage: AddonStartup) -> None:
+        """Boot add-ons with mode auto."""
+        tasks: list[Addon] = []
+        for addon in self.installed:
+            if addon.boot != AddonBoot.AUTO or addon.startup != stage:
+                continue
+            tasks.append(addon)
+
+        # Evaluate add-ons which need to be started
+        _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
+        if not tasks:
+            return
+
+        # Start Add-ons sequential
+        # avoid issue on slow IO
+        # Config.wait_boot is deprecated. Until addons update with healthchecks,
+        # add a sleep task for it to keep the same minimum amount of wait time
+        wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
+        for addon in tasks:
+            try:
+                if start_task := await addon.start():
+                    wait_boot.append(start_task)
+            except AddonsError as err:
+                # Check if there is an system/user issue
+                if check_exception_chain(
+                    err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
+                ):
+                    addon.boot = AddonBoot.MANUAL
+                    addon.save_persist()
+            except HassioError:
+                pass  # These are already handled
+            else:
+                continue
+
+            _LOGGER.warning("Can't start Add-on %s", addon.slug)
+
+        # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
+        await asyncio.gather(*wait_boot, return_exceptions=True)
+
+    async def shutdown(self, stage: AddonStartup) -> None:
+        """Shutdown addons."""
+        tasks: list[Addon] = []
+        for addon in self.installed:
+            if addon.state != AddonState.STARTED or addon.startup != stage:
+                continue
+            tasks.append(addon)
+
+        # Evaluate add-ons which need to be stopped
+        _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
+        if not tasks:
+            return
+
+        # Stop Add-ons sequential
+        # avoid issue on slow IO
+        for addon in tasks:
+            try:
+                await addon.stop()
+            except Exception as err:  # pylint: disable=broad-except
+                _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
+                capture_exception(err)
+
+    @Job(
+        name="addon_manager_install",
+        conditions=ADDON_UPDATE_CONDITIONS,
+        on_condition=AddonsJobError,
+    )
+    async def install(self, slug: str) -> None:
+        """Install an add-on."""
+        self.sys_jobs.current.reference = slug
+
+        if slug in self.local:
+            raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
+        store = self.store.get(slug)
+
+        if not store:
+            raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
+
+        store.validate_availability()
+
+        await Addon(self.coresys, slug).install()
+
+        _LOGGER.info("Add-on '%s' successfully installed", slug)
+
+    async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
+        """Remove an add-on."""
+        if slug not in self.local:
+            _LOGGER.warning("Add-on %s is not installed", slug)
+            return
+
+        shared_image = any(
+            self.local[slug].image == addon.image
+            and self.local[slug].version == addon.version
+            for addon in self.installed
+            if addon.slug != slug
+        )
+        await self.local[slug].uninstall(
+            remove_config=remove_config, remove_image=not shared_image
+        )
+
+        _LOGGER.info("Add-on '%s' successfully removed", slug)
+
+    @Job(
+        name="addon_manager_update",
+        conditions=ADDON_UPDATE_CONDITIONS,
+        on_condition=AddonsJobError,
+    )
+    async def update(
+        self, slug: str, backup: bool | None = False
+    ) -> asyncio.Task | None:
+        """Update add-on.
+
+        Returns a Task that completes when addon has state 'started' (see addon.start)
+        if addon is started after update. Else nothing is returned.
+        """
+        self.sys_jobs.current.reference = slug
+
+        if slug not in self.local:
+            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
+        addon = self.local[slug]
+
+        if addon.is_detached:
+            raise AddonsError(
+                f"Add-on {slug} is not available inside store", _LOGGER.error
+            )
+        store = self.store[slug]
+
+        if addon.version == store.version:
+            raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
+
+        # Check if available, Maybe something have changed
+        store.validate_availability()
+
+        if backup:
+            await self.sys_backups.do_backup_partial(
+                name=f"addon_{addon.slug}_{addon.version}",
+                homeassistant=False,
+                addons=[addon.slug],
+            )
+
+        return await addon.update()
+
+    @Job(
+        name="addon_manager_rebuild",
+        conditions=[
+            JobCondition.FREE_SPACE,
+            JobCondition.INTERNET_HOST,
+            JobCondition.HEALTHY,
+        ],
+        on_condition=AddonsJobError,
+    )
+    async def rebuild(self, slug: str) -> asyncio.Task | None:
+        """Perform a rebuild of local build add-on.
+
+        Returns a Task that completes when addon has state 'started' (see addon.start)
+        if addon is started after rebuild. Else nothing is returned.
+        """
+        self.sys_jobs.current.reference = slug
+
+        if slug not in self.local:
+            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
+        addon = self.local[slug]
+
+        if addon.is_detached:
+            raise AddonsError(
+                f"Add-on {slug} is not available inside store", _LOGGER.error
+            )
+        store = self.store[slug]
+
+        # Check if a rebuild is possible now
+        if addon.version != store.version:
+            raise AddonsError(
+                "Version changed, use Update instead Rebuild", _LOGGER.error
+            )
+        if not addon.need_build:
+            raise AddonsNotSupportedError(
+                "Can't rebuild a image based add-on", _LOGGER.error
+            )
+
+        return await addon.rebuild()
+
+    @Job(
+        name="addon_manager_restore",
+        conditions=[
+            JobCondition.FREE_SPACE,
+            JobCondition.INTERNET_HOST,
+            JobCondition.HEALTHY,
+        ],
+        on_condition=AddonsJobError,
+    )
+    async def restore(
+        self, slug: str, tar_file: tarfile.TarFile
+    ) -> asyncio.Task | None:
+        """Restore state of an add-on.
+
+        Returns a Task that completes when addon has state 'started' (see addon.start)
+        if addon is started after restore. Else nothing is returned.
+        """
+        self.sys_jobs.current.reference = slug
+
+        if slug not in self.local:
+            _LOGGER.debug("Add-on %s is not local available for restore", slug)
+            addon = Addon(self.coresys, slug)
+            had_ingress = False
+        else:
+            _LOGGER.debug("Add-on %s is local available for restore", slug)
+            addon = self.local[slug]
+            had_ingress = addon.ingress_panel
+
+        wait_for_start = await addon.restore(tar_file)
+
+        # Check if new
+        if slug not in self.local:
+            _LOGGER.info("Detect new Add-on after restore %s", slug)
+            self.local[slug] = addon
+
+        # Update ingress
+        if had_ingress != addon.ingress_panel:
+            await self.sys_ingress.reload()
+            with suppress(HomeAssistantAPIError):
+                await self.sys_ingress.update_hass_panel(addon)
+
+        return wait_for_start
+
+    @Job(
+        name="addon_manager_repair",
+        conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
+    )
+    async def repair(self) -> None:
+        """Repair local add-ons."""
+        needs_repair: list[Addon] = []
+
+        # Evaluate Add-ons to repair
+        for addon in self.installed:
+            if await addon.instance.exists():
+                continue
+            needs_repair.append(addon)
+
+        _LOGGER.info("Found %d add-ons to repair", len(needs_repair))
+        if not needs_repair:
+            return
+
+        for addon in needs_repair:
+            _LOGGER.info("Repairing for add-on: %s", addon.slug)
+            with suppress(DockerError, KeyError):
+                # Need pull a image again
+                if not addon.need_build:
+                    await addon.instance.install(addon.version, addon.image)
+                    continue
+
+                # Need local lookup
+                if addon.need_build and not addon.is_detached:
+                    store = self.store[addon.slug]
+                    # If this add-on is available for rebuild
+                    if addon.version == store.version:
+                        await addon.instance.install(addon.version, addon.image)
+                        continue
+
+            _LOGGER.error("Can't repair %s", addon.slug)
+            with suppress(AddonsError):
+                await self.uninstall(addon.slug)
+
+    async def sync_dns(self) -> None:
+        """Sync add-ons DNS names."""
+        # Update hosts
+        add_host_coros: list[Awaitable[None]] = []
+        for addon in self.installed:
+            try:
+                if not await addon.instance.is_running():
+                    continue
+            except DockerError as err:
+                _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
+                self.sys_resolution.create_issue(
+                    IssueType.CORRUPT_DOCKER,
+                    ContextType.ADDON,
+                    reference=addon.slug,
+                    suggestions=[SuggestionType.EXECUTE_REPAIR],
+                )
+                capture_exception(err)
+            else:
+                add_host_coros.append(
+                    self.sys_plugins.dns.add_host(
+                        ipv4=addon.ip_address, names=[addon.hostname], write=False
+                    )
+                )
+
+        await asyncio.gather(*add_host_coros)
+
+        # Write hosts files
+        with suppress(CoreDNSError):
+            await self.sys_plugins.dns.write_hosts()
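
AddonManager.boot() collects the asyncio.Task each add-on's start() returns and gathers them with return_exceptions=True, so one failing add-on cannot abort the boot phase. A runnable toy of that pattern — the add-on names, timings, and the simulated failure are invented for illustration:

import asyncio

async def start_addon(name: str) -> asyncio.Task:
    async def wait_started() -> None:
        await asyncio.sleep(0.05)  # stand-in for the healthcheck wait
        if name == "bad":
            raise RuntimeError(f"{name} failed to start")
    return asyncio.create_task(wait_started())

async def boot() -> None:
    wait_boot = [asyncio.sleep(0.1)]  # deprecated wait_boot minimum
    for name in ("good", "bad"):
        wait_boot.append(await start_addon(name))
    # Failures come back as results instead of propagating.
    results = await asyncio.gather(*wait_boot, return_exceptions=True)
    print(results)  # [None, None, RuntimeError('bad failed to start')]

asyncio.run(boot())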
supervisor/addons/model.py
@@ -1,14 +1,18 @@
 """Init file for Supervisor add-ons."""
+
 from abc import ABC, abstractmethod
 from collections import defaultdict
 from collections.abc import Awaitable, Callable
 from contextlib import suppress
+from datetime import datetime
 import logging
 from pathlib import Path
 from typing import Any
 
 from awesomeversion import AwesomeVersion, AwesomeVersionException
 
+from supervisor.utils.dt import utc_from_timestamp
+
 from ..const import (
     ATTR_ADVANCED,
     ATTR_APPARMOR,
@@ -65,11 +69,13 @@ from ..const import (
     ATTR_TIMEOUT,
     ATTR_TMPFS,
     ATTR_TRANSLATIONS,
+    ATTR_TYPE,
     ATTR_UART,
     ATTR_UDEV,
     ATTR_URL,
     ATTR_USB,
     ATTR_VERSION,
+    ATTR_VERSION_TIMESTAMP,
     ATTR_VIDEO,
     ATTR_WATCHDOG,
     ATTR_WEBUI,
@@ -77,6 +83,7 @@ from ..const import (
     SECURITY_DISABLE,
     SECURITY_PROFILE,
     AddonBoot,
+    AddonBootConfig,
     AddonStage,
     AddonStartup,
 )
@@ -85,9 +92,19 @@ from ..docker.const import Capabilities
 from ..exceptions import AddonsNotSupportedError
 from ..jobs.const import JOB_GROUP_ADDON
 from ..jobs.job_group import JobGroup
-from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode
+from ..utils import version_is_new_enough
+from .configuration import FolderMapping
+from .const import (
+    ATTR_BACKUP,
+    ATTR_BREAKING_VERSIONS,
+    ATTR_CODENOTARY,
+    ATTR_PATH,
+    ATTR_READ_ONLY,
+    AddonBackupMode,
+    MappingType,
+)
 from .options import AddonOptions, UiOptions
-from .validate import RE_SERVICE, RE_VOLUME
+from .validate import RE_SERVICE
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -103,6 +120,10 @@ class AddonModel(JobGroup, ABC):
             coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
         )
         self.slug: str = slug
+        self._path_icon_exists: bool = False
+        self._path_logo_exists: bool = False
+        self._path_changelog_exists: bool = False
+        self._path_documentation_exists: bool = False
 
     @property
     @abstractmethod
@@ -130,10 +151,15 @@ class AddonModel(JobGroup, ABC):
         return self.data[ATTR_OPTIONS]
 
     @property
-    def boot(self) -> AddonBoot:
-        """Return boot config with prio local settings."""
+    def boot_config(self) -> AddonBootConfig:
+        """Return boot config."""
         return self.data[ATTR_BOOT]
 
+    @property
+    def boot(self) -> AddonBoot:
+        """Return boot config with prio local settings unless config is forced."""
+        return AddonBoot(self.data[ATTR_BOOT])
+
     @property
     def auto_update(self) -> bool | None:
         """Return if auto update is enable."""
@@ -211,6 +237,11 @@ class AddonModel(JobGroup, ABC):
         """Return latest version of add-on."""
         return self.data[ATTR_VERSION]
 
+    @property
+    def latest_version_timestamp(self) -> datetime:
+        """Return when latest version was first seen."""
+        return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
+
     @property
     def version(self) -> AwesomeVersion:
         """Return version of add-on."""
@@ -491,22 +522,22 @@ class AddonModel(JobGroup, ABC):
     @property
     def with_icon(self) -> bool:
         """Return True if an icon exists."""
-        return self.path_icon.exists()
+        return self._path_icon_exists
 
     @property
     def with_logo(self) -> bool:
         """Return True if a logo exists."""
-        return self.path_logo.exists()
+        return self._path_logo_exists
 
     @property
     def with_changelog(self) -> bool:
         """Return True if a changelog exists."""
-        return self.path_changelog.exists()
+        return self._path_changelog_exists
 
     @property
     def with_documentation(self) -> bool:
         """Return True if a documentation exists."""
-        return self.path_documentation.exists()
+        return self._path_documentation_exists
 
     @property
     def supported_arch(self) -> list[str]:
@@ -537,14 +568,13 @@ class AddonModel(JobGroup, ABC):
         return ATTR_IMAGE not in self.data
 
     @property
-    def map_volumes(self) -> dict[str, bool]:
-        """Return a dict of {volume: read-only} from add-on."""
+    def map_volumes(self) -> dict[MappingType, FolderMapping]:
+        """Return a dict of {MappingType: FolderMapping} from add-on."""
         volumes = {}
         for volume in self.data[ATTR_MAP]:
-            result = RE_VOLUME.match(volume)
-            if not result:
-                continue
-            volumes[result.group(1)] = result.group(2) != "rw"
+            volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping(
+                volume.get(ATTR_PATH), volume[ATTR_READ_ONLY]
+            )
 
         return volumes
 
@@ -611,6 +641,22 @@ class AddonModel(JobGroup, ABC):
         """Return Signer email address for CAS."""
         return self.data.get(ATTR_CODENOTARY)
 
+    @property
+    def breaking_versions(self) -> list[AwesomeVersion]:
+        """Return breaking versions of addon."""
+        return self.data[ATTR_BREAKING_VERSIONS]
+
+    def refresh_path_cache(self) -> Awaitable[None]:
+        """Refresh cache of existing paths."""
+
+        def check_paths():
+            self._path_icon_exists = self.path_icon.exists()
+            self._path_logo_exists = self.path_logo.exists()
+            self._path_changelog_exists = self.path_changelog.exists()
+            self._path_documentation_exists = self.path_documentation.exists()
+
+        return self.sys_run_in_executor(check_paths)
+
     def validate_availability(self) -> None:
         """Validate if addon is available for current system."""
         return self._validate_availability(self.data, logger=_LOGGER.error)
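
refresh_path_cache() moves the blocking Path.exists() calls off the event loop into an executor, so the with_icon/with_logo/with_changelog/with_documentation properties become cheap attribute reads. A standalone sketch of the same pattern with a single cached path (names are illustrative):

import asyncio
from pathlib import Path

class PathCache:
    def __init__(self, base: Path) -> None:
        self._base = base
        self._icon_exists = False

    @property
    def with_icon(self) -> bool:
        return self._icon_exists  # hot path: no filesystem access

    async def refresh_path_cache(self) -> None:
        def check_paths() -> None:
            # Blocking stat() runs in a worker thread, not on the loop.
            self._icon_exists = (self._base / "icon.png").exists()

        await asyncio.get_running_loop().run_in_executor(None, check_paths)

async def main() -> None:
    cache = PathCache(Path("."))
    await cache.refresh_path_cache()
    print(cache.with_icon)

asyncio.run(main())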
@@ -645,7 +691,9 @@ class AddonModel(JobGroup, ABC):
         # Home Assistant
         version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT)
         with suppress(AwesomeVersionException, TypeError):
-            if self.sys_homeassistant.version < version:
+            if version and not version_is_new_enough(
+                self.sys_homeassistant.version, version
+            ):
                 raise AddonsNotSupportedError(
                     f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater",
                     logger,
@@ -669,19 +717,3 @@ class AddonModel(JobGroup, ABC):
 
         # local build
         return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"
-
-    def install(self) -> Awaitable[None]:
-        """Install this add-on."""
-        return self.sys_addons.install(self.slug)
-
-    def uninstall(self) -> Awaitable[None]:
-        """Uninstall this add-on."""
-        return self.sys_addons.uninstall(self.slug)
-
-    def update(self, backup: bool | None = False) -> Awaitable[Awaitable[None] | None]:
-        """Update this add-on."""
-        return self.sys_addons.update(self.slug, backup=backup)
-
-    def rebuild(self) -> Awaitable[Awaitable[None] | None]:
-        """Rebuild this add-on."""
-        return self.sys_addons.rebuild(self.slug)
supervisor/addons/options.py
@@ -1,4 +1,5 @@
 """Add-on Options / UI rendering."""
+
 import hashlib
 import logging
 from pathlib import Path
supervisor/addons/utils.py
@@ -1,4 +1,5 @@
 """Util add-ons functions."""
+
 from __future__ import annotations
 
 import asyncio
supervisor/addons/validate.py
@@ -1,4 +1,5 @@
 """Validate add-ons options schema."""
+
 import logging
 import re
 import secrets
@@ -78,9 +79,12 @@ from ..const import (
     ATTR_STATE,
     ATTR_STDIN,
     ATTR_SYSTEM,
+    ATTR_SYSTEM_MANAGED,
+    ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
     ATTR_TIMEOUT,
     ATTR_TMPFS,
     ATTR_TRANSLATIONS,
+    ATTR_TYPE,
     ATTR_UART,
     ATTR_UDEV,
     ATTR_URL,
@@ -94,11 +98,11 @@ from ..const import (
     ROLE_ALL,
     ROLE_DEFAULT,
     AddonBoot,
+    AddonBootConfig,
     AddonStage,
     AddonStartup,
     AddonState,
 )
-from ..discovery.validate import valid_discovery_service
 from ..docker.const import Capabilities
 from ..validate import (
     docker_image,
@@ -109,12 +113,23 @@ from ..validate import (
     uuid_match,
     version_tag,
 )
-from .const import ATTR_BACKUP, ATTR_CODENOTARY, RE_SLUG, AddonBackupMode
+from .const import (
+    ATTR_BACKUP,
+    ATTR_BREAKING_VERSIONS,
+    ATTR_CODENOTARY,
+    ATTR_PATH,
+    ATTR_READ_ONLY,
+    RE_SLUG,
+    AddonBackupMode,
+    MappingType,
+)
 from .options import RE_SCHEMA_ELEMENT
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
-RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
+RE_VOLUME = re.compile(
+    r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$"
+)
 RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")
 
 
@@ -143,6 +158,7 @@ RE_MACHINE = re.compile(
     r"|raspberrypi3"
     r"|raspberrypi4-64"
     r"|raspberrypi4"
+    r"|raspberrypi5-64"
     r"|yellow"
     r"|green"
     r"|tinker"
@@ -177,20 +193,6 @@ def _warn_addon_config(config: dict[str, Any]):
|
|||||||
name,
|
name,
|
||||||
)
|
)
|
||||||
|
|
||||||
invalid_services: list[str] = []
|
|
||||||
for service in config.get(ATTR_DISCOVERY, []):
|
|
||||||
try:
|
|
||||||
valid_discovery_service(service)
|
|
||||||
except vol.Invalid:
|
|
||||||
invalid_services.append(service)
|
|
||||||
|
|
||||||
if invalid_services:
|
|
||||||
_LOGGER.warning(
|
|
||||||
"Add-on lists the following unknown services for discovery: %s. Please report this to the maintainer of %s",
|
|
||||||
", ".join(invalid_services),
|
|
||||||
name,
|
|
||||||
)
|
|
||||||
|
|
||||||
return config
|
return config
|
||||||
|
|
||||||
|
|
||||||
@@ -212,9 +214,9 @@ def _migrate_addon_config(protocol=False):
|
|||||||
name,
|
name,
|
||||||
)
|
)
|
||||||
if value == "before":
|
if value == "before":
|
||||||
config[ATTR_STARTUP] = AddonStartup.SERVICES.value
|
config[ATTR_STARTUP] = AddonStartup.SERVICES
|
||||||
elif value == "after":
|
elif value == "after":
|
||||||
config[ATTR_STARTUP] = AddonStartup.APPLICATION.value
|
config[ATTR_STARTUP] = AddonStartup.APPLICATION
|
||||||
|
|
||||||
# UART 2021-01-20
|
# UART 2021-01-20
|
||||||
if "auto_uart" in config:
|
if "auto_uart" in config:
|
||||||
@@ -260,6 +262,48 @@ def _migrate_addon_config(protocol=False):
                 name,
             )

+        # 2023-11 "map" entries can also be dict to allow path configuration
+        volumes = []
+        for entry in config.get(ATTR_MAP, []):
+            if isinstance(entry, dict):
+                volumes.append(entry)
+            if isinstance(entry, str):
+                result = RE_VOLUME.match(entry)
+                if not result:
+                    continue
+                volumes.append(
+                    {
+                        ATTR_TYPE: result.group(1),
+                        ATTR_READ_ONLY: result.group(2) != "rw",
+                    }
+                )
+
+        if volumes:
+            config[ATTR_MAP] = volumes
+
+            # 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
+            if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
+                if any(
+                    volume
+                    and volume[ATTR_TYPE]
+                    in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG}
+                    for volume in volumes
+                ):
+                    _LOGGER.warning(
+                        "Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s",
+                        MappingType.ADDON_CONFIG,
+                        MappingType.HOMEASSISTANT_CONFIG,
+                        MappingType.CONFIG,
+                        name,
+                    )
+                else:
+                    _LOGGER.debug(
+                        "Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s",
+                        MappingType.CONFIG,
+                        MappingType.HOMEASSISTANT_CONFIG,
+                        name,
+                    )
+
         return config

     return _migrate
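A minimal sketch of what the migration above produces, assuming the RE_VOLUME pattern shown earlier and plain-string keys for illustration (the real code uses the ATTR_* constants and the MappingType enum):

```python
import re

RE_VOLUME = re.compile(
    r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$"
)


def migrate_map(entries: list) -> list[dict]:
    volumes = []
    for entry in entries:
        if isinstance(entry, dict):  # already in the new dict form
            volumes.append(entry)
        if isinstance(entry, str):  # legacy "name[:rw|ro]" string form
            result = RE_VOLUME.match(entry)
            if not result:
                continue
            volumes.append(
                {"type": result.group(1), "read_only": result.group(2) != "rw"}
            )
    return volumes


print(migrate_map(["config:rw", "ssl", {"type": "share", "read_only": True}]))
# [{'type': 'config', 'read_only': False}, {'type': 'ssl', 'read_only': True},
#  {'type': 'share', 'read_only': True}]
```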
@@ -278,7 +322,9 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
         vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
             AddonStartup
         ),
-        vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
+        vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce(
+            AddonBootConfig
+        ),
         vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
         vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
         vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@@ -308,7 +354,15 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
         vol.Optional(ATTR_DEVICES): [str],
         vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
         vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
-        vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
+        vol.Optional(ATTR_MAP, default=list): [
+            vol.Schema(
+                {
+                    vol.Required(ATTR_TYPE): vol.Coerce(MappingType),
+                    vol.Optional(ATTR_READ_ONLY, default=True): bool,
+                    vol.Optional(ATTR_PATH): str,
+                }
+            )
+        ],
         vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
         vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)],
         vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
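A standalone sketch of the new per-entry map schema, assuming a MappingType StrEnum whose values match the names accepted by RE_VOLUME (the enum members shown here are illustrative, not the full set):

```python
from enum import StrEnum

import voluptuous as vol


class MappingType(StrEnum):
    CONFIG = "config"
    HOMEASSISTANT_CONFIG = "homeassistant_config"
    ADDON_CONFIG = "addon_config"
    SHARE = "share"


entry_schema = vol.Schema(
    {
        vol.Required("type"): vol.Coerce(MappingType),
        vol.Optional("read_only", default=True): bool,
        vol.Optional("path"): str,
    }
)

print(entry_schema({"type": "addon_config", "path": "/config"}))
# {'type': <MappingType.ADDON_CONFIG: 'addon_config'>, 'read_only': True, 'path': '/config'}
```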
@@ -360,6 +414,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
             vol.Coerce(int), vol.Range(min=10, max=300)
         ),
         vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
+        vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
     },
     extra=vol.REMOVE_EXTRA,
 )
@@ -418,6 +473,8 @@ SCHEMA_ADDON_USER = vol.Schema(
         vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
         vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
         vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
+        vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(),
+        vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str),
     },
     extra=vol.REMOVE_EXTRA,
 )
@@ -1,4 +1,5 @@
 """Init file for Supervisor RESTful API."""
+
 from functools import partial
 import logging
 from pathlib import Path
@@ -8,12 +9,14 @@ from aiohttp import web

 from ..const import AddonState
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import APIAddonNotInstalled
+from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
+from ..utils.sentry import capture_exception
 from .addons import APIAddons
 from .audio import APIAudio
 from .auth import APIAuth
 from .backups import APIBackups
 from .cli import APICli
+from .const import CONTENT_TYPE_TEXT
 from .discovery import APIDiscovery
 from .dns import APICoreDNS
 from .docker import APIDocker
@@ -35,7 +38,7 @@ from .security import APISecurity
 from .services import APIServices
 from .store import APIStore
 from .supervisor import APISupervisor
-from .utils import api_process
+from .utils import api_process, api_process_raw

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -66,11 +69,17 @@ class RestAPI(CoreSysAttributes):
         )

         # service stuff
-        self._runner: web.AppRunner = web.AppRunner(self.webapp)
+        self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
         self._site: web.TCPSite | None = None

+        # share single host API handler for reuse in logging endpoints
+        self._api_host: APIHost | None = None
+
     async def load(self) -> None:
         """Register REST API Calls."""
+        self._api_host = APIHost()
+        self._api_host.coresys = self.coresys
+
         self._register_addons()
         self._register_audio()
         self._register_auth()
@@ -100,10 +109,41 @@ class RestAPI(CoreSysAttributes):

         await self.start()

+    def _register_advanced_logs(self, path: str, syslog_identifier: str):
+        """Register logs endpoint for a given path, returning logs for single syslog identifier."""
+
+        self.webapp.add_routes(
+            [
+                web.get(
+                    f"{path}/logs",
+                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
+                ),
+                web.get(
+                    f"{path}/logs/follow",
+                    partial(
+                        self._api_host.advanced_logs,
+                        identifier=syslog_identifier,
+                        follow=True,
+                    ),
+                ),
+                web.get(
+                    f"{path}/logs/boots/{{bootid}}",
+                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
+                ),
+                web.get(
+                    f"{path}/logs/boots/{{bootid}}/follow",
+                    partial(
+                        self._api_host.advanced_logs,
+                        identifier=syslog_identifier,
+                        follow=True,
+                    ),
+                ),
+            ]
+        )
+
     def _register_host(self) -> None:
         """Register hostcontrol functions."""
-        api_host = APIHost()
-        api_host.coresys = self.coresys
+        api_host = self._api_host

         self.webapp.add_routes(
             [
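A minimal, self-contained sketch of the `functools.partial` trick used above: one handler serves many log routes, with the syslog identifier and follow flag pre-bound per route (names here are illustrative):

```python
from functools import partial

from aiohttp import web


async def advanced_logs(request: web.Request, identifier: str = "", follow: bool = False):
    # Stand-in handler; the real one streams journald output.
    return web.Response(text=f"logs for {identifier}, follow={follow}")


app = web.Application()
app.add_routes(
    [
        web.get("/core/logs", partial(advanced_logs, identifier="homeassistant")),
        web.get(
            "/core/logs/follow",
            partial(advanced_logs, identifier="homeassistant", follow=True),
        ),
    ]
)
# web.run_app(app) would serve both routes with the same underlying handler.
```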
@@ -180,12 +220,16 @@ class RestAPI(CoreSysAttributes):
                 web.post("/os/config/sync", api_os.config_sync),
                 web.post("/os/datadisk/move", api_os.migrate_data),
                 web.get("/os/datadisk/list", api_os.list_data),
+                web.post("/os/datadisk/wipe", api_os.wipe_data),
+                web.post("/os/boot-slot", api_os.set_boot_slot),
             ]
         )

         # Boards endpoints
         self.webapp.add_routes(
             [
+                web.get("/os/boards/green", api_os.boards_green_info),
+                web.post("/os/boards/green", api_os.boards_green_options),
                 web.get("/os/boards/yellow", api_os.boards_yellow_info),
                 web.post("/os/boards/yellow", api_os.boards_yellow_options),
                 web.get("/os/boards/{board}", api_os.boards_other_info),
@@ -215,6 +259,8 @@ class RestAPI(CoreSysAttributes):
                 web.get("/jobs/info", api_jobs.info),
                 web.post("/jobs/options", api_jobs.options),
                 web.post("/jobs/reset", api_jobs.reset),
+                web.get("/jobs/{uuid}", api_jobs.job_info),
+                web.delete("/jobs/{uuid}", api_jobs.remove_job),
             ]
         )

@@ -253,11 +299,11 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/multicast/info", api_multicast.info),
                 web.get("/multicast/stats", api_multicast.stats),
-                web.get("/multicast/logs", api_multicast.logs),
                 web.post("/multicast/update", api_multicast.update),
                 web.post("/multicast/restart", api_multicast.restart),
             ]
         )
+        self._register_advanced_logs("/multicast", "hassio_multicast")

     def _register_hardware(self) -> None:
         """Register hardware functions."""
@@ -330,6 +376,7 @@ class RestAPI(CoreSysAttributes):
                 web.post("/auth", api_auth.auth),
                 web.post("/auth/reset", api_auth.reset),
                 web.delete("/auth/cache", api_auth.cache),
+                web.get("/auth/list", api_auth.list_users),
             ]
         )

@@ -343,7 +390,6 @@ class RestAPI(CoreSysAttributes):
                 web.get("/supervisor/ping", api_supervisor.ping),
                 web.get("/supervisor/info", api_supervisor.info),
                 web.get("/supervisor/stats", api_supervisor.stats),
-                web.get("/supervisor/logs", api_supervisor.logs),
                 web.post("/supervisor/update", api_supervisor.update),
                 web.post("/supervisor/reload", api_supervisor.reload),
                 web.post("/supervisor/restart", api_supervisor.restart),
@@ -352,6 +398,38 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        async def get_supervisor_logs(*args, **kwargs):
+            try:
+                return await self._api_host.advanced_logs_handler(
+                    *args, identifier="hassio_supervisor", **kwargs
+                )
+            except Exception as err:  # pylint: disable=broad-exception-caught
+                # Supervisor logs are critical, so catch everything, log the exception
+                # and try to return Docker container logs as the fallback
+                _LOGGER.exception(
+                    "Failed to get supervisor logs using advanced_logs API"
+                )
+                if not isinstance(err, HostNotSupportedError):
+                    # No need to capture HostNotSupportedError to Sentry, the cause
+                    # is known and reported to the user using the resolution center.
+                    capture_exception(err)
+                return await api_supervisor.logs(*args, **kwargs)
+
+        self.webapp.add_routes(
+            [
+                web.get("/supervisor/logs", get_supervisor_logs),
+                web.get(
+                    "/supervisor/logs/follow",
+                    partial(get_supervisor_logs, follow=True),
+                ),
+                web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
+                web.get(
+                    "/supervisor/logs/boots/{bootid}/follow",
+                    partial(get_supervisor_logs, follow=True),
+                ),
+            ]
+        )
+
     def _register_homeassistant(self) -> None:
         """Register Home Assistant functions."""
         api_hass = APIHomeAssistant()
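A generic sketch of the fallback pattern above: try the richer handler first, report unexpected failures, then fall back to a simpler source (names are illustrative):

```python
import logging

_LOGGER = logging.getLogger(__name__)


async def with_fallback(primary, fallback, *args, **kwargs):
    """Call primary; on any failure, log it and serve from fallback instead."""
    try:
        return await primary(*args, **kwargs)
    except Exception:  # noqa: BLE001 - the endpoint must keep working
        _LOGGER.exception("Primary handler failed, using fallback")
        return await fallback(*args, **kwargs)
```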
@@ -360,7 +438,6 @@ class RestAPI(CoreSysAttributes):
         self.webapp.add_routes(
             [
                 web.get("/core/info", api_hass.info),
-                web.get("/core/logs", api_hass.logs),
                 web.get("/core/stats", api_hass.stats),
                 web.post("/core/options", api_hass.options),
                 web.post("/core/update", api_hass.update),
@@ -372,11 +449,12 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/core", "homeassistant")
+
         # Reroute from legacy
         self.webapp.add_routes(
             [
                 web.get("/homeassistant/info", api_hass.info),
-                web.get("/homeassistant/logs", api_hass.logs),
                 web.get("/homeassistant/stats", api_hass.stats),
                 web.post("/homeassistant/options", api_hass.options),
                 web.post("/homeassistant/restart", api_hass.restart),
@@ -388,6 +466,8 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/homeassistant", "homeassistant")
+
     def _register_proxy(self) -> None:
         """Register Home Assistant API Proxy."""
         api_proxy = APIProxy()
@@ -429,18 +509,39 @@ class RestAPI(CoreSysAttributes):
                 web.post("/addons/{addon}/stop", api_addons.stop),
                 web.post("/addons/{addon}/restart", api_addons.restart),
                 web.post("/addons/{addon}/options", api_addons.options),
+                web.post("/addons/{addon}/sys_options", api_addons.sys_options),
                 web.post(
                     "/addons/{addon}/options/validate", api_addons.options_validate
                 ),
                 web.get("/addons/{addon}/options/config", api_addons.options_config),
                 web.post("/addons/{addon}/rebuild", api_addons.rebuild),
-                web.get("/addons/{addon}/logs", api_addons.logs),
                 web.post("/addons/{addon}/stdin", api_addons.stdin),
                 web.post("/addons/{addon}/security", api_addons.security),
                 web.get("/addons/{addon}/stats", api_addons.stats),
             ]
         )

+        @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
+        async def get_addon_logs(request, *args, **kwargs):
+            addon = api_addons.get_addon_for_request(request)
+            kwargs["identifier"] = f"addon_{addon.slug}"
+            return await self._api_host.advanced_logs(request, *args, **kwargs)
+
+        self.webapp.add_routes(
+            [
+                web.get("/addons/{addon}/logs", get_addon_logs),
+                web.get(
+                    "/addons/{addon}/logs/follow",
+                    partial(get_addon_logs, follow=True),
+                ),
+                web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
+                web.get(
+                    "/addons/{addon}/logs/boots/{bootid}/follow",
+                    partial(get_addon_logs, follow=True),
+                ),
+            ]
+        )
+
         # Legacy routing to support requests for not installed addons
         api_store = APIStore()
         api_store.coresys = self.coresys
@@ -485,6 +586,8 @@ class RestAPI(CoreSysAttributes):
                 web.get("/backups/info", api_backups.info),
                 web.post("/backups/options", api_backups.options),
                 web.post("/backups/reload", api_backups.reload),
+                web.post("/backups/freeze", api_backups.freeze),
+                web.post("/backups/thaw", api_backups.thaw),
                 web.post("/backups/new/full", api_backups.backup_full),
                 web.post("/backups/new/partial", api_backups.backup_partial),
                 web.post("/backups/new/upload", api_backups.upload),
@@ -536,7 +639,6 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/dns/info", api_dns.info),
                 web.get("/dns/stats", api_dns.stats),
-                web.get("/dns/logs", api_dns.logs),
                 web.post("/dns/update", api_dns.update),
                 web.post("/dns/options", api_dns.options),
                 web.post("/dns/restart", api_dns.restart),
@@ -544,18 +646,17 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/dns", "hassio_dns")
+
     def _register_audio(self) -> None:
         """Register Audio functions."""
         api_audio = APIAudio()
         api_audio.coresys = self.coresys
-        api_host = APIHost()
-        api_host.coresys = self.coresys

         self.webapp.add_routes(
             [
                 web.get("/audio/info", api_audio.info),
                 web.get("/audio/stats", api_audio.stats),
-                web.get("/audio/logs", api_audio.logs),
                 web.post("/audio/update", api_audio.update),
                 web.post("/audio/restart", api_audio.restart),
                 web.post("/audio/reload", api_audio.reload),
@@ -568,6 +669,8 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/audio", "hassio_audio")
+
     def _register_mounts(self) -> None:
         """Register mounts endpoints."""
         api_mounts = APIMounts()
@@ -594,7 +697,6 @@ class RestAPI(CoreSysAttributes):
                 web.get("/store", api_store.store_info),
                 web.get("/store/addons", api_store.addons_list),
                 web.get("/store/addons/{addon}", api_store.addons_addon_info),
-                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                 web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
                 web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
                 web.get(
@@ -616,6 +718,8 @@ class RestAPI(CoreSysAttributes):
                     "/store/addons/{addon}/update/{version}",
                     api_store.addons_addon_update,
                 ),
+                # Must be below others since it has a wildcard in resource path
+                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                 web.post("/store/reload", api_store.reload),
                 web.get("/store/repositories", api_store.repositories_list),
                 web.get(
@@ -667,9 +771,7 @@ class RestAPI(CoreSysAttributes):
     async def start(self) -> None:
         """Run RESTful API webserver."""
         await self._runner.setup()
-        self._site = web.TCPSite(
-            self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
-        )
+        self._site = web.TCPSite(self._runner, host="0.0.0.0", port=80)

         try:
             await self._site.start()
@@ -1,4 +1,5 @@
 """Init file for Supervisor Home Assistant RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 import logging
@@ -8,8 +9,8 @@ from aiohttp import web
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from ..addons import AnyAddon
 from ..addons.addon import Addon
+from ..addons.manager import AnyAddon
 from ..addons.utils import rating_security
 from ..const import (
     ATTR_ADDONS,
@@ -81,6 +82,8 @@ from ..const import (
     ATTR_STARTUP,
     ATTR_STATE,
     ATTR_STDIN,
+    ATTR_SYSTEM_MANAGED,
+    ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
     ATTR_TRANSLATIONS,
     ATTR_UART,
     ATTR_UDEV,
@@ -95,6 +98,7 @@ from ..const import (
     ATTR_WEBUI,
     REQUEST_FROM,
     AddonBoot,
+    AddonBootConfig,
 )
 from ..coresys import CoreSysAttributes
 from ..docker.stats import DockerStats
@@ -106,8 +110,8 @@ from ..exceptions import (
     PwnedSecret,
 )
 from ..validate import docker_ports
-from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate, json_loads
+from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED
+from .utils import api_process, api_validate, json_loads

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -126,15 +130,26 @@ SCHEMA_OPTIONS = vol.Schema(
     }
 )

-# pylint: disable=no-value-for-parameter
+SCHEMA_SYS_OPTIONS = vol.Schema(
+    {
+        vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(),
+        vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str),
+    }
+)
+
 SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})

+SCHEMA_UNINSTALL = vol.Schema(
+    {vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
+)
+# pylint: enable=no-value-for-parameter
+

 class APIAddons(CoreSysAttributes):
     """Handle RESTful API for add-on functions."""

-    def _extract_addon(self, request: web.Request) -> Addon:
-        """Return addon, throw an exception it it doesn't exist."""
+    def get_addon_for_request(self, request: web.Request) -> Addon:
+        """Return addon, throw an exception if it doesn't exist."""
         addon_slug: str = request.match_info.get("addon")

         # Lookup itself
@@ -174,6 +189,7 @@ class APIAddons(CoreSysAttributes):
                 ATTR_URL: addon.url,
                 ATTR_ICON: addon.with_icon,
                 ATTR_LOGO: addon.with_logo,
+                ATTR_SYSTEM_MANAGED: addon.system_managed,
             }
             for addon in self.sys_addons.installed
         ]
@@ -187,7 +203,7 @@ class APIAddons(CoreSysAttributes):

     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return add-on information."""
-        addon: AnyAddon = self._extract_addon(request)
+        addon: AnyAddon = self.get_addon_for_request(request)

         data = {
             ATTR_NAME: addon.name,
@@ -202,6 +218,7 @@ class APIAddons(CoreSysAttributes):
             ATTR_VERSION_LATEST: addon.latest_version,
             ATTR_PROTECTED: addon.protected,
             ATTR_RATING: rating_security(addon),
+            ATTR_BOOT_CONFIG: addon.boot_config,
             ATTR_BOOT: addon.boot,
             ATTR_OPTIONS: addon.options,
             ATTR_SCHEMA: addon.schema_ui,
@@ -261,6 +278,8 @@ class APIAddons(CoreSysAttributes):
             ATTR_WATCHDOG: addon.watchdog,
             ATTR_DEVICES: addon.static_devices
             + [device.path for device in addon.devices],
+            ATTR_SYSTEM_MANAGED: addon.system_managed,
+            ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
         }

         return data
@@ -268,7 +287,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def options(self, request: web.Request) -> None:
         """Store user options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)

         # Update secrets for validation
         await self.sys_homeassistant.secrets.reload()
@@ -283,6 +302,10 @@ class APIAddons(CoreSysAttributes):
         if ATTR_OPTIONS in body:
             addon.options = body[ATTR_OPTIONS]
         if ATTR_BOOT in body:
+            if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
+                raise APIError(
+                    f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
+                )
             addon.boot = body[ATTR_BOOT]
         if ATTR_AUTO_UPDATE in body:
             addon.auto_update = body[ATTR_AUTO_UPDATE]
@@ -300,10 +323,24 @@ class APIAddons(CoreSysAttributes):

         addon.save_persist()

+    @api_process
+    async def sys_options(self, request: web.Request) -> None:
+        """Store system options for an add-on."""
+        addon = self.get_addon_for_request(request)
+
+        # Validate/Process Body
+        body = await api_validate(SCHEMA_SYS_OPTIONS, request)
+        if ATTR_SYSTEM_MANAGED in body:
+            addon.system_managed = body[ATTR_SYSTEM_MANAGED]
+        if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
+            addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
+
+        addon.save_persist()
+
     @api_process
     async def options_validate(self, request: web.Request) -> None:
         """Validate user options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}

         options = await request.json(loads=json_loads) or addon.options
@@ -345,7 +382,7 @@ class APIAddons(CoreSysAttributes):
         slug: str = request.match_info.get("addon")
         if slug != "self":
             raise APIForbidden("This can be only read by the Add-on itself!")
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)

         # Lookup/reload secrets
         await self.sys_homeassistant.secrets.reload()
@@ -357,7 +394,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def security(self, request: web.Request) -> None:
         """Store security options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

         if ATTR_PROTECTED in body:
@@ -369,7 +406,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def stats(self, request: web.Request) -> dict[str, Any]:
         """Return resource information."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)

         stats: DockerStats = await addon.stats()

@@ -385,48 +422,47 @@ class APIAddons(CoreSysAttributes):
         }

     @api_process
-    def uninstall(self, request: web.Request) -> Awaitable[None]:
+    async def uninstall(self, request: web.Request) -> Awaitable[None]:
         """Uninstall add-on."""
-        addon = self._extract_addon(request)
-        return asyncio.shield(addon.uninstall())
+        addon = self.get_addon_for_request(request)
+        body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
+        return await asyncio.shield(
+            self.sys_addons.uninstall(
+                addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
+            )
+        )

     @api_process
     async def start(self, request: web.Request) -> None:
         """Start add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(addon.start()):
             await start_task

     @api_process
     def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         return asyncio.shield(addon.stop())

     @api_process
     async def restart(self, request: web.Request) -> None:
         """Restart add-on."""
-        addon: Addon = self._extract_addon(request)
+        addon: Addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(addon.restart()):
             await start_task

     @api_process
     async def rebuild(self, request: web.Request) -> None:
         """Rebuild local build add-on."""
-        addon = self._extract_addon(request)
-        if start_task := await asyncio.shield(addon.rebuild()):
+        addon = self.get_addon_for_request(request)
+        if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
            await start_task

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return logs from add-on."""
-        addon = self._extract_addon(request)
-        return addon.logs()
-
     @api_process
     async def stdin(self, request: web.Request) -> None:
         """Write to stdin of add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if not addon.with_stdin:
             raise APIError(f"STDIN not supported the {addon.slug} add-on")

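A sketch of what the reworked uninstall endpoint accepts, re-declaring the voluptuous schema shown above with a plain-string key for illustration (`remove_config` is the only documented option here):

```python
import voluptuous as vol

SCHEMA_UNINSTALL = vol.Schema(
    {vol.Optional("remove_config", default=False): vol.Boolean()}
)

print(SCHEMA_UNINSTALL({}))                       # {'remove_config': False}
print(SCHEMA_UNINSTALL({"remove_config": True}))  # {'remove_config': True}
```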
@@ -1,4 +1,5 @@
 """Init file for Supervisor Audio RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 from dataclasses import asdict
@@ -35,8 +36,7 @@ from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..host.sound import StreamType
 from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -111,11 +111,6 @@ class APIAudio(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.audio.update(version))

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Audio Docker logs."""
-        return self.sys_plugins.audio.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Audio plugin."""
@@ -1,6 +1,8 @@
 """Init file for Supervisor auth/SSO RESTful API."""
+
 import asyncio
 import logging
+from typing import Any

 from aiohttp import BasicAuth, web
 from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -8,10 +10,19 @@ from aiohttp.web_exceptions import HTTPUnauthorized
 import voluptuous as vol

 from ..addons.addon import Addon
-from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
+from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIForbidden
-from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
+from ..utils.json import json_loads
+from .const import (
+    ATTR_GROUP_IDS,
+    ATTR_IS_ACTIVE,
+    ATTR_IS_OWNER,
+    ATTR_LOCAL_ONLY,
+    ATTR_USERS,
+    CONTENT_TYPE_JSON,
+    CONTENT_TYPE_URL,
+)
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -67,7 +78,7 @@ class APIAuth(CoreSysAttributes):

         # Json
         if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
-            data = await request.json()
+            data = await request.json(loads=json_loads)
             return await self._process_dict(request, addon, data)

         # URL encoded
@@ -89,3 +100,21 @@ class APIAuth(CoreSysAttributes):
     async def cache(self, request: web.Request) -> None:
         """Process cache reset request."""
         self.sys_auth.reset_data()
+
+    @api_process
+    async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
+        """List users on the Home Assistant instance."""
+        return {
+            ATTR_USERS: [
+                {
+                    ATTR_USERNAME: user[ATTR_USERNAME],
+                    ATTR_NAME: user[ATTR_NAME],
+                    ATTR_IS_OWNER: user[ATTR_IS_OWNER],
+                    ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
+                    ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
+                    ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
+                }
+                for user in await self.sys_auth.list_users()
+                if user[ATTR_USERNAME]
+            ]
+        }
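An illustrative payload for the new GET /auth/list endpoint, derived from the keys used above (the user values are made up for the example):

```python
example_response = {
    "users": [
        {
            "username": "homeowner",
            "name": "Home Owner",
            "is_owner": True,
            "is_active": True,
            "local_only": False,
            "group_ids": ["system-admin"],
        }
    ]
}
```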
@@ -1,5 +1,8 @@
 """Backups RESTful API."""
+
 import asyncio
+from collections.abc import Callable
+import errno
 import logging
 from pathlib import Path
 import re
@@ -10,6 +13,7 @@ from aiohttp import web
 from aiohttp.hdrs import CONTENT_DISPOSITION
 import voluptuous as vol

+from ..backups.backup import Backup
 from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
 from ..const import (
     ATTR_ADDONS,
@@ -20,6 +24,7 @@ from ..const import (
     ATTR_DAYS_UNTIL_STALE,
     ATTR_FOLDERS,
     ATTR_HOMEASSISTANT,
+    ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
     ATTR_LOCATON,
     ATTR_NAME,
     ATTR_PASSWORD,
@@ -28,13 +33,18 @@ from ..const import (
     ATTR_SIZE,
     ATTR_SLUG,
     ATTR_SUPERVISOR_VERSION,
+    ATTR_TIMEOUT,
     ATTR_TYPE,
     ATTR_VERSION,
+    BusEvent,
+    CoreState,
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
+from ..jobs import JobSchedulerOptions
 from ..mounts.const import MountUsage
-from .const import CONTENT_TYPE_TAR
+from ..resolution.const import UnhealthyReason
+from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -46,23 +56,29 @@ RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
 _ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]

 # pylint: disable=no-value-for-parameter
-SCHEMA_RESTORE_PARTIAL = vol.Schema(
+SCHEMA_RESTORE_FULL = vol.Schema(
     {
         vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
+        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
+    }
+)
+
+SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
+    {
         vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
         vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
         vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
     }
 )

-SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})
-
 SCHEMA_BACKUP_FULL = vol.Schema(
     {
         vol.Optional(ATTR_NAME): str,
         vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
         vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
         vol.Optional(ATTR_LOCATON): vol.Maybe(str),
+        vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
+        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
     }
 )

@@ -80,6 +96,12 @@ SCHEMA_OPTIONS = vol.Schema(
     }
 )

+SCHEMA_FREEZE = vol.Schema(
+    {
+        vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
+    }
+)
+

 class APIBackups(CoreSysAttributes):
     """Handle RESTful API for backups functions."""
@@ -142,7 +164,7 @@ class APIBackups(CoreSysAttributes):
         self.sys_backups.save_data()

     @api_process
-    async def reload(self, request):
+    async def reload(self, _):
         """Reload backup list."""
         await asyncio.shield(self.sys_backups.reload())
         return True
@@ -177,6 +199,7 @@ class APIBackups(CoreSysAttributes):
             ATTR_ADDONS: data_addons,
             ATTR_REPOSITORIES: backup.repositories,
             ATTR_FOLDERS: backup.folders,
+            ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
         }

     def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
@@ -192,46 +215,120 @@ class APIBackups(CoreSysAttributes):

         return body

+    async def _background_backup_task(
+        self, backup_method: Callable, *args, **kwargs
+    ) -> tuple[asyncio.Task, str]:
+        """Start backup task in background and return task and job ID."""
+        event = asyncio.Event()
+        job, backup_task = self.sys_jobs.schedule_job(
+            backup_method, JobSchedulerOptions(), *args, **kwargs
+        )
+
+        async def release_on_freeze(new_state: CoreState):
+            if new_state == CoreState.FREEZE:
+                event.set()
+
+        # Wait for system to get into freeze state before returning
+        # If the backup fails validation it will raise before getting there
+        listener = self.sys_bus.register_event(
+            BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
+        )
+        try:
+            await asyncio.wait(
+                (
+                    backup_task,
+                    self.sys_create_task(event.wait()),
+                ),
+                return_when=asyncio.FIRST_COMPLETED,
+            )
+            return (backup_task, job.uuid)
+        finally:
+            self.sys_bus.remove_listener(listener)
+
     @api_process
     async def backup_full(self, request):
         """Create full backup."""
         body = await api_validate(SCHEMA_BACKUP_FULL, request)
-        backup = await asyncio.shield(
-            self.sys_backups.do_backup_full(**self._location_to_mount(body))
+        background = body.pop(ATTR_BACKGROUND)
+        backup_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_backup_full, **self._location_to_mount(body)
         )
+
+        if background and not backup_task.done():
+            return {ATTR_JOB_ID: job_id}
+
+        backup: Backup = await backup_task
         if backup:
-            return {ATTR_SLUG: backup.slug}
-        return False
+            return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
+        raise APIError(
+            f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )

     @api_process
     async def backup_partial(self, request):
         """Create a partial backup."""
         body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
-        backup = await asyncio.shield(
-            self.sys_backups.do_backup_partial(**self._location_to_mount(body))
+        background = body.pop(ATTR_BACKGROUND)
+        backup_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_backup_partial, **self._location_to_mount(body)
        )
+
+        if background and not backup_task.done():
+            return {ATTR_JOB_ID: job_id}
+
+        backup: Backup = await backup_task
         if backup:
-            return {ATTR_SLUG: backup.slug}
-        return False
+            return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
+        raise APIError(
+            f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )

     @api_process
     async def restore_full(self, request):
         """Full restore of a backup."""
         backup = self._extract_slug(request)
         body = await api_validate(SCHEMA_RESTORE_FULL, request)
+        background = body.pop(ATTR_BACKGROUND)
+        restore_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_restore_full, backup, **body
+        )

-        return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))
+        if background and not restore_task.done() or await restore_task:
+            return {ATTR_JOB_ID: job_id}
+        raise APIError(
+            f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )

     @api_process
     async def restore_partial(self, request):
         """Partial restore a backup."""
         backup = self._extract_slug(request)
         body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
+        background = body.pop(ATTR_BACKGROUND)
+        restore_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_restore_partial, backup, **body
+        )

-        return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
+        if background and not restore_task.done() or await restore_task:
+            return {ATTR_JOB_ID: job_id}
+        raise APIError(
+            f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )
+
+    @api_process
+    async def freeze(self, request):
+        """Initiate manual freeze for external backup."""
+        body = await api_validate(SCHEMA_FREEZE, request)
+        await asyncio.shield(self.sys_backups.freeze_all(**body))
+
+    @api_process
+    async def thaw(self, request):
+        """Begin thaw after manual freeze."""
+        await self.sys_backups.thaw_all()

     @api_process
     async def remove(self, request):
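A self-contained sketch of the scheduling pattern in `_background_backup_task` above: run a job in the background, but hold the HTTP response until either the job finishes or a "freeze reached" event fires, whichever comes first (all names here are illustrative):

```python
import asyncio


async def run_job():
    await asyncio.sleep(2)  # stand-in for the real backup work
    return "backup-slug"


async def handler() -> str:
    freeze_reached = asyncio.Event()
    task = asyncio.create_task(run_job())

    # Elsewhere, a state listener would call freeze_reached.set().
    asyncio.get_running_loop().call_later(0.1, freeze_reached.set)

    await asyncio.wait(
        (task, asyncio.create_task(freeze_reached.wait())),
        return_when=asyncio.FIRST_COMPLETED,
    )
    if not task.done():
        return "job scheduled, poll /jobs/{uuid}"  # background-style response
    return await task


print(asyncio.run(handler()))
```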
@@ -246,9 +343,9 @@ class APIBackups(CoreSysAttributes):
         _LOGGER.info("Downloading backup %s", backup.slug)
         response = web.FileResponse(backup.tarfile)
         response.content_type = CONTENT_TYPE_TAR
-        response.headers[
-            CONTENT_DISPOSITION
-        ] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
+        response.headers[CONTENT_DISPOSITION] = (
+            f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
+        )
         return response

     @api_process
@@ -267,6 +364,8 @@ class APIBackups(CoreSysAttributes):
                     backup.write(chunk)

         except OSError as err:
+            if err.errno == errno.EBADMSG:
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
             _LOGGER.error("Can't write new backup file: %s", err)
             return False

|||||||
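Note on the restore changes above: when ATTR_BACKGROUND is set in the request body, the handler returns the Supervisor job id immediately instead of blocking until the restore finishes. A minimal client-side sketch, assuming the `POST /backups/<slug>/restore/full` endpoint, the usual `{"result": ..., "data": ...}` response envelope, and a Supervisor token; the function name and slug here are illustrative only:

import asyncio

import aiohttp

SUPERVISOR = "http://supervisor"  # assumed base URL from inside an add-on


async def restore_in_background(slug: str, token: str) -> str:
    """Kick off a full restore and return the Supervisor job id."""
    headers = {"Authorization": f"Bearer {token}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.post(
            f"{SUPERVISOR}/backups/{slug}/restore/full",
            json={"background": True},
        ) as resp:
            body = await resp.json()
            # With background=True the handler above returns as soon as the
            # restore job is scheduled, so the id can be polled afterwards.
            return body["data"]["job_id"]


if __name__ == "__main__":
    print(asyncio.run(restore_in_background("a1b2c3d4", token="<token>")))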
@@ -1,4 +1,5 @@
 """Init file for Supervisor HA cli RESTful API."""
+
 import asyncio
 import logging
 from typing import Any
@@ -1,11 +1,14 @@
 """Const for API."""

+from enum import StrEnum
+
 CONTENT_TYPE_BINARY = "application/octet-stream"
 CONTENT_TYPE_JSON = "application/json"
 CONTENT_TYPE_PNG = "image/png"
 CONTENT_TYPE_TAR = "application/tar"
 CONTENT_TYPE_TEXT = "text/plain"
 CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
+CONTENT_TYPE_X_LOG = "text/x-log"

 COOKIE_INGRESS = "ingress_session"

@@ -13,6 +16,10 @@ ATTR_AGENT_VERSION = "agent_version"
 ATTR_APPARMOR_VERSION = "apparmor_version"
 ATTR_ATTRIBUTES = "attributes"
 ATTR_AVAILABLE_UPDATES = "available_updates"
+ATTR_BACKGROUND = "background"
+ATTR_BOOT_CONFIG = "boot_config"
+ATTR_BOOT_SLOT = "boot_slot"
+ATTR_BOOT_SLOTS = "boot_slots"
 ATTR_BOOT_TIMESTAMP = "boot_timestamp"
 ATTR_BOOTS = "boots"
 ATTR_BROADCAST_LLMNR = "broadcast_llmnr"

@@ -23,7 +30,6 @@ ATTR_CONNECTION_BUS = "connection_bus"
 ATTR_DATA_DISK = "data_disk"
 ATTR_DEVICE = "device"
 ATTR_DEV_PATH = "dev_path"
-ATTR_DISK_LED = "disk_led"
 ATTR_DISKS = "disks"
 ATTR_DRIVES = "drives"
 ATTR_DT_SYNCHRONIZED = "dt_synchronized"

@@ -31,25 +37,43 @@ ATTR_DT_UTC = "dt_utc"
 ATTR_EJECTABLE = "ejectable"
 ATTR_FALLBACK = "fallback"
 ATTR_FILESYSTEMS = "filesystems"
-ATTR_HEARTBEAT_LED = "heartbeat_led"
+ATTR_FORCE = "force"
+ATTR_GROUP_IDS = "group_ids"
 ATTR_IDENTIFIERS = "identifiers"
+ATTR_IS_ACTIVE = "is_active"
+ATTR_IS_OWNER = "is_owner"
+ATTR_JOB_ID = "job_id"
+ATTR_JOBS = "jobs"
 ATTR_LLMNR = "llmnr"
 ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
+ATTR_LOCAL_ONLY = "local_only"
 ATTR_MDNS = "mdns"
 ATTR_MODEL = "model"
 ATTR_MOUNTS = "mounts"
 ATTR_MOUNT_POINTS = "mount_points"
 ATTR_PANEL_PATH = "panel_path"
-ATTR_POWER_LED = "power_led"
 ATTR_REMOVABLE = "removable"
+ATTR_REMOVE_CONFIG = "remove_config"
 ATTR_REVISION = "revision"
+ATTR_SAFE_MODE = "safe_mode"
 ATTR_SEAT = "seat"
 ATTR_SIGNED = "signed"
 ATTR_STARTUP_TIME = "startup_time"
+ATTR_STATUS = "status"
 ATTR_SUBSYSTEM = "subsystem"
 ATTR_SYSFS = "sysfs"
+ATTR_SYSTEM_HEALTH_LED = "system_health_led"
 ATTR_TIME_DETECTED = "time_detected"
 ATTR_UPDATE_TYPE = "update_type"
-ATTR_USE_NTP = "use_ntp"
 ATTR_USAGE = "usage"
+ATTR_USE_NTP = "use_ntp"
+ATTR_USERS = "users"
 ATTR_VENDOR = "vendor"
+ATTR_VIRTUALIZATION = "virtualization"
+
+
+class BootSlot(StrEnum):
+    """Boot slots used by HAOS."""
+
+    A = "A"
+    B = "B"
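Because `BootSlot` derives from `StrEnum`, its members are real strings: they compare equal to their raw values and format as plain text, which is what lets the API coerce payload strings with `vol.Coerce(BootSlot)`. A quick standalone sketch of that behavior:

from enum import StrEnum


class BootSlot(StrEnum):
    """Boot slots used by HAOS."""

    A = "A"
    B = "B"


assert BootSlot("A") is BootSlot.A       # lookup by raw payload value
assert BootSlot.B == "B"                 # StrEnum members compare equal to str
assert f"slot={BootSlot.A}" == "slot=A"  # and format as plain strings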
@@ -1,4 +1,5 @@
 """Init file for Supervisor network RESTful API."""
+
 import logging

 import voluptuous as vol

@@ -15,7 +16,6 @@ from ..const import (
     AddonState,
 )
 from ..coresys import CoreSysAttributes
-from ..discovery.validate import valid_discovery_service
 from ..exceptions import APIError, APIForbidden
 from .utils import api_process, api_validate, require_home_assistant

@@ -24,7 +24,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 SCHEMA_DISCOVERY = vol.Schema(
     {
         vol.Required(ATTR_SERVICE): str,
-        vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
+        vol.Required(ATTR_CONFIG): dict,
     }
 )

@@ -71,15 +71,6 @@ class APIDiscovery(CoreSysAttributes):
         addon: Addon = request[REQUEST_FROM]
         service = body[ATTR_SERVICE]

-        try:
-            valid_discovery_service(service)
-        except vol.Invalid:
-            _LOGGER.warning(
-                "Received discovery message for unknown service %s from addon %s. Please report this to the maintainer of the add-on",
-                service,
-                addon.name,
-            )
-
         # Access?
         if body[ATTR_SERVICE] not in addon.discovery:
             _LOGGER.error(
@@ -1,4 +1,5 @@
 """Init file for Supervisor DNS RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 import logging

@@ -26,8 +27,8 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import dns_server_list, version_tag
-from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -105,11 +106,6 @@ class APICoreDNS(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.dns.update(version))

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return DNS Docker logs."""
-        return self.sys_plugins.dns.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart CoreDNS plugin."""
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Init file for Supervisor Home Assistant RESTful API."""
|
"""Init file for Supervisor Home Assistant RESTful API."""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Init file for Supervisor hardware RESTful API."""
|
"""Init file for Supervisor hardware RESTful API."""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
@@ -16,7 +17,7 @@ from ..const import (
|
|||||||
ATTR_SYSTEM,
|
ATTR_SYSTEM,
|
||||||
)
|
)
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
from ..dbus.udisks2 import UDisks2
|
from ..dbus.udisks2 import UDisks2Manager
|
||||||
from ..dbus.udisks2.block import UDisks2Block
|
from ..dbus.udisks2.block import UDisks2Block
|
||||||
from ..dbus.udisks2.drive import UDisks2Drive
|
from ..dbus.udisks2.drive import UDisks2Drive
|
||||||
from ..hardware.data import Device
|
from ..hardware.data import Device
|
||||||
@@ -72,7 +73,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]:
|
def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
|
||||||
"""Return a dict with information of a disk to be used in the API."""
|
"""Return a dict with information of a disk to be used in the API."""
|
||||||
return {
|
return {
|
||||||
ATTR_VENDOR: drive.vendor,
|
ATTR_VENDOR: drive.vendor,
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
 """Init file for Supervisor Home Assistant RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 import logging

@@ -12,6 +13,7 @@ from ..const import (
     ATTR_AUDIO_INPUT,
     ATTR_AUDIO_OUTPUT,
     ATTR_BACKUP,
+    ATTR_BACKUPS_EXCLUDE_DATABASE,
     ATTR_BLK_READ,
     ATTR_BLK_WRITE,
     ATTR_BOOT,

@@ -33,10 +35,10 @@ from ..const import (
     ATTR_WATCHDOG,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError
+from ..exceptions import APIDBMigrationInProgress, APIError
 from ..validate import docker_image, network_port, version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .const import ATTR_FORCE, ATTR_SAFE_MODE
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -51,6 +53,7 @@ SCHEMA_OPTIONS = vol.Schema(
         vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str),
         vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
         vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
+        vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE): vol.Boolean(),
     }
 )

@@ -61,10 +64,34 @@ SCHEMA_UPDATE = vol.Schema(
     }
 )

+SCHEMA_RESTART = vol.Schema(
+    {
+        vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
+        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
+    }
+)
+
+SCHEMA_STOP = vol.Schema(
+    {
+        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
+    }
+)
+

 class APIHomeAssistant(CoreSysAttributes):
     """Handle RESTful API for Home Assistant functions."""

+    async def _check_offline_migration(self, force: bool = False) -> None:
+        """Check and raise if there's an offline DB migration in progress."""
+        if (
+            not force
+            and (state := await self.sys_homeassistant.api.get_api_state())
+            and state.offline_db_migration
+        ):
+            raise APIDBMigrationInProgress(
+                "Offline database migration in progress, try again after it has completed"
+            )
+
     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return host information."""

@@ -82,6 +109,7 @@ class APIHomeAssistant(CoreSysAttributes):
             ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
             ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
             ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
+            ATTR_BACKUPS_EXCLUDE_DATABASE: self.sys_homeassistant.backups_exclude_database,
         }

     @api_process

@@ -91,6 +119,9 @@ class APIHomeAssistant(CoreSysAttributes):

         if ATTR_IMAGE in body:
             self.sys_homeassistant.image = body[ATTR_IMAGE]
+            self.sys_homeassistant.override_image = (
+                self.sys_homeassistant.image != self.sys_homeassistant.default_image
+            )

         if ATTR_BOOT in body:
             self.sys_homeassistant.boot = body[ATTR_BOOT]

@@ -113,6 +144,11 @@ class APIHomeAssistant(CoreSysAttributes):
         if ATTR_AUDIO_OUTPUT in body:
             self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]

+        if ATTR_BACKUPS_EXCLUDE_DATABASE in body:
+            self.sys_homeassistant.backups_exclude_database = body[
+                ATTR_BACKUPS_EXCLUDE_DATABASE
+            ]
+
         self.sys_homeassistant.save_data()

     @api_process

@@ -137,6 +173,7 @@ class APIHomeAssistant(CoreSysAttributes):
     async def update(self, request: web.Request) -> None:
         """Update Home Assistant."""
         body = await api_validate(SCHEMA_UPDATE, request)
+        await self._check_offline_migration()

         await asyncio.shield(
             self.sys_homeassistant.core.update(

@@ -146,9 +183,12 @@ class APIHomeAssistant(CoreSysAttributes):
         )

     @api_process
-    def stop(self, request: web.Request) -> Awaitable[None]:
+    async def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.core.stop())
+        body = await api_validate(SCHEMA_STOP, request)
+        await self._check_offline_migration(force=body[ATTR_FORCE])
+
+        return await asyncio.shield(self.sys_homeassistant.core.stop())

     @api_process
     def start(self, request: web.Request) -> Awaitable[None]:

@@ -156,19 +196,24 @@ class APIHomeAssistant(CoreSysAttributes):
         return asyncio.shield(self.sys_homeassistant.core.start())

     @api_process
-    def restart(self, request: web.Request) -> Awaitable[None]:
+    async def restart(self, request: web.Request) -> None:
         """Restart Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.core.restart())
+        body = await api_validate(SCHEMA_RESTART, request)
+        await self._check_offline_migration(force=body[ATTR_FORCE])
+
+        await asyncio.shield(
+            self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
+        )

     @api_process
-    def rebuild(self, request: web.Request) -> Awaitable[None]:
+    async def rebuild(self, request: web.Request) -> None:
         """Rebuild Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.core.rebuild())
-
-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Home Assistant Docker logs."""
-        return self.sys_homeassistant.core.logs()
+        body = await api_validate(SCHEMA_RESTART, request)
+        await self._check_offline_migration(force=body[ATTR_FORCE])
+
+        await asyncio.shield(
+            self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])
+        )

     @api_process
     async def check(self, request: web.Request) -> None:
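The new `SCHEMA_RESTART` and `SCHEMA_STOP` bodies are fully optional: voluptuous fills in the declared defaults, so existing clients that POST an empty body keep working. A standalone sketch of the validation behavior (the string coercion shown is standard `vol.Boolean()` behavior, not something defined in this diff):

import voluptuous as vol

ATTR_SAFE_MODE = "safe_mode"
ATTR_FORCE = "force"

SCHEMA_RESTART = vol.Schema(
    {
        vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
    }
)

# Defaults are filled in for optional keys, so an empty body is valid:
assert SCHEMA_RESTART({}) == {"safe_mode": False, "force": False}
# vol.Boolean() also accepts common truthy strings such as "on" or "1":
assert SCHEMA_RESTART({"safe_mode": "on"})["safe_mode"] is True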
@@ -1,4 +1,5 @@
 """Init file for Supervisor host RESTful API."""
+
 import asyncio
 from contextlib import suppress
 import logging

@@ -27,8 +28,15 @@ from ..const import (
     ATTR_TIMEZONE,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, HostLogError
-from ..host.const import PARAM_BOOT_ID, PARAM_FOLLOW, PARAM_SYSLOG_IDENTIFIER
+from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError
+from ..host.const import (
+    PARAM_BOOT_ID,
+    PARAM_FOLLOW,
+    PARAM_SYSLOG_IDENTIFIER,
+    LogFormat,
+    LogFormatter,
+)
+from ..utils.systemd_journal import journal_logs_reader
 from .const import (
     ATTR_AGENT_VERSION,
     ATTR_APPARMOR_VERSION,

@@ -38,26 +46,48 @@ from .const import (
     ATTR_BROADCAST_MDNS,
     ATTR_DT_SYNCHRONIZED,
     ATTR_DT_UTC,
+    ATTR_FORCE,
     ATTR_IDENTIFIERS,
     ATTR_LLMNR_HOSTNAME,
     ATTR_STARTUP_TIME,
     ATTR_USE_NTP,
+    ATTR_VIRTUALIZATION,
     CONTENT_TYPE_TEXT,
+    CONTENT_TYPE_X_LOG,
 )
-from .utils import api_process, api_validate
+from .utils import api_process, api_process_raw, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

 IDENTIFIER = "identifier"
 BOOTID = "bootid"
-DEFAULT_RANGE = 100
+DEFAULT_LINES = 100

 SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})

+# pylint: disable=no-value-for-parameter
+SCHEMA_SHUTDOWN = vol.Schema(
+    {
+        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
+    }
+)
+# pylint: enable=no-value-for-parameter
+

 class APIHost(CoreSysAttributes):
     """Handle RESTful API for host functions."""

+    async def _check_ha_offline_migration(self, force: bool) -> None:
+        """Check if HA has an offline migration in progress and raise if not forced."""
+        if (
+            not force
+            and (state := await self.sys_homeassistant.api.get_api_state())
+            and state.offline_db_migration
+        ):
+            raise APIDBMigrationInProgress(
+                "Home Assistant offline database migration in progress, please wait until complete before shutting down host"
+            )
+
     @api_process
     async def info(self, request):
         """Return host information."""

@@ -65,6 +95,7 @@ class APIHost(CoreSysAttributes):
             ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
             ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
             ATTR_CHASSIS: self.sys_host.info.chassis,
+            ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
             ATTR_CPE: self.sys_host.info.cpe,
             ATTR_DEPLOYMENT: self.sys_host.info.deployment,
             ATTR_DISK_FREE: self.sys_host.info.free_space,

@@ -98,14 +129,20 @@ class APIHost(CoreSysAttributes):
         )

     @api_process
-    def reboot(self, request):
+    async def reboot(self, request):
         """Reboot host."""
-        return asyncio.shield(self.sys_host.control.reboot())
+        body = await api_validate(SCHEMA_SHUTDOWN, request)
+        await self._check_ha_offline_migration(force=body[ATTR_FORCE])
+
+        return await asyncio.shield(self.sys_host.control.reboot())

     @api_process
-    def shutdown(self, request):
+    async def shutdown(self, request):
         """Poweroff host."""
-        return asyncio.shield(self.sys_host.control.shutdown())
+        body = await api_validate(SCHEMA_SHUTDOWN, request)
+        await self._check_ha_offline_migration(force=body[ATTR_FORCE])
+
+        return await asyncio.shield(self.sys_host.control.shutdown())

     @api_process
     def reload(self, request):

@@ -153,11 +190,11 @@ class APIHost(CoreSysAttributes):
             raise APIError() from err
         return possible_offset

-    @api_process
-    async def advanced_logs(
+    async def advanced_logs_handler(
         self, request: web.Request, identifier: str | None = None, follow: bool = False
     ) -> web.StreamResponse:
         """Return systemd-journald logs."""
+        log_formatter = LogFormatter.PLAIN
         params = {}
         if identifier:
             params[PARAM_SYSLOG_IDENTIFIER] = identifier

@@ -165,6 +202,8 @@ class APIHost(CoreSysAttributes):
             params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
         else:
             params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
+            # host logs should be always verbose, no matter what Accept header is used
+            log_formatter = LogFormatter.VERBOSE

         if BOOTID in request.match_info:
             params[PARAM_BOOT_ID] = await self._get_boot_id(

@@ -175,28 +214,62 @@ class APIHost(CoreSysAttributes):

         if ACCEPT in request.headers and request.headers[ACCEPT] not in [
             CONTENT_TYPE_TEXT,
+            CONTENT_TYPE_X_LOG,
             "*/*",
         ]:
             raise APIError(
-                "Invalid content type requested. Only text/plain supported for now."
+                "Invalid content type requested. Only text/plain and text/x-log "
+                "supported for now."
             )

-        if RANGE in request.headers:
+        if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
+            log_formatter = LogFormatter.VERBOSE
+
+        if "lines" in request.query:
+            lines = request.query.get("lines", DEFAULT_LINES)
+            try:
+                lines = int(lines)
+            except ValueError:
+                # If the user passed a non-integer value, just use the default instead of error.
+                lines = DEFAULT_LINES
+            finally:
+                # We can't use the entries= Range header syntax to refer to the last 1 line,
+                # and passing 1 to the calculation below would return the 1st line of the logs
+                # instead. Since this is really an edge case that doesn't matter much, we'll just
+                # return 2 lines at minimum.
+                lines = max(2, lines)
+            # entries=cursor[[:num_skip]:num_entries]
+            range_header = f"entries=:-{lines-1}:{'' if follow else lines}"
+        elif RANGE in request.headers:
             range_header = request.headers.get(RANGE)
         else:
-            range_header = f"entries=:-{DEFAULT_RANGE}:"
+            range_header = (
+                f"entries=:-{DEFAULT_LINES-1}:{'' if follow else DEFAULT_LINES}"
+            )

         async with self.sys_host.logs.journald_logs(
-            params=params, range_header=range_header
+            params=params, range_header=range_header, accept=LogFormat.JOURNAL
         ) as resp:
             try:
                 response = web.StreamResponse()
                 response.content_type = CONTENT_TYPE_TEXT
-                await response.prepare(request)
-                async for data in resp.content:
-                    await response.write(data)
+                headers_returned = False
+                async for cursor, line in journal_logs_reader(resp, log_formatter):
+                    if not headers_returned:
+                        if cursor:
+                            response.headers["X-First-Cursor"] = cursor
+                        await response.prepare(request)
+                        headers_returned = True
+                    await response.write(line.encode("utf-8") + b"\n")
             except ConnectionResetError as ex:
                 raise APIError(
                     "Connection reset when trying to fetch data from systemd-journald."
                 ) from ex
             return response

+    @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
+    async def advanced_logs(
+        self, request: web.Request, identifier: str | None = None, follow: bool = False
+    ) -> web.StreamResponse:
+        """Return systemd-journald logs. Wrapped as standard API handler."""
+        return await self.advanced_logs_handler(request, identifier, follow)
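The `range_header` built above follows systemd-journal-gatewayd's `entries=cursor[[:num_skip]:num_entries]` syntax: an empty cursor anchors at the journal tail, a negative skip walks backwards, and the entry count is left open while following. A small sketch reproducing the calculation and its clamping edge case:

DEFAULT_LINES = 100


def range_for(lines: int, follow: bool) -> str:
    """Build a journal-gatewayd Range header for the last `lines` entries."""
    lines = max(2, lines)  # the handler above clamps to 2 for the same reason
    # entries=cursor[[:num_skip]:num_entries]: empty cursor means "from the end",
    # -N skips backwards, and num_entries stays open-ended when following.
    return f"entries=:-{lines - 1}:{'' if follow else lines}"


assert range_for(100, follow=False) == "entries=:-99:100"
assert range_for(50, follow=True) == "entries=:-49:"
assert range_for(1, follow=False) == "entries=:-1:2"  # clamped to 2 lines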
@@ -1,4 +1,5 @@
 """Supervisor Add-on ingress service."""
+
 import asyncio
 from ipaddress import ip_address
 import logging

@@ -48,6 +49,29 @@ SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema(
 )


+# from https://github.com/aio-libs/aiohttp/blob/8ae650bee4add9f131d49b96a0a150311ea58cd1/aiohttp/helpers.py#L1059C1-L1079C1
+def must_be_empty_body(method: str, code: int) -> bool:
+    """Check if a request must return an empty body."""
+    return (
+        status_code_must_be_empty_body(code)
+        or method_must_be_empty_body(method)
+        or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
+    )
+
+
+def method_must_be_empty_body(method: str) -> bool:
+    """Check if a method must return an empty body."""
+    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
+    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
+    return method.upper() == hdrs.METH_HEAD
+
+
+def status_code_must_be_empty_body(code: int) -> bool:
+    """Check if a status code must return an empty body."""
+    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
+    return code in {204, 304} or 100 <= code < 200
+
+
 class APIIngress(CoreSysAttributes):
     """Ingress view to handle add-on webui routing."""

@@ -225,10 +249,18 @@ class APIIngress(CoreSysAttributes):
             skip_auto_headers={hdrs.CONTENT_TYPE},
         ) as result:
             headers = _response_header(result)
+            # Avoid parsing content_type in simple cases for better performance
+            if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
+                content_type = (maybe_content_type.partition(";"))[0].strip()
+            else:
+                content_type = result.content_type
             # Simple request
             if (
-                hdrs.CONTENT_LENGTH in result.headers
+                # empty body responses should not be streamed,
+                # otherwise aiohttp < 3.9.0 may generate
+                # an invalid "0\r\n\r\n" chunk instead of an empty response.
+                must_be_empty_body(request.method, result.status)
+                or hdrs.CONTENT_LENGTH in result.headers
                 and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
             ):
                 # Return Response

@@ -236,13 +268,13 @@ class APIIngress(CoreSysAttributes):
                 return web.Response(
                     headers=headers,
                     status=result.status,
-                    content_type=result.content_type,
+                    content_type=content_type,
                    body=body,
                 )

             # Stream response
             response = web.StreamResponse(status=result.status, headers=headers)
-            response.content_type = result.content_type
+            response.content_type = content_type

             try:
                 await response.prepare(request)

@@ -282,8 +314,10 @@ def _init_header(

     if session_data is not None:
         headers[HEADER_REMOTE_USER_ID] = session_data.user.id
-        headers[HEADER_REMOTE_USER_NAME] = session_data.user.username
-        headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name
+        if session_data.user.username is not None:
+            headers[HEADER_REMOTE_USER_NAME] = session_data.user.username
+        if session_data.user.display_name is not None:
+            headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name

     # filter flags
     for name, value in request.headers.items():
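The three helpers vendored into ingress above encode RFC 9112's bodiless-response rules; streaming such responses through aiohttp older than 3.9.0 could emit a spurious `0\r\n\r\n` chunk. A standalone check of the cases the proxy now short-circuits:

from aiohttp import hdrs


def status_code_must_be_empty_body(code: int) -> bool:
    """1xx, 204 and 304 responses never carry a body (RFC 9112 section 6.3)."""
    return code in {204, 304} or 100 <= code < 200


def method_must_be_empty_body(method: str) -> bool:
    """Responses to HEAD never carry a body."""
    return method.upper() == hdrs.METH_HEAD


def must_be_empty_body(method: str, code: int) -> bool:
    """Combine the status, method, and CONNECT-tunnel rules."""
    return (
        status_code_must_be_empty_body(code)
        or method_must_be_empty_body(method)
        or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
    )


assert must_be_empty_body("GET", 304)      # cache revalidation
assert must_be_empty_body("HEAD", 200)     # HEAD mirrors GET headers only
assert must_be_empty_body("CONNECT", 200)  # successful tunnel
assert not must_be_empty_body("GET", 200)  # normal responses stream as usual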
@@ -1,4 +1,5 @@
 """Init file for Supervisor Jobs RESTful API."""
+
 import logging
 from typing import Any

@@ -6,7 +7,10 @@ from aiohttp import web
 import voluptuous as vol

 from ..coresys import CoreSysAttributes
+from ..exceptions import APIError
+from ..jobs import SupervisorJob
 from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
+from .const import ATTR_JOBS
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -19,11 +23,47 @@ SCHEMA_OPTIONS = vol.Schema(
 class APIJobs(CoreSysAttributes):
     """Handle RESTful API for OS functions."""

+    def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
+        """Return current job tree."""
+        jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
+        for job in self.sys_jobs.jobs:
+            if job.internal:
+                continue
+
+            if job.parent_id not in jobs_by_parent:
+                jobs_by_parent[job.parent_id] = [job]
+            else:
+                jobs_by_parent[job.parent_id].append(job)
+
+        job_list: list[dict[str, Any]] = []
+        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
+            [(job_list, start)]
+            if start
+            else [(job_list, job) for job in jobs_by_parent.get(None, [])]
+        )
+
+        while queue:
+            (current_list, current_job) = queue.pop(0)
+            child_jobs: list[dict[str, Any]] = []
+
+            # We remove parent_id and instead use that info to represent jobs as a tree
+            job_dict = current_job.as_dict() | {"child_jobs": child_jobs}
+            job_dict.pop("parent_id")
+            current_list.append(job_dict)
+
+            if current_job.uuid in jobs_by_parent:
+                queue.extend(
+                    [(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
+                )
+
+        return job_list
+
     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return JobManager information."""
         return {
             ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions,
+            ATTR_JOBS: self._list_jobs(),
         }

     @api_process

@@ -42,3 +82,19 @@ class APIJobs(CoreSysAttributes):
     async def reset(self, request: web.Request) -> None:
         """Reset options for JobManager."""
         self.sys_jobs.reset_data()
+
+    @api_process
+    async def job_info(self, request: web.Request) -> dict[str, Any]:
+        """Get details of a job by ID."""
+        job = self.sys_jobs.get_job(request.match_info.get("uuid"))
+        return self._list_jobs(job)[0]
+
+    @api_process
+    async def remove_job(self, request: web.Request) -> None:
+        """Remove a completed job."""
+        job = self.sys_jobs.get_job(request.match_info.get("uuid"))
+
+        if not job.done:
+            raise APIError(f"Job {job.uuid} is not done!")
+
+        self.sys_jobs.remove_job(job)
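`_list_jobs` above indexes jobs by `parent_id` and then does a breadth-first walk, replacing each `parent_id` with a nested `child_jobs` list. A self-contained sketch of the same walk using plain dicts in place of `SupervisorJob` (the job records here are made up for illustration):

from typing import Any

# Hypothetical flat job records standing in for SupervisorJob instances.
jobs = [
    {"uuid": "a", "name": "backup_manager_full_backup", "parent_id": None},
    {"uuid": "b", "name": "backup_store_homeassistant", "parent_id": "a"},
    {"uuid": "c", "name": "backup_store_addons", "parent_id": "a"},
]

by_parent: dict[str | None, list[dict[str, Any]]] = {}
for job in jobs:
    by_parent.setdefault(job["parent_id"], []).append(job)

tree: list[dict[str, Any]] = []
queue = [(tree, job) for job in by_parent.get(None, [])]
while queue:
    target, job = queue.pop(0)
    children: list[dict[str, Any]] = []
    # Drop parent_id; the nesting itself now carries that information.
    node = {k: v for k, v in job.items() if k != "parent_id"}
    node["child_jobs"] = children
    target.append(node)
    queue.extend((children, child) for child in by_parent.get(job["uuid"], []))

assert tree[0]["uuid"] == "a" and len(tree[0]["child_jobs"]) == 2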
@@ -1,4 +1,5 @@
 """Handle security part of this API."""
+
 import logging
 import re
 from typing import Final

@@ -8,6 +9,8 @@ from aiohttp.web import Request, RequestHandler, Response, middleware
 from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
 from awesomeversion import AwesomeVersion

+from supervisor.homeassistant.const import LANDINGPAGE
+
 from ...addons.const import RE_SLUG
 from ...const import (
     REQUEST_FROM,

@@ -19,6 +22,7 @@ from ...const import (
     CoreState,
 )
 from ...coresys import CoreSys, CoreSysAttributes
+from ...utils import version_is_new_enough
 from ..utils import api_return_error, excract_supervisor_token

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -76,6 +80,13 @@ ADDONS_API_BYPASS: Final = re.compile(
     r")$"
 )

+# Home Assistant only
+CORE_ONLY_PATHS: Final = re.compile(
+    r"^(?:"
+    r"/addons/" + RE_SLUG + "/sys_options"
+    r")$"
+)
+
 # Policy role add-on API access
 ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
     ROLE_DEFAULT: re.compile(

@@ -102,6 +113,8 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
         r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
         r"|/audio/.+"
         r"|/auth/cache"
+        r"|/available_updates"
+        r"|/backups.*"
         r"|/cli/.+"
         r"|/core/.+"
         r"|/dns/.+"

@@ -111,16 +124,17 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
         r"|/hassos/.+"
         r"|/homeassistant/.+"
         r"|/host/.+"
+        r"|/mounts.*"
         r"|/multicast/.+"
         r"|/network/.+"
         r"|/observer/.+"
-        r"|/os/.+"
+        r"|/os/(?!datadisk/wipe).+"
+        r"|/refresh_updates"
         r"|/resolution/.+"
-        r"|/backups.*"
+        r"|/security/.+"
         r"|/snapshots.*"
         r"|/store.*"
         r"|/supervisor/.+"
-        r"|/security/.+"
         r")$"
     ),
     ROLE_ADMIN: re.compile(

@@ -195,7 +209,7 @@ class SecurityMiddleware(CoreSysAttributes):
             CoreState.FREEZE,
         ):
             return api_return_error(
-                message=f"System is not ready with state: {self.sys_core.state.value}"
+                message=f"System is not ready with state: {self.sys_core.state}"
             )

         return await handler(request)

@@ -228,6 +242,9 @@ class SecurityMiddleware(CoreSysAttributes):
         if supervisor_token == self.sys_homeassistant.supervisor_token:
             _LOGGER.debug("%s access from Home Assistant", request.path)
             request_from = self.sys_homeassistant
+        elif CORE_ONLY_PATHS.match(request.path):
+            _LOGGER.warning(
+                "Attempted access to %s from client besides Home Assistant",
+                request.path,
+            )
+            raise HTTPForbidden()

         # Host
         if supervisor_token == self.sys_plugins.cli.supervisor_token:

@@ -275,7 +292,8 @@ class SecurityMiddleware(CoreSysAttributes):
         """Validate user from Core API proxy."""
         if (
             request[REQUEST_FROM] != self.sys_homeassistant
-            or self.sys_homeassistant.version >= _CORE_VERSION
+            or self.sys_homeassistant.version == LANDINGPAGE
+            or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION)
         ):
             return await handler(request)
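`CORE_ONLY_PATHS` gates `/addons/<slug>/sys_options` so that only requests authenticated with the Home Assistant token reach it; any other caller is rejected with HTTP 403 before role checks run. A sketch of the pattern itself, with `RE_SLUG` approximated here since its definition is outside this diff:

import re
from typing import Final

# Approximation of supervisor's RE_SLUG, for illustration only.
RE_SLUG: Final = r"[\-_0-9a-z]+"

CORE_ONLY_PATHS: Final = re.compile(
    r"^(?:"
    r"/addons/" + RE_SLUG + "/sys_options"
    r")$"
)

assert CORE_ONLY_PATHS.match("/addons/core_ssh/sys_options")
assert not CORE_ONLY_PATHS.match("/addons/core_ssh/options")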
@@ -1,4 +1,5 @@
 """Init file for Supervisor Multicast RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 import logging

@@ -23,8 +24,7 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -69,11 +69,6 @@ class APIMulticast(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.multicast.update(version))

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Multicast Docker logs."""
-        return self.sys_plugins.multicast.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Multicast plugin."""
@@ -1,8 +1,8 @@
 """REST API for network."""
+
 import asyncio
 from collections.abc import Awaitable
-from dataclasses import replace
-from ipaddress import ip_address, ip_interface
+from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface
 from typing import Any

 from aiohttp import web

@@ -48,18 +48,28 @@ from ..host.configuration import (
     Interface,
     InterfaceMethod,
     IpConfig,
+    IpSetting,
     VlanConfig,
     WifiConfig,
 )
 from ..host.const import AuthMethod, InterfaceType, WifiMode
 from .utils import api_process, api_validate

-_SCHEMA_IP_CONFIG = vol.Schema(
+_SCHEMA_IPV4_CONFIG = vol.Schema(
     {
-        vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
+        vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)],
         vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
-        vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
-        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
+        vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address),
+        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)],
+    }
+)
+
+_SCHEMA_IPV6_CONFIG = vol.Schema(
+    {
+        vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
+        vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
+        vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
+        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
     }
 )

@@ -76,18 +86,18 @@ _SCHEMA_WIFI_CONFIG = vol.Schema(
 # pylint: disable=no-value-for-parameter
 SCHEMA_UPDATE = vol.Schema(
     {
-        vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
-        vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
+        vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG,
+        vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG,
         vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
         vol.Optional(ATTR_ENABLED): vol.Boolean(),
     }
 )


-def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
+def ipconfig_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
     """Return a dict with information about ip configuration."""
     return {
-        ATTR_METHOD: config.method,
+        ATTR_METHOD: setting.method,
         ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
         ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
         ATTR_GATEWAY: str(config.gateway) if config.gateway else None,

@@ -122,8 +132,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
         ATTR_CONNECTED: interface.connected,
         ATTR_PRIMARY: interface.primary,
         ATTR_MAC: interface.mac,
-        ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
-        ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
+        ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
+        ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
         ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
         ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
     }

@@ -197,24 +207,26 @@ class APINetwork(CoreSysAttributes):
         # Apply config
         for key, config in body.items():
             if key == ATTR_IPV4:
-                interface.ipv4 = replace(
-                    interface.ipv4
-                    or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
-                    **config,
+                interface.ipv4setting = IpSetting(
+                    config.get(ATTR_METHOD, InterfaceMethod.STATIC),
+                    config.get(ATTR_ADDRESS, []),
+                    config.get(ATTR_GATEWAY),
+                    config.get(ATTR_NAMESERVERS, []),
                 )
             elif key == ATTR_IPV6:
-                interface.ipv6 = replace(
-                    interface.ipv6
-                    or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
-                    **config,
+                interface.ipv6setting = IpSetting(
+                    config.get(ATTR_METHOD, InterfaceMethod.STATIC),
+                    config.get(ATTR_ADDRESS, []),
+                    config.get(ATTR_GATEWAY),
+                    config.get(ATTR_NAMESERVERS, []),
                 )
             elif key == ATTR_WIFI:
-                interface.wifi = replace(
-                    interface.wifi
-                    or WifiConfig(
-                        WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
-                    ),
-                    **config,
+                interface.wifi = WifiConfig(
+                    config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE),
+                    config.get(ATTR_SSID, ""),
+                    config.get(ATTR_AUTH, AuthMethod.OPEN),
+                    config.get(ATTR_PSK, None),
+                    None,
                 )
             elif key == ATTR_ENABLED:
                 interface.enabled = config

@@ -256,24 +268,22 @@ class APINetwork(CoreSysAttributes):

         vlan_config = VlanConfig(vlan, interface.name)

-        ipv4_config = None
+        ipv4_setting = None
         if ATTR_IPV4 in body:
-            ipv4_config = IpConfig(
+            ipv4_setting = IpSetting(
                 body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
                 body[ATTR_IPV4].get(ATTR_ADDRESS, []),
                 body[ATTR_IPV4].get(ATTR_GATEWAY, None),
                 body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
-                None,
             )

-        ipv6_config = None
+        ipv6_setting = None
         if ATTR_IPV6 in body:
-            ipv6_config = IpConfig(
+            ipv6_setting = IpSetting(
                 body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
                 body[ATTR_IPV6].get(ATTR_ADDRESS, []),
                 body[ATTR_IPV6].get(ATTR_GATEWAY, None),
                 body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
-                None,
             )

         vlan_interface = Interface(

@@ -284,8 +294,10 @@ class APINetwork(CoreSysAttributes):
             True,
             False,
             InterfaceType.VLAN,
-            ipv4_config,
-            ipv6_config,
+            None,
+            ipv4_setting,
+            None,
+            ipv6_setting,
             None,
             vlan_config,
         )
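Splitting the shared `_SCHEMA_IP_CONFIG` into per-family schemas means a payload that mixes address families now fails validation up front instead of producing a half-coerced `IpConfig`. A standalone sketch of the stricter coercion, with the attribute key names inlined for brevity:

from ipaddress import IPv4Address, IPv4Interface

import voluptuous as vol

_SCHEMA_IPV4_CONFIG = vol.Schema(
    {
        vol.Optional("address"): [vol.Coerce(IPv4Interface)],
        vol.Optional("gateway"): vol.Coerce(IPv4Address),
        vol.Optional("nameservers"): [vol.Coerce(IPv4Address)],
    }
)

# Strings are coerced into the proper ipaddress objects...
ok = _SCHEMA_IPV4_CONFIG({"address": ["192.168.1.5/24"], "gateway": "192.168.1.1"})
assert ok["address"][0].network.prefixlen == 24

# ...while an IPv6 gateway no longer slips through a shared schema.
try:
    _SCHEMA_IPV4_CONFIG({"gateway": "fe80::1"})
except vol.Invalid:
    pass
else:
    raise AssertionError("IPv6 value must be rejected by the IPv4 schema")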
@@ -1,4 +1,5 @@
 """Init file for Supervisor Observer RESTful API."""
+
 import asyncio
 import logging
 from typing import Any
@@ -1,4 +1,5 @@
|
|||||||
"""Init file for Supervisor HassOS RESTful API."""
|
"""Init file for Supervisor HassOS RESTful API."""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
from collections.abc import Awaitable
|
from collections.abc import Awaitable
|
||||||
import logging
|
import logging
|
||||||
@@ -8,13 +9,18 @@ from aiohttp import web
|
|||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from ..const import (
|
from ..const import (
|
||||||
|
ATTR_ACTIVITY_LED,
|
||||||
ATTR_BOARD,
|
ATTR_BOARD,
|
||||||
ATTR_BOOT,
|
ATTR_BOOT,
|
||||||
ATTR_DEVICES,
|
ATTR_DEVICES,
|
||||||
|
ATTR_DISK_LED,
|
||||||
|
ATTR_HEARTBEAT_LED,
|
||||||
ATTR_ID,
|
ATTR_ID,
|
||||||
ATTR_NAME,
|
ATTR_NAME,
|
||||||
|
ATTR_POWER_LED,
|
||||||
ATTR_SERIAL,
|
ATTR_SERIAL,
|
||||||
ATTR_SIZE,
|
ATTR_SIZE,
|
||||||
|
ATTR_STATE,
|
||||||
ATTR_UPDATE_AVAILABLE,
|
ATTR_UPDATE_AVAILABLE,
|
||||||
ATTR_VERSION,
|
     ATTR_VERSION,
     ATTR_VERSION_LATEST,
@@ -24,24 +30,27 @@ from ..exceptions import BoardInvalidError
 from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..validate import version_tag
 from .const import (
+    ATTR_BOOT_SLOT,
+    ATTR_BOOT_SLOTS,
     ATTR_DATA_DISK,
     ATTR_DEV_PATH,
     ATTR_DEVICE,
-    ATTR_DISK_LED,
     ATTR_DISKS,
-    ATTR_HEARTBEAT_LED,
     ATTR_MODEL,
-    ATTR_POWER_LED,
+    ATTR_STATUS,
+    ATTR_SYSTEM_HEALTH_LED,
     ATTR_VENDOR,
+    BootSlot,
 )
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

+# pylint: disable=no-value-for-parameter
 SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
+SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
 SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})

-# pylint: disable=no-value-for-parameter
 SCHEMA_YELLOW_OPTIONS = vol.Schema(
     {
         vol.Optional(ATTR_DISK_LED): vol.Boolean(),
@@ -49,6 +58,14 @@ SCHEMA_YELLOW_OPTIONS = vol.Schema(
         vol.Optional(ATTR_POWER_LED): vol.Boolean(),
     }
 )
+SCHEMA_GREEN_OPTIONS = vol.Schema(
+    {
+        vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(),
+        vol.Optional(ATTR_POWER_LED): vol.Boolean(),
+        vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
+    }
+)
+# pylint: enable=no-value-for-parameter


 class APIOS(CoreSysAttributes):
@@ -64,6 +81,15 @@ class APIOS(CoreSysAttributes):
             ATTR_BOARD: self.sys_os.board,
             ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
             ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
+            ATTR_BOOT_SLOTS: {
+                slot.bootname: {
+                    ATTR_STATE: slot.state,
+                    ATTR_STATUS: slot.boot_status,
+                    ATTR_VERSION: slot.bundle_version,
+                }
+                for slot in self.sys_os.slots
+                if slot.bootname
+            },
         }

     @api_process
@@ -86,6 +112,17 @@ class APIOS(CoreSysAttributes):

         await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))

+    @api_process
+    def wipe_data(self, request: web.Request) -> Awaitable[None]:
+        """Trigger data disk wipe on Host."""
+        return asyncio.shield(self.sys_os.datadisk.wipe_disk())
+
+    @api_process
+    async def set_boot_slot(self, request: web.Request) -> None:
+        """Change the active boot slot and reboot into it."""
+        body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
+        await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
+
     @api_process
     async def list_data(self, request: web.Request) -> dict[str, Any]:
         """Return possible data targets."""
@@ -105,6 +142,35 @@ class APIOS(CoreSysAttributes):
             ],
         }

+    @api_process
+    async def boards_green_info(self, request: web.Request) -> dict[str, Any]:
+        """Get green board settings."""
+        return {
+            ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led,
+            ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led,
+            ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led,
+        }
+
+    @api_process
+    async def boards_green_options(self, request: web.Request) -> None:
+        """Update green board settings."""
+        body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
+
+        if ATTR_ACTIVITY_LED in body:
+            await self.sys_dbus.agent.board.green.set_activity_led(
+                body[ATTR_ACTIVITY_LED]
+            )
+
+        if ATTR_POWER_LED in body:
+            await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])
+
+        if ATTR_SYSTEM_HEALTH_LED in body:
+            await self.sys_dbus.agent.board.green.set_user_led(
+                body[ATTR_SYSTEM_HEALTH_LED]
+            )
+
+        self.sys_dbus.agent.board.green.save_data()
+
     @api_process
     async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
         """Get yellow board settings."""
@@ -120,14 +186,17 @@ class APIOS(CoreSysAttributes):
         body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)

         if ATTR_DISK_LED in body:
-            self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED]
+            await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])

         if ATTR_HEARTBEAT_LED in body:
-            self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED]
+            await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
+                body[ATTR_HEARTBEAT_LED]
+            )

         if ATTR_POWER_LED in body:
-            self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]
+            await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])

+        self.sys_dbus.agent.board.yellow.save_data()
         self.sys_resolution.create_issue(
             IssueType.REBOOT_REQUIRED,
             ContextType.SYSTEM,
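The new `set_boot_slot` handler leans entirely on voluptuous for input validation: `vol.Coerce(BootSlot)` turns the raw string from the request body into an enum member, or rejects the request before the handler body runs. A minimal sketch of that behavior, with a stand-in `BootSlot` enum (the real one lives in the OS const module; the A/B values are assumed from RAUC's slot naming):

```python
from enum import Enum

import voluptuous as vol


class BootSlot(str, Enum):
    """Stand-in for the Supervisor BootSlot enum (values assumed)."""

    A = "A"
    B = "B"


SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required("boot_slot"): vol.Coerce(BootSlot)})

print(SCHEMA_SET_BOOT_SLOT({"boot_slot": "A"}))  # {'boot_slot': <BootSlot.A: 'A'>}
try:
    SCHEMA_SET_BOOT_SLOT({"boot_slot": "C"})  # not a valid slot -> vol.Invalid
except vol.Invalid as err:
    print(f"rejected: {err}")
```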
--- a/supervisor/api/proxy.py
+++ b/supervisor/api/proxy.py
@@ -1,4 +1,5 @@
 """Utils for Home Assistant Proxy."""
+
 import asyncio
 from contextlib import asynccontextmanager
 import logging
@@ -6,11 +7,15 @@ import logging
 import aiohttp
 from aiohttp import web
 from aiohttp.client_exceptions import ClientConnectorError
+from aiohttp.client_ws import ClientWebSocketResponse
 from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE
+from aiohttp.http import WSMessage
+from aiohttp.http_websocket import WSMsgType
 from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized

 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, HomeAssistantAPIError, HomeAssistantAuthError
+from ..utils.json import json_dumps

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -74,7 +79,7 @@ class APIProxy(CoreSysAttributes):
             _LOGGER.error("Error on API for request %s", path)
         except aiohttp.ClientError as err:
             _LOGGER.error("Client error on API %s request %s", path, err)
-        except asyncio.TimeoutError:
+        except TimeoutError:
             _LOGGER.error("Client timeout error on API request %s", path)

         raise HTTPBadGateway()
@@ -114,7 +119,7 @@ class APIProxy(CoreSysAttributes):
                 body=data, status=client.status, content_type=client.content_type
             )

-    async def _websocket_client(self):
+    async def _websocket_client(self) -> ClientWebSocketResponse:
         """Initialize a WebSocket API connection."""
         url = f"{self.sys_homeassistant.api_url}/api/websocket"

@@ -142,7 +147,8 @@ class APIProxy(CoreSysAttributes):
                     {
                         "type": "auth",
                         "access_token": self.sys_homeassistant.api.access_token,
-                    }
+                    },
+                    dumps=json_dumps,
                 )

                 data = await client.receive_json()
@@ -167,6 +173,28 @@ class APIProxy(CoreSysAttributes):

         raise APIError()

+    async def _proxy_message(
+        self,
+        read_task: asyncio.Task,
+        target: web.WebSocketResponse | ClientWebSocketResponse,
+    ) -> None:
+        """Proxy a message from client to server or vice versa."""
+        if read_task.exception():
+            raise read_task.exception()
+
+        msg: WSMessage = read_task.result()
+        if msg.type == WSMsgType.TEXT:
+            return await target.send_str(msg.data)
+        if msg.type == WSMsgType.BINARY:
+            return await target.send_bytes(msg.data)
+        if msg.type == WSMsgType.CLOSE:
+            _LOGGER.debug("Received close message from WebSocket.")
+            return await target.close()
+
+        raise TypeError(
+            f"Cannot proxy websocket message of unsupported type: {msg.type}"
+        )
+
     async def websocket(self, request: web.Request):
         """Initialize a WebSocket API connection."""
         if not await self.sys_homeassistant.api.check_api_state():
@@ -176,11 +204,13 @@ class APIProxy(CoreSysAttributes):
         # init server
         server = web.WebSocketResponse(heartbeat=30)
         await server.prepare(request)
+        addon_name = None

         # handle authentication
         try:
             await server.send_json(
-                {"type": "auth_required", "ha_version": self.sys_homeassistant.version}
+                {"type": "auth_required", "ha_version": self.sys_homeassistant.version},
+                dumps=json_dumps,
             )

             # Check API access
@@ -193,14 +223,17 @@ class APIProxy(CoreSysAttributes):
             if not addon or not addon.access_homeassistant_api:
                 _LOGGER.warning("Unauthorized WebSocket access!")
                 await server.send_json(
-                    {"type": "auth_invalid", "message": "Invalid access"}
+                    {"type": "auth_invalid", "message": "Invalid access"},
+                    dumps=json_dumps,
                 )
                 return server

-            _LOGGER.info("WebSocket access from %s", addon.slug)
+            addon_name = addon.slug
+            _LOGGER.info("WebSocket access from %s", addon_name)

             await server.send_json(
-                {"type": "auth_ok", "ha_version": self.sys_homeassistant.version}
+                {"type": "auth_ok", "ha_version": self.sys_homeassistant.version},
+                dumps=json_dumps,
             )
         except (RuntimeError, ValueError) as err:
             _LOGGER.error("Can't initialize handshake: %s", err)
@@ -214,13 +247,13 @@ class APIProxy(CoreSysAttributes):

         _LOGGER.info("Home Assistant WebSocket API request running")
         try:
-            client_read = None
-            server_read = None
+            client_read: asyncio.Task | None = None
+            server_read: asyncio.Task | None = None
             while not server.closed and not client.closed:
                 if not client_read:
-                    client_read = self.sys_create_task(client.receive_str())
+                    client_read = self.sys_create_task(client.receive())
                 if not server_read:
-                    server_read = self.sys_create_task(server.receive_str())
+                    server_read = self.sys_create_task(server.receive())

                 # wait until data need to be processed
                 await asyncio.wait(
@@ -229,14 +262,12 @@ class APIProxy(CoreSysAttributes):

                 # server
                 if server_read.done() and not client.closed:
-                    server_read.exception()
-                    await client.send_str(server_read.result())
+                    await self._proxy_message(server_read, client)
                     server_read = None

                 # client
                 if client_read.done() and not server.closed:
-                    client_read.exception()
-                    await server.send_str(client_read.result())
+                    await self._proxy_message(client_read, server)
                     client_read = None

         except asyncio.CancelledError:
@@ -246,9 +277,9 @@ class APIProxy(CoreSysAttributes):
             _LOGGER.info("Home Assistant WebSocket API error: %s", err)

         finally:
-            if client_read:
+            if client_read and not client_read.done():
                 client_read.cancel()
-            if server_read:
+            if server_read and not server_read.done():
                 server_read.cancel()

             # close connections
@@ -257,5 +288,5 @@ class APIProxy(CoreSysAttributes):
         if not server.closed:
             await server.close()

-        _LOGGER.info("Home Assistant WebSocket API connection is closed")
+        _LOGGER.info("Home Assistant WebSocket API for %s closed", addon_name)
         return server
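The websocket proxy loop keeps one pending read task per side and wakes on whichever finishes first; the switch from `receive_str()` to `receive()` plus `_proxy_message` is what lets BINARY and CLOSE frames flow through instead of raising. A toy, self-contained model of that pump, using queues in place of the two sockets:

```python
import asyncio


async def pump(left: asyncio.Queue, right: asyncio.Queue) -> None:
    """Forward items from two 'sockets', whichever side is ready first."""
    left_read: asyncio.Task | None = None
    right_read: asyncio.Task | None = None
    for _ in range(2):  # bounded for the demo; the real loop runs until close
        if not left_read:
            left_read = asyncio.create_task(left.get())
        if not right_read:
            right_read = asyncio.create_task(right.get())

        await asyncio.wait(
            [left_read, right_read], return_when=asyncio.FIRST_COMPLETED
        )

        if left_read.done():
            print("left  ->", left_read.result())
            left_read = None
        if right_read.done():
            print("right ->", right_read.result())
            right_read = None

    # mirror the new cleanup: only cancel reads that are still pending
    for task in (left_read, right_read):
        if task and not task.done():
            task.cancel()


async def main() -> None:
    left, right = asyncio.Queue(), asyncio.Queue()
    for i in range(2):
        left.put_nowait(f"l{i}")
        right.put_nowait(f"r{i}")
    await pump(left, right)


asyncio.run(main())
```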
--- a/supervisor/api/resolution.py
+++ b/supervisor/api/resolution.py
@@ -1,4 +1,5 @@
 """Handle REST API for resoulution."""
+
 import asyncio
 from collections.abc import Awaitable
 from typing import Any

--- a/supervisor/api/root.py
+++ b/supervisor/api/root.py
@@ -1,4 +1,5 @@
 """Init file for Supervisor Root RESTful API."""
+
 import asyncio
 import logging
 from typing import Any

--- a/supervisor/api/security.py
+++ b/supervisor/api/security.py
@@ -1,4 +1,5 @@
 """Init file for Supervisor Security RESTful API."""
+
 import asyncio
 import logging
 from typing import Any
--- a/supervisor/api/store.py
+++ b/supervisor/api/store.py
@@ -1,4 +1,5 @@
 """Init file for Supervisor Home Assistant RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 from typing import Any
@@ -6,7 +7,7 @@ from typing import Any
 from aiohttp import web
 import voluptuous as vol

-from ..addons import AnyAddon
+from ..addons.manager import AnyAddon
 from ..addons.utils import rating_security
 from ..api.const import ATTR_SIGNED
 from ..api.utils import api_process, api_process_raw, api_validate
@@ -186,18 +187,20 @@ class APIStore(CoreSysAttributes):
         }

     @api_process
-    async def addons_list(self, request: web.Request) -> list[dict[str, Any]]:
+    async def addons_list(self, request: web.Request) -> dict[str, Any]:
         """Return all store add-ons."""
-        return [
-            self._generate_addon_information(self.sys_addons.store[addon])
-            for addon in self.sys_addons.store
-        ]
+        return {
+            ATTR_ADDONS: [
+                self._generate_addon_information(self.sys_addons.store[addon])
+                for addon in self.sys_addons.store
+            ]
+        }

     @api_process
     def addons_addon_install(self, request: web.Request) -> Awaitable[None]:
         """Install add-on."""
         addon = self._extract_addon(request)
-        return asyncio.shield(addon.install())
+        return asyncio.shield(self.sys_addons.install(addon.slug))

     @api_process
     async def addons_addon_update(self, request: web.Request) -> None:
@@ -209,7 +212,7 @@ class APIStore(CoreSysAttributes):
         body = await api_validate(SCHEMA_UPDATE, request)

         if start_task := await asyncio.shield(
-            addon.update(backup=body.get(ATTR_BACKUP))
+            self.sys_addons.update(addon.slug, backup=body.get(ATTR_BACKUP))
         ):
             await start_task

@@ -247,9 +250,14 @@ class APIStore(CoreSysAttributes):
     @api_process_raw(CONTENT_TYPE_TEXT)
     async def addons_addon_changelog(self, request: web.Request) -> str:
         """Return changelog from add-on."""
-        addon = self._extract_addon(request)
+        # Frontend can't handle error response here, need to return 200 and error as text for now
+        try:
+            addon = self._extract_addon(request)
+        except APIError as err:
+            return str(err)
+
         if not addon.with_changelog:
-            raise APIError(f"No changelog found for add-on {addon.slug}!")
+            return f"No changelog found for add-on {addon.slug}!"

         with addon.path_changelog.open("r") as changelog:
             return changelog.read()
@@ -257,9 +265,14 @@ class APIStore(CoreSysAttributes):
     @api_process_raw(CONTENT_TYPE_TEXT)
     async def addons_addon_documentation(self, request: web.Request) -> str:
         """Return documentation from add-on."""
-        addon = self._extract_addon(request)
+        # Frontend can't handle error response here, need to return 200 and error as text for now
+        try:
+            addon = self._extract_addon(request)
+        except APIError as err:
+            return str(err)
+
         if not addon.with_documentation:
-            raise APIError(f"No documentation found for add-on {addon.slug}!")
+            return f"No documentation found for add-on {addon.slug}!"

         with addon.path_documentation.open("r") as documentation:
             return documentation.read()
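The `addons_list` change is a response-shape change, not a behavior change: the same add-on dicts now arrive wrapped under an `addons` key, so the endpoint can grow sibling fields later without breaking clients. A sketch of the before/after payload (the fields inside each dict are illustrative):

```python
old_response = [{"slug": "core_ssh"}, {"slug": "core_samba"}]
new_response = {"addons": [{"slug": "core_ssh"}, {"slug": "core_samba"}]}

# the old list is still there, one level down
assert new_response["addons"] == old_response
```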
--- a/supervisor/api/supervisor.py
+++ b/supervisor/api/supervisor.py
@@ -1,4 +1,5 @@
 """Init file for Supervisor Supervisor RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 import logging
@@ -49,7 +50,7 @@ from ..store.validate import repositories
 from ..utils.sentry import close_sentry, init_sentry
 from ..utils.validate import validate_timezone
 from ..validate import version_tag, wait_boot
-from .const import CONTENT_TYPE_BINARY
+from .const import CONTENT_TYPE_TEXT
 from .utils import api_process, api_process_raw, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -140,7 +141,7 @@ class APISupervisor(CoreSysAttributes):

         if ATTR_DIAGNOSTICS in body:
             self.sys_config.diagnostics = body[ATTR_DIAGNOSTICS]
-            self.sys_dbus.agent.diagnostics = body[ATTR_DIAGNOSTICS]
+            await self.sys_dbus.agent.set_diagnostics(body[ATTR_DIAGNOSTICS])

             if body[ATTR_DIAGNOSTICS]:
                 init_sentry(self.coresys)
@@ -229,7 +230,7 @@ class APISupervisor(CoreSysAttributes):
         """Soft restart Supervisor."""
         return asyncio.shield(self.sys_supervisor.restart())

-    @api_process_raw(CONTENT_TYPE_BINARY)
+    @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
     def logs(self, request: web.Request) -> Awaitable[bytes]:
         """Return supervisor Docker logs."""
         return self.sys_supervisor.logs()
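The diagnostics toggle moves from a plain attribute assignment to an awaited D-Bus call, so failures surface at the call site instead of being fire-and-forget. A minimal sketch of that property-to-coroutine migration pattern (the `Agent` class here is a stand-in for the real os-agent D-Bus proxy):

```python
import asyncio


class Agent:
    """Stand-in for the os-agent D-Bus proxy."""

    def __init__(self) -> None:
        self._diagnostics = False

    @property
    def diagnostics(self) -> bool:
        return self._diagnostics

    async def set_diagnostics(self, value: bool) -> None:
        # the real call performs a D-Bus write and can raise on failure
        self._diagnostics = value


async def main() -> None:
    agent = Agent()
    await agent.set_diagnostics(True)  # was: agent.diagnostics = True
    print(agent.diagnostics)


asyncio.run(main())
```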
--- a/supervisor/api/utils.py
+++ b/supervisor/api/utils.py
@@ -1,4 +1,5 @@
 """Init file for Supervisor util for RESTful API."""
+
 import json
 from typing import Any

@@ -13,6 +14,7 @@ from ..const import (
     HEADER_TOKEN,
     HEADER_TOKEN_OLD,
     JSON_DATA,
+    JSON_JOB_ID,
     JSON_MESSAGE,
     JSON_RESULT,
     REQUEST_FROM,
@@ -22,9 +24,9 @@ from ..const import (
 from ..coresys import CoreSys
 from ..exceptions import APIError, APIForbidden, DockerAPIError, HassioError
 from ..utils import check_exception_chain, get_message_from_exception_chain
-from ..utils.json import JSONEncoder
+from ..utils.json import json_dumps, json_loads as json_loads_util
 from ..utils.log_format import format_message
-from .const import CONTENT_TYPE_BINARY
+from . import const


 def excract_supervisor_token(request: web.Request) -> str | None:
@@ -48,7 +50,7 @@ def json_loads(data: Any) -> dict[str, Any]:
     if not data:
         return {}
     try:
-        return json.loads(data)
+        return json_loads_util(data)
     except json.JSONDecodeError as err:
         raise APIError("Invalid json") from err

@@ -90,7 +92,7 @@ def require_home_assistant(method):
     return wrap_api


-def api_process_raw(content):
+def api_process_raw(content, *, error_type=None):
     """Wrap content_type into function."""

     def wrap_method(method):
@@ -100,15 +102,15 @@ def api_process_raw(content):
             """Return api information."""
             try:
                 msg_data = await method(api, *args, **kwargs)
-                msg_type = content
-            except (APIError, APIForbidden) as err:
-                msg_data = str(err).encode()
-                msg_type = CONTENT_TYPE_BINARY
-            except HassioError:
-                msg_data = b""
-                msg_type = CONTENT_TYPE_BINARY
+            except HassioError as err:
+                return api_return_error(
+                    err, error_type=error_type or const.CONTENT_TYPE_BINARY
+                )

-            return web.Response(body=msg_data, content_type=msg_type)
+            if isinstance(msg_data, (web.Response, web.StreamResponse)):
+                return msg_data
+
+            return web.Response(body=msg_data, content_type=content)

         return wrap_api
@@ -116,21 +118,41 @@ def api_process_raw(content):


 def api_return_error(
-    error: Exception | None = None, message: str | None = None
+    error: Exception | None = None,
+    message: str | None = None,
+    error_type: str | None = None,
 ) -> web.Response:
     """Return an API error message."""
     if error and not message:
         message = get_message_from_exception_chain(error)
         if check_exception_chain(error, DockerAPIError):
             message = format_message(message)
+    if not message:
+        message = "Unknown error, see supervisor"

-    return web.json_response(
-        {
-            JSON_RESULT: RESULT_ERROR,
-            JSON_MESSAGE: message or "Unknown error, see supervisor",
-        },
-        status=400,
-        dumps=lambda x: json.dumps(x, cls=JSONEncoder),
-    )
+    status = 400
+    if is_api_error := isinstance(error, APIError):
+        status = error.status
+
+    match error_type:
+        case const.CONTENT_TYPE_TEXT:
+            return web.Response(body=message, content_type=error_type, status=status)
+        case const.CONTENT_TYPE_BINARY:
+            return web.Response(
+                body=message.encode(), content_type=error_type, status=status
+            )
+        case _:
+            result = {
+                JSON_RESULT: RESULT_ERROR,
+                JSON_MESSAGE: message,
+            }
+            if is_api_error and error.job_id:
+                result[JSON_JOB_ID] = error.job_id
+
+            return web.json_response(
+                result,
+                status=status,
+                dumps=json_dumps,
+            )


@@ -138,7 +160,7 @@ def api_return_ok(data: dict[str, Any] | None = None) -> web.Response:
     """Return an API ok answer."""
     return web.json_response(
         {JSON_RESULT: RESULT_OK, JSON_DATA: data or {}},
-        dumps=lambda x: json.dumps(x, cls=JSONEncoder),
+        dumps=json_dumps,
     )
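`api_return_error` now picks the response encoding from `error_type` and the status code from the exception, and JSON errors can carry a `job_id`. A condensed, self-contained model of that dispatch (the constants and the `APIError` shape are simplified stand-ins for the real ones):

```python
from dataclasses import dataclass

CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_BINARY = "application/octet-stream"


@dataclass
class APIError(Exception):
    message: str = "Unknown error, see supervisor"
    status: int = 400
    job_id: str | None = None


def render_error(error: APIError, error_type: str | None = None):
    """Return (body, content_type, status) for an API error response."""
    match error_type:
        case "text/plain":  # const.CONTENT_TYPE_TEXT in the real code
            return error.message, error_type, error.status
        case "application/octet-stream":  # const.CONTENT_TYPE_BINARY
            return error.message.encode(), error_type, error.status
        case _:
            result = {"result": "error", "message": error.message}
            if error.job_id:
                result["job_id"] = error.job_id
            return result, "application/json", error.status


print(render_error(APIError("boom", status=404), CONTENT_TYPE_TEXT))
print(render_error(APIError("boom", job_id="abc123")))
```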
--- a/supervisor/arch.py
+++ b/supervisor/arch.py
@@ -1,4 +1,5 @@
 """Handle Arch for underlay maschine/platforms."""
+
 import logging
 from pathlib import Path
 import platform
@@ -28,6 +29,7 @@ class CpuArch(CoreSysAttributes):
         """Initialize CPU Architecture handler."""
         self.coresys = coresys
         self._supported_arch: list[str] = []
+        self._supported_set: set[str] = set()
         self._default_arch: str

     @property
@@ -70,9 +72,11 @@ class CpuArch(CoreSysAttributes):
             if native_support not in self._supported_arch:
                 self._supported_arch.append(native_support)

+        self._supported_set = set(self._supported_arch)
+
     def is_supported(self, arch_list: list[str]) -> bool:
         """Return True if there is a supported arch by this platform."""
-        return not set(self.supported).isdisjoint(set(arch_list))
+        return not self._supported_set.isdisjoint(arch_list)

     def match(self, arch_list: list[str]) -> str:
         """Return best match for this CPU/Platform."""
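The arch change is a small hot-path optimization: `is_supported()` runs for every add-on, and the old version rebuilt `set(self.supported)` on each call. Precomputing the set once makes each check a single `isdisjoint()`, which accepts any iterable, so the `set(arch_list)` copy goes away too:

```python
supported = ["amd64", "i386"]
supported_set = set(supported)  # built once, as in the new load path


def is_supported(arch_list: list[str]) -> bool:
    """True if any requested arch is supported on this platform."""
    return not supported_set.isdisjoint(arch_list)


print(is_supported(["aarch64", "amd64"]))  # True
print(is_supported(["armv7"]))  # False
```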
--- a/supervisor/auth.py
+++ b/supervisor/auth.py
@@ -1,12 +1,20 @@
 """Manage SSO for Add-ons with Home Assistant user."""
+
 import asyncio
 import hashlib
 import logging
+from typing import Any

 from .addons.addon import Addon
-from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_USERNAME, FILE_HASSIO_AUTH
+from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_TYPE, ATTR_USERNAME, FILE_HASSIO_AUTH
 from .coresys import CoreSys, CoreSysAttributes
-from .exceptions import AuthError, AuthPasswordResetError, HomeAssistantAPIError
+from .exceptions import (
+    AuthError,
+    AuthListUsersError,
+    AuthPasswordResetError,
+    HomeAssistantAPIError,
+    HomeAssistantWSError,
+)
 from .utils.common import FileConfiguration
 from .validate import SCHEMA_AUTH_CONFIG
@@ -132,6 +140,17 @@ class Auth(FileConfiguration, CoreSysAttributes):

         raise AuthPasswordResetError()

+    async def list_users(self) -> list[dict[str, Any]]:
+        """List users on the Home Assistant instance."""
+        try:
+            return await self.sys_homeassistant.websocket.async_send_command(
+                {ATTR_TYPE: "config/auth/list"}
+            )
+        except HomeAssistantWSError:
+            _LOGGER.error("Can't request listing users on Home Assistant!")
+
+        raise AuthListUsersError()
+
     @staticmethod
     def _rehash(value: str, salt2: str = "") -> str:
         """Rehash a value."""
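`list_users()` follows the codebase's usual layering: transport failures from the Home Assistant WebSocket client are logged and converted into a domain-specific `AuthListUsersError`, so API callers deal with one exception type. A sketch of the pattern in isolation (the command payload comes from the diff; the client argument is a stand-in for `sys_homeassistant.websocket`):

```python
class HomeAssistantWSError(Exception):
    """Raised by the websocket client on transport failure."""


class AuthListUsersError(Exception):
    """Raised when listing Home Assistant users fails."""


async def list_users(ws_client) -> list[dict]:
    """Fetch users via the HA WebSocket API, normalizing errors."""
    try:
        return await ws_client.async_send_command({"type": "config/auth/list"})
    except HomeAssistantWSError:
        # the real code logs here; falling through raises the domain error
        pass
    raise AuthListUsersError()
```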
--- a/supervisor/backups/backup.py
+++ b/supervisor/backups/backup.py
@@ -1,13 +1,19 @@
 """Representation of a backup file."""
+
+import asyncio
 from base64 import b64decode, b64encode
+from collections import defaultdict
 from collections.abc import Awaitable
+from copy import deepcopy
 from datetime import timedelta
 from functools import cached_property
+import io
 import json
 import logging
 from pathlib import Path
 import tarfile
 from tempfile import TemporaryDirectory
+import time
 from typing import Any

 from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
@@ -18,13 +24,14 @@ from securetar import SecureTarFile, atomic_contents_add, secure_path
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from ..addons import Addon
+from ..addons.manager import Addon
 from ..const import (
     ATTR_ADDONS,
     ATTR_COMPRESSED,
     ATTR_CRYPTO,
     ATTR_DATE,
     ATTR_DOCKER,
+    ATTR_EXCLUDE_DATABASE,
     ATTR_FOLDERS,
     ATTR_HOMEASSISTANT,
     ATTR_NAME,
@@ -40,11 +47,14 @@ from ..const import (
     ATTR_VERSION,
     CRYPTO_AES128,
 )
-from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import AddonsError, BackupError
+from ..coresys import CoreSys
+from ..exceptions import AddonsError, BackupError, BackupInvalidError
+from ..jobs.const import JOB_GROUP_BACKUP
+from ..jobs.decorator import Job
+from ..jobs.job_group import JobGroup
 from ..utils import remove_folder
 from ..utils.dt import parse_datetime, utcnow
-from ..utils.json import write_json_file
+from ..utils.json import json_bytes
 from .const import BUF_SIZE, BackupType
 from .utils import key_to_iv, password_to_key
 from .validate import SCHEMA_BACKUP
@@ -52,15 +62,25 @@ from .validate import SCHEMA_BACKUP
 _LOGGER: logging.Logger = logging.getLogger(__name__)


-class Backup(CoreSysAttributes):
+class Backup(JobGroup):
     """A single Supervisor backup."""

-    def __init__(self, coresys: CoreSys, tar_file: Path):
+    def __init__(
+        self,
+        coresys: CoreSys,
+        tar_file: Path,
+        slug: str,
+        data: dict[str, Any] | None = None,
+    ):
         """Initialize a backup."""
-        self.coresys: CoreSys = coresys
+        super().__init__(
+            coresys, JOB_GROUP_BACKUP.format_map(defaultdict(str, slug=slug)), slug
+        )
         self._tarfile: Path = tar_file
-        self._data: dict[str, Any] = {}
+        self._data: dict[str, Any] = data or {ATTR_SLUG: slug}
         self._tmp = None
+        self._outer_secure_tarfile: SecureTarFile | None = None
+        self._outer_secure_tarfile_tarfile: tarfile.TarFile | None = None
         self._key: bytes | None = None
         self._aes: Cipher | None = None
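One detail worth calling out in the new `__init__`: the job-group name is built with `str.format_map` over a `defaultdict(str)`, so a missing `slug` renders as an empty string instead of raising `KeyError`. The template value below is an assumption about what `JOB_GROUP_BACKUP` looks like:

```python
from collections import defaultdict

JOB_GROUP_BACKUP = "backup_{slug}"  # assumed shape of the real constant

print(JOB_GROUP_BACKUP.format_map(defaultdict(str, slug="9a8f2c")))  # backup_9a8f2c
print(JOB_GROUP_BACKUP.format_map(defaultdict(str)))  # backup_
```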
@@ -85,7 +105,7 @@ class Backup(CoreSysAttributes):
         return self._data[ATTR_NAME]

     @property
-    def date(self):
+    def date(self) -> str:
         """Return backup date."""
         return self._data[ATTR_DATE]

@@ -100,39 +120,46 @@ class Backup(CoreSysAttributes):
         return self._data[ATTR_COMPRESSED]

     @property
-    def addons(self):
+    def addons(self) -> list[dict[str, Any]]:
         """Return backup date."""
         return self._data[ATTR_ADDONS]

     @property
-    def addon_list(self):
+    def addon_list(self) -> list[str]:
         """Return a list of add-ons slugs."""
         return [addon_data[ATTR_SLUG] for addon_data in self.addons]

     @property
-    def folders(self):
+    def folders(self) -> list[str]:
         """Return list of saved folders."""
         return self._data[ATTR_FOLDERS]

     @property
-    def repositories(self):
+    def repositories(self) -> list[str]:
         """Return backup date."""
         return self._data[ATTR_REPOSITORIES]

     @repositories.setter
-    def repositories(self, value):
+    def repositories(self, value: list[str]) -> None:
         """Set backup date."""
         self._data[ATTR_REPOSITORIES] = value

     @property
-    def homeassistant_version(self):
+    def homeassistant_version(self) -> AwesomeVersion:
         """Return backup Home Assistant version."""
         if self.homeassistant is None:
             return None
-        return self._data[ATTR_HOMEASSISTANT][ATTR_VERSION]
+        return self.homeassistant[ATTR_VERSION]

     @property
-    def homeassistant(self):
+    def homeassistant_exclude_database(self) -> bool:
+        """Return whether database was excluded from Home Assistant backup."""
+        if self.homeassistant is None:
+            return None
+        return self.homeassistant[ATTR_EXCLUDE_DATABASE]
+
+    @property
+    def homeassistant(self) -> dict[str, Any]:
         """Return backup Home Assistant data."""
         return self._data[ATTR_HOMEASSISTANT]

@@ -142,12 +169,12 @@ class Backup(CoreSysAttributes):
         return self._data[ATTR_SUPERVISOR_VERSION]

     @property
-    def docker(self):
+    def docker(self) -> dict[str, Any]:
         """Return backup Docker config data."""
         return self._data.get(ATTR_DOCKER, {})

     @docker.setter
-    def docker(self, value):
+    def docker(self, value: dict[str, Any]) -> None:
         """Set the Docker config data."""
         self._data[ATTR_DOCKER] = value

@@ -160,32 +187,36 @@ class Backup(CoreSysAttributes):
         return None

     @property
-    def size(self):
+    def size(self) -> float:
         """Return backup size."""
         if not self.tarfile.is_file():
             return 0
         return round(self.tarfile.stat().st_size / 1048576, 2)  # calc mbyte

     @property
-    def is_new(self):
+    def is_new(self) -> bool:
         """Return True if there is new."""
         return not self.tarfile.exists()

     @property
-    def tarfile(self):
+    def tarfile(self) -> Path:
         """Return path to backup tarfile."""
         return self._tarfile

     @property
-    def is_current(self):
+    def is_current(self) -> bool:
         """Return true if backup is current, false if stale."""
         return parse_datetime(self.date) >= utcnow() - timedelta(
             days=self.sys_backups.days_until_stale
         )

+    @property
+    def data(self) -> dict[str, Any]:
+        """Returns a copy of the data."""
+        return deepcopy(self._data)
+
     def new(
         self,
-        slug: str,
         name: str,
         date: str,
         sys_type: BackupType,
@@ -195,7 +226,6 @@ class Backup(CoreSysAttributes):
         """Initialize a new backup."""
         # Init metadata
         self._data[ATTR_VERSION] = 2
-        self._data[ATTR_SLUG] = slug
         self._data[ATTR_NAME] = name
         self._data[ATTR_DATE] = date
         self._data[ATTR_TYPE] = sys_type
@@ -296,25 +326,55 @@ class Backup(CoreSysAttributes):

     async def __aenter__(self):
         """Async context to open a backup."""
-        self._tmp = TemporaryDirectory(dir=str(self.tarfile.parent))

         # create a backup
         if not self.tarfile.is_file():
-            return self
+            self._outer_secure_tarfile = SecureTarFile(
+                self.tarfile,
+                "w",
+                gzip=False,
+                bufsize=BUF_SIZE,
+            )
+            self._outer_secure_tarfile_tarfile = self._outer_secure_tarfile.__enter__()
+            return

         # extract an existing backup
+        self._tmp = TemporaryDirectory(dir=str(self.tarfile.parent))
+
         def _extract_backup():
             """Extract a backup."""
             with tarfile.open(self.tarfile, "r:") as tar:
-                tar.extractall(path=self._tmp.name, members=secure_path(tar))
+                tar.extractall(
+                    path=self._tmp.name,
+                    members=secure_path(tar),
+                    filter="fully_trusted",
+                )

         await self.sys_run_in_executor(_extract_backup)

     async def __aexit__(self, exception_type, exception_value, traceback):
         """Async context to close a backup."""
         # exists backup or exception on build
-        if self.tarfile.is_file() or exception_type is not None:
-            self._tmp.cleanup()
+        try:
+            await self._aexit(exception_type, exception_value, traceback)
+        finally:
+            if self._tmp:
+                self._tmp.cleanup()
+            if self._outer_secure_tarfile:
+                self._outer_secure_tarfile.__exit__(
+                    exception_type, exception_value, traceback
+                )
+                self._outer_secure_tarfile = None
+                self._outer_secure_tarfile_tarfile = None
+
+    async def _aexit(self, exception_type, exception_value, traceback):
+        """Cleanup after backup creation.
+
+        This is a separate method to allow it to be called from __aexit__ to ensure
+        that cleanup is always performed, even if an exception is raised.
+        """
+        # If we're not creating a new backup, or if an exception was raised, we're done
+        if not self._outer_secure_tarfile or exception_type is not None:
             return

         # validate data
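The key move here is that a new backup is no longer staged in a `TemporaryDirectory` and tarred at the end; the outer archive is opened for writing up front and members are streamed into it. The next hunk's `_add_backup_json` reduces to this stdlib pattern: write an in-memory blob straight into an open tar via `TarInfo` plus `addfile`:

```python
import io
import json
import tarfile
import time

data = {"slug": "9a8f2c", "name": "example"}  # illustrative metadata

# writes backup.tar in the current directory
with tarfile.open("backup.tar", "w:") as tar:
    raw_bytes = json.dumps(data).encode()
    tar_info = tarfile.TarInfo(name="./backup.json")
    tar_info.size = len(raw_bytes)
    tar_info.mtime = int(time.time())
    tar.addfile(tar_info, fileobj=io.BytesIO(raw_bytes))
```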
@@ -327,157 +387,254 @@ class Backup(CoreSysAttributes):
             raise ValueError("Invalid config") from None

         # new backup, build it
-        def _create_backup():
+        def _add_backup_json():
             """Create a new backup."""
-            with tarfile.open(self.tarfile, "w:") as tar:
-                tar.add(self._tmp.name, arcname=".")
+            raw_bytes = json_bytes(self._data)
+            fileobj = io.BytesIO(raw_bytes)
+            tar_info = tarfile.TarInfo(name="./backup.json")
+            tar_info.size = len(raw_bytes)
+            tar_info.mtime = int(time.time())
+            self._outer_secure_tarfile_tarfile.addfile(tar_info, fileobj=fileobj)

         try:
-            write_json_file(Path(self._tmp.name, "backup.json"), self._data)
-            await self.sys_run_in_executor(_create_backup)
+            await self.sys_run_in_executor(_add_backup_json)
         except (OSError, json.JSONDecodeError) as err:
+            self.sys_jobs.current.capture_error(BackupError("Can't write backup"))
             _LOGGER.error("Can't write backup: %s", err)
-        finally:
-            self._tmp.cleanup()

-    async def store_addons(self, addon_list: list[str]) -> list[Awaitable[None]]:
+    @Job(name="backup_addon_save", cleanup=False)
+    async def _addon_save(self, addon: Addon) -> asyncio.Task | None:
+        """Store an add-on into backup."""
+        self.sys_jobs.current.reference = addon.slug
+
+        tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}"
+
+        addon_file = self._outer_secure_tarfile.create_inner_tar(
+            f"./{tar_name}",
+            gzip=self.compressed,
+            key=self._key,
+        )
+        # Take backup
+        try:
+            start_task = await addon.backup(addon_file)
+        except AddonsError as err:
+            raise BackupError(
+                f"Can't create backup for {addon.slug}", _LOGGER.error
+            ) from err
+
+        # Store to config
+        self._data[ATTR_ADDONS].append(
+            {
+                ATTR_SLUG: addon.slug,
+                ATTR_NAME: addon.name,
+                ATTR_VERSION: addon.version,
+                ATTR_SIZE: addon_file.size,
+            }
+        )
+
+        return start_task
+
+    @Job(name="backup_store_addons", cleanup=False)
+    async def store_addons(self, addon_list: list[str]) -> list[asyncio.Task]:
         """Add a list of add-ons into backup.

-        For each addon that needs to be started after backup, returns a task which
+        For each addon that needs to be started after backup, returns a Task which
         completes when that addon has state 'started' (see addon.start).
         """
-
-        async def _addon_save(addon: Addon) -> Awaitable[None] | None:
-            """Task to store an add-on into backup."""
-            tar_name = f"{addon.slug}.tar{'.gz' if self.compressed else ''}"
-            addon_file = SecureTarFile(
-                Path(self._tmp.name, tar_name),
-                "w",
-                key=self._key,
-                gzip=self.compressed,
-                bufsize=BUF_SIZE,
-            )
-
-            # Take backup
-            try:
-                start_task = await addon.backup(addon_file)
-            except AddonsError:
-                _LOGGER.error("Can't create backup for %s", addon.slug)
-                return
-
-            # Store to config
-            self._data[ATTR_ADDONS].append(
-                {
-                    ATTR_SLUG: addon.slug,
-                    ATTR_NAME: addon.name,
-                    ATTR_VERSION: addon.version,
-                    ATTR_SIZE: addon_file.size,
-                }
-            )
-
-            return start_task
-
-        # Save Add-ons sequential
-        # avoid issue on slow IO
-        start_tasks: list[Awaitable[None]] = []
+        # Save Add-ons sequential avoid issue on slow IO
+        start_tasks: list[asyncio.Task] = []
         for addon in addon_list:
             try:
-                if start_task := await _addon_save(addon):
+                if start_task := await self._addon_save(addon):
                     start_tasks.append(start_task)
             except Exception as err:  # pylint: disable=broad-except
                 _LOGGER.warning("Can't save Add-on %s: %s", addon.slug, err)

         return start_tasks

-    async def restore_addons(self, addon_list: list[str]) -> list[Awaitable[None]]:
-        """Restore a list add-on from backup."""
-
-        async def _addon_restore(addon_slug: str) -> Awaitable[None] | None:
-            """Task to restore an add-on into backup."""
-            tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}"
-            addon_file = SecureTarFile(
-                Path(self._tmp.name, tar_name),
-                "r",
-                key=self._key,
-                gzip=self.compressed,
-                bufsize=BUF_SIZE,
-            )
-
-            # If exists inside backup
-            if not addon_file.path.exists():
-                _LOGGER.error("Can't find backup %s", addon_slug)
-                return
-
-            # Perform a restore
-            try:
-                return await self.sys_addons.restore(addon_slug, addon_file)
-            except AddonsError:
-                _LOGGER.error("Can't restore backup %s", addon_slug)
-
-        # Save Add-ons sequential
-        # avoid issue on slow IO
-        start_tasks: list[Awaitable[None]] = []
-        for slug in addon_list:
-            try:
-                if start_task := await _addon_restore(slug):
-                    start_tasks.append(start_task)
-            except Exception as err:  # pylint: disable=broad-except
-                _LOGGER.warning("Can't restore Add-on %s: %s", slug, err)
-
-        return start_tasks
+    @Job(name="backup_addon_restore", cleanup=False)
+    async def _addon_restore(self, addon_slug: str) -> asyncio.Task | None:
+        """Restore an add-on from backup."""
+        self.sys_jobs.current.reference = addon_slug
+
+        tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}"
+        addon_file = SecureTarFile(
+            Path(self._tmp.name, tar_name),
+            "r",
+            key=self._key,
+            gzip=self.compressed,
+            bufsize=BUF_SIZE,
+        )
+
+        # If exists inside backup
+        if not addon_file.path.exists():
+            raise BackupError(f"Can't find backup {addon_slug}", _LOGGER.error)
+
+        # Perform a restore
+        try:
+            return await self.sys_addons.restore(addon_slug, addon_file)
+        except AddonsError as err:
+            raise BackupError(
+                f"Can't restore backup {addon_slug}", _LOGGER.error
+            ) from err
+
+    @Job(name="backup_restore_addons", cleanup=False)
+    async def restore_addons(
+        self, addon_list: list[str]
+    ) -> tuple[bool, list[asyncio.Task]]:
+        """Restore a list add-on from backup."""
+        # Save Add-ons sequential avoid issue on slow IO
+        start_tasks: list[asyncio.Task] = []
+        success = True
+        for slug in addon_list:
+            try:
+                start_task = await self._addon_restore(slug)
+            except Exception as err:  # pylint: disable=broad-except
+                _LOGGER.warning("Can't restore Add-on %s: %s", slug, err)
+                success = False
+            else:
+                if start_task:
+                    start_tasks.append(start_task)
+
+        return (success, start_tasks)
+
+    @Job(name="backup_remove_delta_addons", cleanup=False)
+    async def remove_delta_addons(self) -> bool:
+        """Remove addons which are not in this backup."""
+        success = True
+        for addon in self.sys_addons.installed:
+            if addon.slug in self.addon_list:
+                continue
+
+            # Remove Add-on because it's not a part of the new env
+            # Do it sequential avoid issue on slow IO
+            try:
+                await self.sys_addons.uninstall(addon.slug)
+            except AddonsError as err:
+                self.sys_jobs.current.capture_error(err)
+                _LOGGER.warning("Can't uninstall Add-on %s: %s", addon.slug, err)
+                success = False
+
+        return success
+
+    @Job(name="backup_folder_save", cleanup=False)
+    async def _folder_save(self, name: str):
+        """Take backup of a folder."""
+        self.sys_jobs.current.reference = name
+
+        slug_name = name.replace("/", "_")
+        tar_name = f"{slug_name}.tar{'.gz' if self.compressed else ''}"
+        origin_dir = Path(self.sys_config.path_supervisor, name)
+
+        # Check if exists
+        if not origin_dir.is_dir():
+            _LOGGER.warning("Can't find backup folder %s", name)
+            return
+
+        def _save() -> None:
+            # Take backup
+            _LOGGER.info("Backing up folder %s", name)
+
+            with self._outer_secure_tarfile.create_inner_tar(
+                f"./{tar_name}",
+                gzip=self.compressed,
+                key=self._key,
+            ) as tar_file:
+                atomic_contents_add(
+                    tar_file,
+                    origin_dir,
+                    excludes=[
+                        bound.bind_mount.local_where.as_posix()
+                        for bound in self.sys_mounts.bound_mounts
+                        if bound.bind_mount.local_where
+                    ],
+                    arcname=".",
+                )
+
+            _LOGGER.info("Backup folder %s done", name)
+
+        try:
+            await self.sys_run_in_executor(_save)
+        except (tarfile.TarError, OSError) as err:
+            raise BackupError(
+                f"Can't backup folder {name}: {str(err)}", _LOGGER.error
+            ) from err
+
+        self._data[ATTR_FOLDERS].append(name)

+    @Job(name="backup_store_folders", cleanup=False)
     async def store_folders(self, folder_list: list[str]):
         """Backup Supervisor data into backup."""
+        # Save folder sequential avoid issue on slow IO

-        async def _folder_save(name: str):
-            """Take backup of a folder."""
-            slug_name = name.replace("/", "_")
-            tar_name = Path(
-                self._tmp.name, f"{slug_name}.tar{'.gz' if self.compressed else ''}"
-            )
-            origin_dir = Path(self.sys_config.path_supervisor, name)
-
-            # Check if exists
-            if not origin_dir.is_dir():
-                _LOGGER.warning("Can't find backup folder %s", name)
-                return
-
-            def _save() -> None:
-                # Take backup
-                _LOGGER.info("Backing up folder %s", name)
-                with SecureTarFile(
-                    tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE
-                ) as tar_file:
-                    atomic_contents_add(
-                        tar_file,
-                        origin_dir,
-                        excludes=[
-                            bound.bind_mount.local_where.as_posix()
-                            for bound in self.sys_mounts.bound_mounts
-                            if bound.bind_mount.local_where
-                        ],
-                        arcname=".",
-                    )
-
-                _LOGGER.info("Backup folder %s done", name)
-
-            await self.sys_run_in_executor(_save)
-            self._data[ATTR_FOLDERS].append(name)
-
-        # Save folder sequential
-        # avoid issue on slow IO
         for folder in folder_list:
+            await self._folder_save(folder)
+
+    @Job(name="backup_folder_restore", cleanup=False)
+    async def _folder_restore(self, name: str) -> None:
+        """Restore a folder."""
+        self.sys_jobs.current.reference = name
+
+        slug_name = name.replace("/", "_")
+        tar_name = Path(
+            self._tmp.name, f"{slug_name}.tar{'.gz' if self.compressed else ''}"
+        )
+        origin_dir = Path(self.sys_config.path_supervisor, name)
+
+        # Check if exists inside backup
+        if not tar_name.exists():
+            raise BackupInvalidError(
+                f"Can't find restore folder {name}", _LOGGER.warning
+            )
+
+        # Unmount any mounts within folder
+        bind_mounts = [
+            bound.bind_mount
+            for bound in self.sys_mounts.bound_mounts
+            if bound.bind_mount.local_where
+            and bound.bind_mount.local_where.is_relative_to(origin_dir)
+        ]
+        if bind_mounts:
+            await asyncio.gather(*[bind_mount.unmount() for bind_mount in bind_mounts])
+
+        # Clean old stuff
+        if origin_dir.is_dir():
+            await remove_folder(origin_dir, content_only=True)
+
+        # Perform a restore
+        def _restore() -> bool:
             try:
-                await _folder_save(folder)
+                _LOGGER.info("Restore folder %s", name)
+                with SecureTarFile(
+                    tar_name,
+                    "r",
+                    key=self._key,
+                    gzip=self.compressed,
+                    bufsize=BUF_SIZE,
+                ) as tar_file:
+                    tar_file.extractall(
+                        path=origin_dir, members=tar_file, filter="fully_trusted"
+                    )
+                _LOGGER.info("Restore folder %s done", name)
             except (tarfile.TarError, OSError) as err:
                 raise BackupError(
-                    f"Can't backup folder {folder}: {str(err)}", _LOGGER.error
+                    f"Can't restore folder {name}: {err}", _LOGGER.warning
                 ) from err
+            return True

-    async def restore_folders(self, folder_list: list[str]):
+        try:
+            return await self.sys_run_in_executor(_restore)
+        finally:
+            if bind_mounts:
+                await asyncio.gather(
+                    *[bind_mount.mount() for bind_mount in bind_mounts]
+                )
+
+    @Job(name="backup_restore_folders", cleanup=False)
+    async def restore_folders(self, folder_list: list[str]) -> bool:
         """Backup Supervisor data into backup."""
+        success = True

-        async def _folder_restore(name: str) -> None:
+        async def _folder_restore(name: str) -> bool:
             """Intenal function to restore a folder."""
             slug_name = name.replace("/", "_")
             tar_name = Path(
@@ -488,14 +645,26 @@ class Backup(CoreSysAttributes):
|
|||||||
# Check if exists inside backup
|
# Check if exists inside backup
|
||||||
if not tar_name.exists():
|
if not tar_name.exists():
|
||||||
_LOGGER.warning("Can't find restore folder %s", name)
|
_LOGGER.warning("Can't find restore folder %s", name)
|
||||||
return
|
return False
|
||||||
|
|
||||||
|
# Unmount any mounts within folder
|
||||||
|
bind_mounts = [
|
||||||
|
bound.bind_mount
|
||||||
|
for bound in self.sys_mounts.bound_mounts
|
||||||
|
if bound.bind_mount.local_where
|
||||||
|
and bound.bind_mount.local_where.is_relative_to(origin_dir)
|
||||||
|
]
|
||||||
|
if bind_mounts:
|
||||||
|
await asyncio.gather(
|
||||||
|
*[bind_mount.unmount() for bind_mount in bind_mounts]
|
||||||
|
)
|
||||||
|
|
||||||
# Clean old stuff
|
# Clean old stuff
|
||||||
if origin_dir.is_dir():
|
if origin_dir.is_dir():
|
||||||
await remove_folder(origin_dir, content_only=True)
|
await remove_folder(origin_dir, content_only=True)
|
||||||
|
|
||||||
# Perform a restore
|
# Perform a restore
|
||||||
def _restore() -> None:
|
def _restore() -> bool:
|
||||||
try:
|
try:
|
||||||
_LOGGER.info("Restore folder %s", name)
|
_LOGGER.info("Restore folder %s", name)
|
||||||
with SecureTarFile(
|
with SecureTarFile(
|
||||||
@@ -505,40 +674,56 @@ class Backup(CoreSysAttributes):
|
|||||||
gzip=self.compressed,
|
gzip=self.compressed,
|
||||||
bufsize=BUF_SIZE,
|
bufsize=BUF_SIZE,
|
||||||
) as tar_file:
|
) as tar_file:
|
||||||
tar_file.extractall(path=origin_dir, members=tar_file)
|
tar_file.extractall(
|
||||||
|
path=origin_dir, members=tar_file, filter="fully_trusted"
|
||||||
|
)
|
||||||
_LOGGER.info("Restore folder %s done", name)
|
_LOGGER.info("Restore folder %s done", name)
|
||||||
except (tarfile.TarError, OSError) as err:
|
except (tarfile.TarError, OSError) as err:
|
||||||
_LOGGER.warning("Can't restore folder %s: %s", name, err)
|
_LOGGER.warning("Can't restore folder %s: %s", name, err)
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
await self.sys_run_in_executor(_restore)
|
try:
|
||||||
|
return await self.sys_run_in_executor(_restore)
|
||||||
|
finally:
|
||||||
|
if bind_mounts:
|
||||||
|
await asyncio.gather(
|
||||||
|
*[bind_mount.mount() for bind_mount in bind_mounts]
|
||||||
|
)
|
||||||
|
|
||||||
# Restore folder sequential
|
# Restore folder sequential avoid issue on slow IO
|
||||||
# avoid issue on slow IO
|
|
||||||
for folder in folder_list:
|
for folder in folder_list:
|
||||||
try:
|
try:
|
||||||
await _folder_restore(folder)
|
await self._folder_restore(folder)
|
||||||
except Exception as err: # pylint: disable=broad-except
|
except Exception as err: # pylint: disable=broad-except
|
||||||
_LOGGER.warning("Can't restore folder %s: %s", folder, err)
|
_LOGGER.warning("Can't restore folder %s: %s", folder, err)
|
||||||
|
success = False
|
||||||
|
return success
|
||||||
|
|
||||||
async def store_homeassistant(self):
|
@Job(name="backup_store_homeassistant", cleanup=False)
|
||||||
"""Backup Home Assitant Core configuration folder."""
|
async def store_homeassistant(self, exclude_database: bool = False):
|
||||||
self._data[ATTR_HOMEASSISTANT] = {ATTR_VERSION: self.sys_homeassistant.version}
|
"""Backup Home Assistant Core configuration folder."""
|
||||||
|
self._data[ATTR_HOMEASSISTANT] = {
|
||||||
|
ATTR_VERSION: self.sys_homeassistant.version,
|
||||||
|
ATTR_EXCLUDE_DATABASE: exclude_database,
|
||||||
|
}
|
||||||
|
|
||||||
|
tar_name = f"homeassistant.tar{'.gz' if self.compressed else ''}"
|
||||||
# Backup Home Assistant Core config directory
|
# Backup Home Assistant Core config directory
|
||||||
tar_name = Path(
|
homeassistant_file = self._outer_secure_tarfile.create_inner_tar(
|
||||||
self._tmp.name, f"homeassistant.tar{'.gz' if self.compressed else ''}"
|
f"./{tar_name}",
|
||||||
)
|
gzip=self.compressed,
|
||||||
homeassistant_file = SecureTarFile(
|
key=self._key,
|
||||||
tar_name, "w", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE
|
|
||||||
)
|
)
|
||||||
|
|
||||||
await self.sys_homeassistant.backup(homeassistant_file)
|
await self.sys_homeassistant.backup(homeassistant_file, exclude_database)
|
||||||
|
|
||||||
# Store size
|
# Store size
|
||||||
self.homeassistant[ATTR_SIZE] = homeassistant_file.size
|
self.homeassistant[ATTR_SIZE] = homeassistant_file.size
|
||||||
|
|
||||||
|
@Job(name="backup_restore_homeassistant", cleanup=False)
|
||||||
async def restore_homeassistant(self) -> Awaitable[None]:
|
async def restore_homeassistant(self) -> Awaitable[None]:
|
||||||
"""Restore Home Assitant Core configuration folder."""
|
"""Restore Home Assistant Core configuration folder."""
|
||||||
await self.sys_homeassistant.core.stop()
|
await self.sys_homeassistant.core.stop()
|
||||||
|
|
||||||
# Restore Home Assistant Core config directory
|
# Restore Home Assistant Core config directory
|
||||||
@@ -549,7 +734,9 @@ class Backup(CoreSysAttributes):
|
|||||||
tar_name, "r", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE
|
tar_name, "r", key=self._key, gzip=self.compressed, bufsize=BUF_SIZE
|
||||||
)
|
)
|
||||||
|
|
||||||
await self.sys_homeassistant.restore(homeassistant_file)
|
await self.sys_homeassistant.restore(
|
||||||
|
homeassistant_file, self.homeassistant_exclude_database
|
||||||
|
)
|
||||||
|
|
||||||
# Generate restore task
|
# Generate restore task
|
||||||
async def _core_update():
|
async def _core_update():
|
||||||
@@ -568,16 +755,16 @@ class Backup(CoreSysAttributes):
|
|||||||
|
|
||||||
return self.sys_create_task(_core_update())
|
return self.sys_create_task(_core_update())
|
||||||
|
|
||||||
def store_repositories(self):
|
def store_repositories(self) -> None:
|
||||||
"""Store repository list into backup."""
|
"""Store repository list into backup."""
|
||||||
self.repositories = self.sys_store.repository_urls
|
self.repositories = self.sys_store.repository_urls
|
||||||
|
|
||||||
async def restore_repositories(self, replace: bool = False):
|
def restore_repositories(self, replace: bool = False) -> Awaitable[None]:
|
||||||
"""Restore repositories from backup.
|
"""Restore repositories from backup.
|
||||||
|
|
||||||
Return a coroutine.
|
Return a coroutine.
|
||||||
"""
|
"""
|
||||||
await self.sys_store.update_repositories(
|
return self.sys_store.update_repositories(
|
||||||
self.repositories, add_with_errors=True, replace=replace
|
self.repositories, add_with_errors=True, replace=replace
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
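The folder-restore hunks above share one shape: take every bind mount inside the target directory offline, do the tar work, and always remount in a finally block. A minimal runnable sketch of that shape, with a stand-in Mount class instead of Supervisor's real mount objects:

import asyncio

class Mount:
    """Hypothetical stand-in for a Supervisor bind mount."""

    def __init__(self, name: str) -> None:
        self.name = name

    async def unmount(self) -> None:
        print(f"unmounted {self.name}")

    async def mount(self) -> None:
        print(f"mounted {self.name}")


async def restore_with_mounts(mounts: list[Mount]) -> None:
    # Take mounts offline before touching the directory under them
    await asyncio.gather(*[m.unmount() for m in mounts])
    try:
        ...  # extract the tar into the now-unmounted directory
    finally:
        # Runs on success and failure alike, mirroring the diff above
        await asyncio.gather(*[m.mount() for m in mounts])


asyncio.run(restore_with_mounts([Mount("share"), Mount("media")]))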
@@ -1,11 +1,39 @@
 """Backup consts."""
-from enum import Enum
+
+from enum import StrEnum
 
 BUF_SIZE = 2**20 * 4  # 4MB
+DEFAULT_FREEZE_TIMEOUT = 600
 
 
-class BackupType(str, Enum):
+class BackupType(StrEnum):
     """Backup type enum."""
 
     FULL = "full"
     PARTIAL = "partial"
+
+
+class BackupJobStage(StrEnum):
+    """Backup job stage enum."""
+
+    ADDON_REPOSITORIES = "addon_repositories"
+    ADDONS = "addons"
+    DOCKER_CONFIG = "docker_config"
+    FINISHING_FILE = "finishing_file"
+    FOLDERS = "folders"
+    HOME_ASSISTANT = "home_assistant"
+    AWAIT_ADDON_RESTARTS = "await_addon_restarts"
+
+
+class RestoreJobStage(StrEnum):
+    """Restore job stage enum."""
+
+    ADDON_REPOSITORIES = "addon_repositories"
+    ADDONS = "addons"
+    AWAIT_ADDON_RESTARTS = "await_addon_restarts"
+    AWAIT_HOME_ASSISTANT_RESTART = "await_home_assistant_restart"
+    CHECK_HOME_ASSISTANT = "check_home_assistant"
+    DOCKER_CONFIG = "docker_config"
+    FOLDERS = "folders"
+    HOME_ASSISTANT = "home_assistant"
+    REMOVE_DELTA_ADDONS = "remove_delta_addons"
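The switch from `class BackupType(str, Enum)` to `StrEnum` (Python 3.11+) keeps string comparisons intact while making plain stringification return the bare value. An illustrative sketch, not part of the commit:

from enum import Enum, StrEnum  # StrEnum requires Python 3.11+


class OldType(str, Enum):
    FULL = "full"


class NewType(StrEnum):
    FULL = "full"


# Both still compare equal to the plain string value...
assert OldType.FULL == "full" and NewType.FULL == "full"
# ...but only StrEnum stringifies to the bare value, which is what
# serialized metadata and log messages usually expect.
assert str(NewType.FULL) == "full"
assert str(OldType.FULL) == "OldType.FULL"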
@@ -1,56 +1,59 @@
 """Backup manager."""
 
 from __future__ import annotations
 
 import asyncio
 from collections.abc import Awaitable, Iterable
+import errno
 import logging
 from pathlib import Path
 
 from ..addons.addon import Addon
 from ..const import (
+    ATTR_DATA,
     ATTR_DAYS_UNTIL_STALE,
+    ATTR_SLUG,
+    ATTR_TYPE,
     FILE_HASSIO_BACKUPS,
     FOLDER_HOMEASSISTANT,
     CoreState,
 )
-from ..coresys import CoreSysAttributes
 from ..dbus.const import UnitActiveState
-from ..exceptions import AddonsError
-from ..jobs.decorator import Job, JobCondition
+from ..exceptions import (
+    BackupError,
+    BackupInvalidError,
+    BackupJobError,
+    BackupMountDownError,
+    HomeAssistantWSError,
+)
+from ..homeassistant.const import WSType
+from ..jobs.const import JOB_GROUP_BACKUP_MANAGER, JobCondition, JobExecutionLimit
+from ..jobs.decorator import Job
+from ..jobs.job_group import JobGroup
 from ..mounts.mount import Mount
+from ..resolution.const import UnhealthyReason
 from ..utils.common import FileConfiguration
 from ..utils.dt import utcnow
 from ..utils.sentinel import DEFAULT
 from ..utils.sentry import capture_exception
 from .backup import Backup
-from .const import BackupType
+from .const import DEFAULT_FREEZE_TIMEOUT, BackupJobStage, BackupType, RestoreJobStage
 from .utils import create_slug
 from .validate import ALL_FOLDERS, SCHEMA_BACKUPS_CONFIG
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
 
-def _list_backup_files(path: Path) -> Iterable[Path]:
-    """Return iterable of backup files, suppress and log OSError for network mounts."""
-    try:
-        # is_dir does a stat syscall which raises if the mount is down
-        if path.is_dir():
-            return path.glob("*.tar")
-    except OSError as err:
-        _LOGGER.error("Could not list backups from %s: %s", path.as_posix(), err)
-
-    return []
-
-
-class BackupManager(FileConfiguration, CoreSysAttributes):
+class BackupManager(FileConfiguration, JobGroup):
     """Manage backups."""
 
     def __init__(self, coresys):
         """Initialize a backup manager."""
         super().__init__(FILE_HASSIO_BACKUPS, SCHEMA_BACKUPS_CONFIG)
-        self.coresys = coresys
-        self._backups = {}
-        self.lock = asyncio.Lock()
+        super(FileConfiguration, self).__init__(coresys, JOB_GROUP_BACKUP_MANAGER)
+        self._backups: dict[str, Backup] = {}
+        self._thaw_task: Awaitable[None] | None = None
+        self._thaw_event: asyncio.Event = asyncio.Event()
 
     @property
     def list_backups(self) -> set[Backup]:
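The double `super()` call in `__init__` is how a class with two unrelated bases initializes each one explicitly: bare `super()` starts at the first base in the MRO, while `super(FileConfiguration, self)` skips past it to the next one. A minimal sketch with stand-in base classes:

class FileConfiguration:
    def __init__(self, file: str) -> None:
        self.file = file


class JobGroup:
    def __init__(self, coresys: object, group_name: str) -> None:
        self.coresys = coresys
        self.group_name = group_name


class BackupManager(FileConfiguration, JobGroup):
    def __init__(self, coresys: object) -> None:
        # MRO: BackupManager -> FileConfiguration -> JobGroup -> object
        super().__init__("backups.json")  # resolves to FileConfiguration
        super(FileConfiguration, self).__init__(coresys, "backup_manager")  # JobGroup


manager = BackupManager(object())
assert manager.file == "backups.json" and manager.group_name == "backup_manager"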
@@ -76,20 +79,64 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
             if mount.state == UnitActiveState.ACTIVE
         ]
 
-    def get(self, slug):
+    def get(self, slug: str) -> Backup:
         """Return backup object."""
         return self._backups.get(slug)
 
     def _get_base_path(self, location: Mount | type[DEFAULT] | None = DEFAULT) -> Path:
         """Get base path for backup using location or default location."""
+        if location == DEFAULT and self.sys_mounts.default_backup_mount:
+            location = self.sys_mounts.default_backup_mount
+
         if location:
+            if not location.local_where.is_mount():
+                raise BackupMountDownError(
+                    f"{location.name} is down, cannot back-up to it", _LOGGER.error
+                )
             return location.local_where
 
-        if location == DEFAULT and self.sys_mounts.default_backup_mount:
-            return self.sys_mounts.default_backup_mount.local_where
-
         return self.sys_config.path_backup
 
+    def _change_stage(
+        self,
+        stage: BackupJobStage | RestoreJobStage,
+        backup: Backup | None = None,
+    ):
+        """Change the stage of the current job during backup/restore.
+
+        Must be called from an existing backup/restore job.
+        """
+        job_name = self.sys_jobs.current.name
+        if "restore" in job_name:
+            action = "Restore"
+        elif "freeze" in job_name:
+            action = "Freeze"
+        elif "thaw" in job_name:
+            action = "Thaw"
+        else:
+            action = "Backup"
+
+        _LOGGER.info(
+            "%s %sstarting stage %s",
+            action,
+            f"{backup.slug} " if backup else "",
+            stage,
+        )
+        self.sys_jobs.current.stage = stage
+
+    def _list_backup_files(self, path: Path) -> Iterable[Path]:
+        """Return iterable of backup files, suppress and log OSError for network mounts."""
+        try:
+            # is_dir does a stat syscall which raises if the mount is down
+            if path.is_dir():
+                return path.glob("*.tar")
+        except OSError as err:
+            if err.errno == errno.EBADMSG and path == self.sys_config.path_backup:
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
+            _LOGGER.error("Could not list backups from %s: %s", path.as_posix(), err)
+
+        return []
+
     def _create_backup(
         self,
         name: str,
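The new errno handling treats EBADMSG from a stat/glob on the data disk as a corruption signal rather than a routine failure. A small self-contained sketch of the idiom, with a mark_unhealthy callback standing in for Supervisor's resolution API:

import errno
from collections.abc import Callable
from pathlib import Path


def list_tars(path: Path, mark_unhealthy: Callable[[str], None]) -> list[Path]:
    try:
        if path.is_dir():  # stat() here raises if the mount is down
            return list(path.glob("*.tar"))
    except OSError as err:
        if err.errno == errno.EBADMSG:
            # EBADMSG from local storage usually means filesystem corruption
            mark_unhealthy("oserror_bad_message")
        print(f"Could not list backups from {path}: {err}")
    return []


print(list_tars(Path("/tmp"), print))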
@@ -98,52 +145,58 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
         compressed: bool = True,
         location: Mount | type[DEFAULT] | None = DEFAULT,
     ) -> Backup:
-        """Initialize a new backup object from name."""
+        """Initialize a new backup object from name.
+
+        Must be called from an existing backup job.
+        """
         date_str = utcnow().isoformat()
         slug = create_slug(name, date_str)
         tar_file = Path(self._get_base_path(location), f"{slug}.tar")
 
         # init object
-        backup = Backup(self.coresys, tar_file)
-        backup.new(slug, name, date_str, sys_type, password, compressed)
+        backup = Backup(self.coresys, tar_file, slug)
+        backup.new(name, date_str, sys_type, password, compressed)
 
-        backup.store_repositories()
-        backup.store_dockerconfig()
-
         # Add backup ID to job
-        if job := self.sys_jobs.get_job():
-            job.reference = backup.slug
+        self.sys_jobs.current.reference = backup.slug
+
+        self._change_stage(BackupJobStage.ADDON_REPOSITORIES, backup)
+        backup.store_repositories()
+        self._change_stage(BackupJobStage.DOCKER_CONFIG, backup)
+        backup.store_dockerconfig()
 
         return backup
 
-    def load(self):
+    def load(self) -> Awaitable[None]:
         """Load exists backups data.
 
         Return a coroutine.
         """
         return self.reload()
 
-    async def reload(self):
+    async def reload(self) -> None:
         """Load exists backups."""
         self._backups = {}
 
         async def _load_backup(tar_file):
             """Load the backup."""
-            backup = Backup(self.coresys, tar_file)
+            backup = Backup(self.coresys, tar_file, "temp")
             if await backup.load():
-                self._backups[backup.slug] = backup
+                self._backups[backup.slug] = Backup(
+                    self.coresys, tar_file, backup.slug, backup.data
+                )
 
         tasks = [
             self.sys_create_task(_load_backup(tar_file))
             for path in self.backup_locations
-            for tar_file in _list_backup_files(path)
+            for tar_file in self._list_backup_files(path)
         ]
 
         _LOGGER.info("Found %d backup files", len(tasks))
         if tasks:
             await asyncio.wait(tasks)
 
-    def remove(self, backup):
+    def remove(self, backup: Backup) -> bool:
         """Remove a backup."""
         try:
             backup.tarfile.unlink()

@@ -151,14 +204,19 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
             _LOGGER.info("Removed backup file %s", backup.slug)
 
         except OSError as err:
+            if (
+                err.errno == errno.EBADMSG
+                and backup.tarfile.parent == self.sys_config.path_backup
+            ):
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
             _LOGGER.error("Can't remove backup %s: %s", backup.slug, err)
             return False
 
         return True
 
-    async def import_backup(self, tar_file):
+    async def import_backup(self, tar_file: Path) -> Backup | None:
         """Check backup tarfile and import it."""
-        backup = Backup(self.coresys, tar_file)
+        backup = Backup(self.coresys, tar_file, "temp")
 
         # Read meta data
         if not await backup.load():

@@ -175,11 +233,13 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
             backup.tarfile.rename(tar_origin)
 
         except OSError as err:
+            if err.errno == errno.EBADMSG:
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
             _LOGGER.error("Can't move backup file to storage: %s", err)
             return None
 
         # Load new backup
-        backup = Backup(self.coresys, tar_origin)
+        backup = Backup(self.coresys, tar_origin, backup.slug, backup.data)
         if not await backup.load():
             return None
         _LOGGER.info("Successfully imported %s", backup.slug)
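The `reload()` pattern above schedules one task per discovered tar file and then awaits the whole batch. A minimal runnable sketch of that shape (the per-archive load is simulated, since parsing real backup metadata is out of scope here):

import asyncio
from pathlib import Path


async def load_one(tar_file: Path, backups: dict[str, Path]) -> None:
    await asyncio.sleep(0)  # stands in for reading the archive's metadata
    backups[tar_file.stem] = tar_file


async def reload(paths: list[Path]) -> dict[str, Path]:
    backups: dict[str, Path] = {}
    tasks = [asyncio.create_task(load_one(tar, backups)) for tar in paths]
    print(f"Found {len(tasks)} backup files")
    if tasks:
        # asyncio.wait() rejects an empty collection, hence the guard,
        # the same reason the diff keeps its `if tasks:` check
        await asyncio.wait(tasks)
    return backups


print(asyncio.run(reload([Path("a1b2c3d4.tar"), Path("e5f6a7b8.tar")])))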
@@ -193,73 +253,116 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
         addon_list: list[Addon],
         folder_list: list[str],
         homeassistant: bool,
-    ):
+        homeassistant_exclude_database: bool | None,
+    ) -> Backup | None:
+        """Create a backup.
+
+        Must be called from an existing backup job.
+        """
         addon_start_tasks: list[Awaitable[None]] | None = None
 
         try:
             self.sys_core.state = CoreState.FREEZE
 
             async with backup:
-                # Backup add-ons
-                if addon_list:
-                    _LOGGER.info("Backing up %s store Add-ons", backup.slug)
-                    addon_start_tasks = await backup.store_addons(addon_list)
-
                 # HomeAssistant Folder is for v1
                 if homeassistant:
-                    await backup.store_homeassistant()
+                    self._change_stage(BackupJobStage.HOME_ASSISTANT, backup)
+                    await backup.store_homeassistant(
+                        self.sys_homeassistant.backups_exclude_database
+                        if homeassistant_exclude_database is None
+                        else homeassistant_exclude_database
+                    )
+
+                # Backup add-ons
+                if addon_list:
+                    self._change_stage(BackupJobStage.ADDONS, backup)
+                    addon_start_tasks = await backup.store_addons(addon_list)
 
                 # Backup folders
                 if folder_list:
-                    _LOGGER.info("Backing up %s store folders", backup.slug)
+                    self._change_stage(BackupJobStage.FOLDERS, backup)
                     await backup.store_folders(folder_list)
 
+                self._change_stage(BackupJobStage.FINISHING_FILE, backup)
+
+        except BackupError as err:
+            self.sys_jobs.current.capture_error(err)
+            return None
         except Exception as err:  # pylint: disable=broad-except
             _LOGGER.exception("Backup %s error", backup.slug)
             capture_exception(err)
+            self.sys_jobs.current.capture_error(
+                BackupError(f"Backup {backup.slug} error, see supervisor logs")
+            )
             return None
         else:
             self._backups[backup.slug] = backup
 
             if addon_start_tasks:
+                self._change_stage(BackupJobStage.AWAIT_ADDON_RESTARTS, backup)
                 # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
                 await asyncio.gather(*addon_start_tasks, return_exceptions=True)
 
+            try:
+                await self.sys_homeassistant.websocket.async_send_command(
+                    {
+                        ATTR_TYPE: WSType.BACKUP_SYNC,
+                        ATTR_DATA: {
+                            ATTR_SLUG: backup.slug,
+                        },
+                    },
+                )
+            except HomeAssistantWSError as err:
+                _LOGGER.error("Can't send backup sync to Home Assistant: %s", err)
+
             return backup
         finally:
             self.sys_core.state = CoreState.RUNNING
 
     @Job(
         name="backup_manager_full_backup",
-        conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING],
+        conditions=[JobCondition.RUNNING],
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=BackupJobError,
+        cleanup=False,
     )
     async def do_backup_full(
         self,
-        name="",
-        password=None,
-        compressed=True,
+        name: str = "",
+        password: str | None = None,
+        compressed: bool = True,
         location: Mount | type[DEFAULT] | None = DEFAULT,
-    ):
+        homeassistant_exclude_database: bool | None = None,
+    ) -> Backup | None:
         """Create a full backup."""
-        if self.lock.locked():
-            _LOGGER.error("A backup/restore process is already running")
-            return None
+        if self._get_base_path(location) == self.sys_config.path_backup:
+            await Job.check_conditions(
+                self, {JobCondition.FREE_SPACE}, "BackupManager.do_backup_full"
+            )
 
         backup = self._create_backup(
             name, BackupType.FULL, password, compressed, location
         )
 
         _LOGGER.info("Creating new full backup with slug %s", backup.slug)
-        async with self.lock:
-            backup = await self._do_backup(
-                backup, self.sys_addons.installed, ALL_FOLDERS, True
-            )
-            if backup:
-                _LOGGER.info("Creating full backup with slug %s completed", backup.slug)
-            return backup
+        backup = await self._do_backup(
+            backup,
+            self.sys_addons.installed,
+            ALL_FOLDERS,
+            True,
+            homeassistant_exclude_database,
+        )
+        if backup:
+            _LOGGER.info("Creating full backup with slug %s completed", backup.slug)
+        return backup
 
     @Job(
         name="backup_manager_partial_backup",
-        conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING],
+        conditions=[JobCondition.RUNNING],
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=BackupJobError,
+        cleanup=False,
     )
     async def do_backup_partial(
         self,
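The new homeassistant_exclude_database parameter is a tri-state flag: None means "follow the global setting", while an explicit True/False from the caller wins. A tiny illustration of the conditional-expression fallback used above (the global name here is a stand-in for sys_homeassistant.backups_exclude_database):

GLOBAL_EXCLUDE_DATABASE = False  # hypothetical global default


def resolve_exclude_database(request_value: bool | None) -> bool:
    # Fall back to the global only when the caller expressed no preference
    return GLOBAL_EXCLUDE_DATABASE if request_value is None else request_value


assert resolve_exclude_database(None) is False  # falls back to the global
assert resolve_exclude_database(True) is True   # explicit request wins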
@@ -270,11 +373,13 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
         homeassistant: bool = False,
         compressed: bool = True,
         location: Mount | type[DEFAULT] | None = DEFAULT,
-    ):
+        homeassistant_exclude_database: bool | None = None,
+    ) -> Backup | None:
         """Create a partial backup."""
-        if self.lock.locked():
-            _LOGGER.error("A backup/restore process is already running")
-            return None
+        if self._get_base_path(location) == self.sys_config.path_backup:
+            await Job.check_conditions(
+                self, {JobCondition.FREE_SPACE}, "BackupManager.do_backup_partial"
+            )
 
         addons = addons or []
         folders = folders or []

@@ -292,21 +397,20 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
         )
 
         _LOGGER.info("Creating new partial backup with slug %s", backup.slug)
-        async with self.lock:
-            addon_list = []
-            for addon_slug in addons:
-                addon = self.sys_addons.get(addon_slug)
-                if addon and addon.is_installed:
-                    addon_list.append(addon)
-                    continue
-                _LOGGER.warning("Add-on %s not found/installed", addon_slug)
+        addon_list = []
+        for addon_slug in addons:
+            addon = self.sys_addons.get(addon_slug)
+            if addon and addon.is_installed:
+                addon_list.append(addon)
+                continue
+            _LOGGER.warning("Add-on %s not found/installed", addon_slug)
 
-            backup = await self._do_backup(backup, addon_list, folders, homeassistant)
-            if backup:
-                _LOGGER.info(
-                    "Creating partial backup with slug %s completed", backup.slug
-                )
-            return backup
+        backup = await self._do_backup(
+            backup, addon_list, folders, homeassistant, homeassistant_exclude_database
+        )
+        if backup:
+            _LOGGER.info("Creating partial backup with slug %s completed", backup.slug)
+        return backup
 
     async def _do_restore(
         self,
@@ -315,70 +419,87 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
         folder_list: list[str],
         homeassistant: bool,
         replace: bool,
-    ):
+    ) -> bool:
+        """Restore from a backup.
+
+        Must be called from an existing restore job.
+        """
         addon_start_tasks: list[Awaitable[None]] | None = None
+        success = True
 
         try:
             task_hass: asyncio.Task | None = None
             async with backup:
                 # Restore docker config
-                _LOGGER.info("Restoring %s Docker config", backup.slug)
+                self._change_stage(RestoreJobStage.DOCKER_CONFIG, backup)
                 backup.restore_dockerconfig(replace)
 
                 # Process folders
                 if folder_list:
-                    _LOGGER.info("Restoring %s folders", backup.slug)
-                    await backup.restore_folders(folder_list)
+                    self._change_stage(RestoreJobStage.FOLDERS, backup)
+                    success = await backup.restore_folders(folder_list)
 
                 # Process Home-Assistant
                 if homeassistant:
-                    _LOGGER.info("Restoring %s Home Assistant Core", backup.slug)
+                    self._change_stage(RestoreJobStage.HOME_ASSISTANT, backup)
                     task_hass = await backup.restore_homeassistant()
 
                 # Delete delta add-ons
                 if replace:
-                    _LOGGER.info("Removing Add-ons not in the backup %s", backup.slug)
-                    for addon in self.sys_addons.installed:
-                        if addon.slug in backup.addon_list:
-                            continue
-
-                        # Remove Add-on because it's not a part of the new env
-                        # Do it sequential avoid issue on slow IO
-                        try:
-                            await addon.uninstall()
-                        except AddonsError:
-                            _LOGGER.warning("Can't uninstall Add-on %s", addon.slug)
+                    self._change_stage(RestoreJobStage.REMOVE_DELTA_ADDONS, backup)
+                    success = success and await backup.remove_delta_addons()
 
                 if addon_list:
-                    _LOGGER.info("Restoring %s Repositories", backup.slug)
+                    self._change_stage(RestoreJobStage.ADDON_REPOSITORIES, backup)
                     await backup.restore_repositories(replace)
 
-                    _LOGGER.info("Restoring %s Add-ons", backup.slug)
-                    addon_start_tasks = await backup.restore_addons(addon_list)
+                    self._change_stage(RestoreJobStage.ADDONS, backup)
+                    restore_success, addon_start_tasks = await backup.restore_addons(
+                        addon_list
+                    )
+                    success = success and restore_success
 
                 # Wait for Home Assistant Core update/downgrade
                 if task_hass:
-                    _LOGGER.info("Restore %s wait for Home-Assistant", backup.slug)
+                    self._change_stage(
+                        RestoreJobStage.AWAIT_HOME_ASSISTANT_RESTART, backup
+                    )
                     await task_hass
+        except BackupError:
+            raise
         except Exception as err:  # pylint: disable=broad-except
             _LOGGER.exception("Restore %s error", backup.slug)
             capture_exception(err)
-            return False
+            raise BackupError(
+                f"Restore {backup.slug} error, see supervisor logs"
+            ) from err
         else:
             if addon_start_tasks:
-                # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
-                await asyncio.gather(*addon_start_tasks, return_exceptions=True)
+                self._change_stage(RestoreJobStage.AWAIT_ADDON_RESTARTS, backup)
+                # Failure to resume addons post restore is still a restore failure
+                if any(
+                    await asyncio.gather(*addon_start_tasks, return_exceptions=True)
+                ):
+                    return False
 
-            return True
+            return success
         finally:
-            # Do we need start Home Assistant Core?
-            if not await self.sys_homeassistant.core.is_running():
-                await self.sys_homeassistant.core.start()
+            # Leave Home Assistant alone if it wasn't part of the restore
+            if homeassistant:
+                self._change_stage(RestoreJobStage.CHECK_HOME_ASSISTANT, backup)
 
-            # Check If we can access to API / otherwise restart
-            if not await self.sys_homeassistant.api.check_api_state():
-                _LOGGER.warning("Need restart HomeAssistant for API")
-                await self.sys_homeassistant.core.restart()
+                # Do we need start Home Assistant Core?
+                if not await self.sys_homeassistant.core.is_running():
+                    await self.sys_homeassistant.core.start(
+                        _job_override__cleanup=False
+                    )
+
+                # Check If we can access to API / otherwise restart
+                if not await self.sys_homeassistant.api.check_api_state():
+                    _LOGGER.warning("Need restart HomeAssistant for API")
+                    await self.sys_homeassistant.core.restart(
+                        _job_override__cleanup=False
+                    )
 
     @Job(
         name="backup_manager_full_restore",
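The `if any(await asyncio.gather(..., return_exceptions=True)):` idiom above works because gather with return_exceptions=True never raises; it returns exception objects in place, and exception instances are truthy while a clean task's None result is falsy. A runnable sketch:

import asyncio


async def ok() -> None:
    return None


async def boom() -> None:
    raise RuntimeError("addon failed to start")


async def main() -> None:
    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    print(results)       # [None, RuntimeError('addon failed to start')]
    print(any(results))  # True, meaning at least one task failed


asyncio.run(main())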
@@ -389,48 +510,50 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
             JobCondition.INTERNET_SYSTEM,
             JobCondition.RUNNING,
         ],
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=BackupJobError,
+        cleanup=False,
     )
-    async def do_restore_full(self, backup: Backup, password=None):
+    async def do_restore_full(
+        self, backup: Backup, password: str | None = None
+    ) -> bool:
         """Restore a backup."""
         # Add backup ID to job
-        if job := self.sys_jobs.get_job():
-            job.reference = backup.slug
-
-        if self.lock.locked():
-            _LOGGER.error("A backup/restore process is already running")
-            return False
+        self.sys_jobs.current.reference = backup.slug
 
         if backup.sys_type != BackupType.FULL:
-            _LOGGER.error("%s is only a partial backup!", backup.slug)
-            return False
+            raise BackupInvalidError(
+                f"{backup.slug} is only a partial backup!", _LOGGER.error
+            )
 
         if backup.protected and not backup.set_password(password):
-            _LOGGER.error("Invalid password for backup %s", backup.slug)
-            return False
+            raise BackupInvalidError(
+                f"Invalid password for backup {backup.slug}", _LOGGER.error
+            )
 
         if backup.supervisor_version > self.sys_supervisor.version:
-            _LOGGER.error(
-                "Backup was made on supervisor version %s, can't restore on %s. Must update supervisor first.",
-                backup.supervisor_version,
-                self.sys_supervisor.version,
+            raise BackupInvalidError(
+                f"Backup was made on supervisor version {backup.supervisor_version}, "
+                f"can't restore on {self.sys_supervisor.version}. Must update supervisor first.",
+                _LOGGER.error,
             )
-            return False
 
         _LOGGER.info("Full-Restore %s start", backup.slug)
-        async with self.lock:
-            self.sys_core.state = CoreState.FREEZE
+        self.sys_core.state = CoreState.FREEZE
 
+        try:
             # Stop Home-Assistant / Add-ons
             await self.sys_core.shutdown()
 
             success = await self._do_restore(
                 backup, backup.addon_list, backup.folders, True, True
             )
+        finally:
             self.sys_core.state = CoreState.RUNNING
 
         if success:
             _LOGGER.info("Full-Restore %s done", backup.slug)
+        return success
 
     @Job(
         name="backup_manager_partial_restore",
@@ -441,6 +564,9 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
             JobCondition.INTERNET_SYSTEM,
             JobCondition.RUNNING,
         ],
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=BackupJobError,
+        cleanup=False,
     )
     async def do_restore_partial(
         self,
@@ -449,15 +575,10 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
         addons: list[str] | None = None,
         folders: list[Path] | None = None,
         password: str | None = None,
-    ):
+    ) -> bool:
         """Restore a backup."""
         # Add backup ID to job
-        if job := self.sys_jobs.get_job():
-            job.reference = backup.slug
-
-        if self.lock.locked():
-            _LOGGER.error("A backup/restore process is already running")
-            return False
+        self.sys_jobs.current.reference = backup.slug
 
         addon_list = addons or []
         folder_list = folders or []
@@ -468,30 +589,118 @@ class BackupManager(FileConfiguration, CoreSysAttributes):
             homeassistant = True
 
         if backup.protected and not backup.set_password(password):
-            _LOGGER.error("Invalid password for backup %s", backup.slug)
-            return False
+            raise BackupInvalidError(
+                f"Invalid password for backup {backup.slug}", _LOGGER.error
+            )
 
         if backup.homeassistant is None and homeassistant:
-            _LOGGER.error("No Home Assistant Core data inside the backup")
-            return False
+            raise BackupInvalidError(
+                "No Home Assistant Core data inside the backup", _LOGGER.error
+            )
 
         if backup.supervisor_version > self.sys_supervisor.version:
-            _LOGGER.error(
-                "Backup was made on supervisor version %s, can't restore on %s. Must update supervisor first.",
-                backup.supervisor_version,
-                self.sys_supervisor.version,
+            raise BackupInvalidError(
+                f"Backup was made on supervisor version {backup.supervisor_version}, "
+                f"can't restore on {self.sys_supervisor.version}. Must update supervisor first.",
+                _LOGGER.error,
             )
-            return False
 
         _LOGGER.info("Partial-Restore %s start", backup.slug)
-        async with self.lock:
-            self.sys_core.state = CoreState.FREEZE
+        self.sys_core.state = CoreState.FREEZE
 
+        try:
             success = await self._do_restore(
                 backup, addon_list, folder_list, homeassistant, False
             )
+        finally:
             self.sys_core.state = CoreState.RUNNING
 
         if success:
             _LOGGER.info("Partial-Restore %s done", backup.slug)
+        return success
+
+    @Job(
+        name="backup_manager_freeze_all",
+        conditions=[JobCondition.RUNNING],
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=BackupJobError,
+    )
+    async def freeze_all(self, timeout: float = DEFAULT_FREEZE_TIMEOUT) -> None:
+        """Freeze system to prepare for an external backup such as an image snapshot."""
+        self.sys_core.state = CoreState.FREEZE
+
+        # Determine running addons
+        installed = self.sys_addons.installed.copy()
+        is_running: list[bool] = await asyncio.gather(
+            *[addon.is_running() for addon in installed]
+        )
+        running_addons = [
+            installed[ind] for ind in range(len(installed)) if is_running[ind]
+        ]
+
+        # Create thaw task first to ensure we eventually undo freezes even if the below fails
+        self._thaw_task = asyncio.shield(
+            self.sys_create_task(self._thaw_all(running_addons, timeout))
+        )
+
+        # Tell Home Assistant to freeze for a backup
+        self._change_stage(BackupJobStage.HOME_ASSISTANT)
+        await self.sys_homeassistant.begin_backup()
+
+        # Run all pre-backup tasks for addons
+        self._change_stage(BackupJobStage.ADDONS)
+        await asyncio.gather(*[addon.begin_backup() for addon in running_addons])
+
+    @Job(
+        name="backup_manager_thaw_all",
+        conditions=[JobCondition.FROZEN],
+        on_condition=BackupJobError,
+    )
+    async def _thaw_all(
+        self, running_addons: list[Addon], timeout: float = DEFAULT_FREEZE_TIMEOUT
+    ) -> None:
+        """Thaw system after user signal or timeout."""
+        try:
+            try:
+                await asyncio.wait_for(self._thaw_event.wait(), timeout)
+            except TimeoutError:
+                _LOGGER.warning(
+                    "Timeout waiting for signal to thaw after manual freeze, beginning thaw now"
+                )
+
+            self._change_stage(BackupJobStage.HOME_ASSISTANT)
+            await self.sys_homeassistant.end_backup()
+
+            self._change_stage(BackupJobStage.ADDONS)
+            addon_start_tasks: list[asyncio.Task] = [
+                task
+                for task in await asyncio.gather(
+                    *[addon.end_backup() for addon in running_addons]
+                )
+                if task
+            ]
+        finally:
+            self.sys_core.state = CoreState.RUNNING
+            self._thaw_event.clear()
+            self._thaw_task = None
+
+        if addon_start_tasks:
+            self._change_stage(BackupJobStage.AWAIT_ADDON_RESTARTS)
+            await asyncio.gather(*addon_start_tasks, return_exceptions=True)
+
+    @Job(
+        name="backup_manager_signal_thaw",
+        conditions=[JobCondition.FROZEN],
+        limit=JobExecutionLimit.GROUP_ONCE,
+        on_condition=BackupJobError,
+        internal=True,
+    )
+    async def thaw_all(self) -> None:
+        """Signal thaw task to begin unfreezing the system."""
+        if not self._thaw_task:
+            raise BackupError(
+                "Freeze was not initiated by freeze API, cannot thaw this way"
+            )
+
+        self._thaw_event.set()
+        await self._thaw_task
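The freeze/thaw machinery added above hangs on three asyncio primitives: an Event to signal the thaw, wait_for to cap how long a freeze can last, and shield so the thaw task is created before anything can fail. A condensed runnable sketch of that flow, with prints standing in for the Home Assistant and add-on calls:

import asyncio


class Freezer:
    def __init__(self) -> None:
        self._thaw_event = asyncio.Event()
        self._thaw_task: asyncio.Future | None = None

    async def _thaw_all(self, timeout: float) -> None:
        try:
            await asyncio.wait_for(self._thaw_event.wait(), timeout)
        except TimeoutError:  # asyncio.TimeoutError on Python < 3.11
            print("Timeout waiting for thaw signal, thawing now")
        finally:
            self._thaw_event.clear()
            self._thaw_task = None
        print("system thawed")

    async def freeze_all(self, timeout: float = 600) -> None:
        # Schedule the thaw first so the freeze is always undone eventually
        self._thaw_task = asyncio.shield(asyncio.create_task(self._thaw_all(timeout)))
        print("system frozen")

    async def thaw_all(self) -> None:
        if not self._thaw_task:
            raise RuntimeError("Freeze was not initiated here, cannot thaw")
        self._thaw_event.set()
        await self._thaw_task


async def main() -> None:
    freezer = Freezer()
    await freezer.freeze_all()
    await freezer.thaw_all()


asyncio.run(main())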
@@ -1,4 +1,5 @@
 """Util add-on functions."""
+
 import hashlib
 import re
 
@@ -1,4 +1,5 @@
 """Validate some things around restore."""
+
 from __future__ import annotations
 
 from typing import Any

@@ -14,6 +15,7 @@ from ..const import (
     ATTR_DATE,
     ATTR_DAYS_UNTIL_STALE,
     ATTR_DOCKER,
+    ATTR_EXCLUDE_DATABASE,
     ATTR_FOLDERS,
     ATTR_HOMEASSISTANT,
     ATTR_NAME,

@@ -52,7 +54,7 @@ def unique_addons(addons_list):
 
 
 def v1_homeassistant(
-    homeassistant_data: dict[str, Any] | None
+    homeassistant_data: dict[str, Any] | None,
 ) -> dict[str, Any] | None:
     """Cleanup homeassistant artefacts from v1."""
     if not homeassistant_data:

@@ -103,6 +105,9 @@ SCHEMA_BACKUP = vol.Schema(
     {
         vol.Required(ATTR_VERSION): version_tag,
         vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float),
+        vol.Optional(
+            ATTR_EXCLUDE_DATABASE, default=False
+        ): vol.Boolean(),
     },
     extra=vol.REMOVE_EXTRA,
 )
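An Optional key with a default, as added to SCHEMA_BACKUP above, is what keeps old backup metadata valid: archives written before exclude_database existed still pass validation and simply come back as False. A short sketch with plain string keys instead of the ATTR_* constants:

import voluptuous as vol

SCHEMA = vol.Schema(
    {
        vol.Required("version"): str,
        vol.Optional("exclude_database", default=False): vol.Boolean(),
    },
    extra=vol.REMOVE_EXTRA,
)

# Legacy data validates and gains the default; unknown keys are dropped.
print(SCHEMA({"version": "2023.9.0", "junk": 1}))
# {'version': '2023.9.0', 'exclude_database': False}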
@@ -1,4 +1,6 @@
 """Bootstrap Supervisor."""
+
+# ruff: noqa: T100
 import logging
 import os
 from pathlib import Path

@@ -6,7 +8,7 @@ import signal
 
 from colorlog import ColoredFormatter
 
-from .addons import AddonManager
+from .addons.manager import AddonManager
 from .api import RestAPI
 from .arch import CpuArch
 from .auth import Auth

@@ -115,7 +117,7 @@ async def initialize_coresys() -> CoreSys:
         _LOGGER.warning(
             "Missing SUPERVISOR_MACHINE environment variable. Fallback to deprecated extraction!"
         )
-    _LOGGER.info("Seting up coresys for machine: %s", coresys.machine)
+    _LOGGER.info("Setting up coresys for machine: %s", coresys.machine)
 
     return coresys
 

@@ -221,6 +223,14 @@ def initialize_system(coresys: CoreSys) -> None:
         )
         config.path_emergency.mkdir()
 
+    # Addon Configs folder
+    if not config.path_addon_configs.is_dir():
+        _LOGGER.debug(
+            "Creating Supervisor add-on configs folder at '%s'",
+            config.path_addon_configs,
+        )
+        config.path_addon_configs.mkdir()
+
 
 def migrate_system_env(coresys: CoreSys) -> None:
     """Cleanup some stuff after update."""

@@ -248,9 +258,11 @@ def migrate_system_env(coresys: CoreSys) -> None:
 def initialize_logging() -> None:
     """Initialize the logging."""
     logging.basicConfig(level=logging.INFO)
-    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
+    fmt = (
+        "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
+    )
     colorfmt = f"%(log_color)s{fmt}%(reset)s"
-    datefmt = "%y-%m-%d %H:%M:%S"
+    datefmt = "%Y-%m-%d %H:%M:%S"
 
     # suppress overly verbose logs from libraries that aren't helpful
     logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
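The logging-format change is subtle: passing an explicit datefmt to %(asctime)s drops the default milliseconds, so %(msecs)03d puts them back, and %Y widens the year to four digits. A runnable sketch:

import logging

logging.basicConfig(
    level=logging.INFO,
    # %(msecs)03d restores the milliseconds that a custom datefmt suppresses
    format="%(asctime)s.%(msecs)03d %(levelname)s [%(name)s] %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
logging.getLogger("demo").info("hello")
# e.g. 2023-09-20 12:34:56.789 INFO [demo] hello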
@@ -1,4 +1,5 @@
 """Bus event system."""
+
 from __future__ import annotations
 
 from collections.abc import Awaitable, Callable
@@ -1,5 +1,6 @@
 """Bootstrap Supervisor."""
-from datetime import datetime
+
+from datetime import UTC, datetime
 import logging
 import os
 from pathlib import Path, PurePath

@@ -48,8 +49,9 @@ MEDIA_DATA = PurePath("media")
 MOUNTS_FOLDER = PurePath("mounts")
 MOUNTS_CREDENTIALS = PurePath(".mounts_credentials")
 EMERGENCY_DATA = PurePath("emergency")
+ADDON_CONFIGS = PurePath("addon_configs")
 
-DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()
+DEFAULT_BOOT_TIME = datetime.fromtimestamp(0, UTC).isoformat()
 
 # We filter out UTC because it's the system default fallback
 # Core also not respect the cotnainer timezone and reset timezones

@@ -153,7 +155,7 @@ class CoreConfig(FileConfiguration):
 
     def modify_log_level(self) -> None:
         """Change log level."""
-        lvl = getattr(logging, str(self.logging.value).upper())
+        lvl = getattr(logging, self.logging.value.upper())
         logging.getLogger("supervisor").setLevel(lvl)
 
     @property

@@ -163,7 +165,7 @@ class CoreConfig(FileConfiguration):
 
         boot_time = parse_datetime(boot_str)
         if not boot_time:
-            return datetime.utcfromtimestamp(1)
+            return datetime.fromtimestamp(1, UTC)
         return boot_time
 
     @last_boot.setter

@@ -231,6 +233,16 @@ class CoreConfig(FileConfiguration):
         """Return root add-on data folder external for Docker."""
         return PurePath(self.path_extern_supervisor, ADDONS_DATA)
 
+    @property
+    def path_addon_configs(self) -> Path:
+        """Return root Add-on configs folder."""
+        return self.path_supervisor / ADDON_CONFIGS
+
+    @property
+    def path_extern_addon_configs(self) -> PurePath:
+        """Return root Add-on configs folder external for Docker."""
+        return PurePath(self.path_extern_supervisor, ADDON_CONFIGS)
+
     @property
     def path_audio(self) -> Path:
         """Return root audio data folder."""
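The utcfromtimestamp replacements above follow the standard migration to timezone-aware datetimes: utcfromtimestamp() returns a naive value and is deprecated since Python 3.12, while fromtimestamp(..., UTC) carries an explicit offset. A short illustration (datetime.UTC requires Python 3.11+):

from datetime import UTC, datetime

naive = datetime.utcfromtimestamp(0)   # deprecated in 3.12, tzinfo is None
aware = datetime.fromtimestamp(0, UTC)
print(naive.isoformat())  # 1970-01-01T00:00:00
print(aware.isoformat())  # 1970-01-01T00:00:00+00:00
assert naive.tzinfo is None and aware.tzinfo is UTC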
@@ -1,9 +1,11 @@
|
|||||||
"""Constants file for Supervisor."""
|
"""Constants file for Supervisor."""
|
||||||
|
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from enum import Enum
|
from enum import StrEnum
|
||||||
from ipaddress import ip_network
|
from ipaddress import ip_network
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from sys import version_info as systemversion
|
from sys import version_info as systemversion
|
||||||
|
from typing import Self
|
||||||
|
|
||||||
from aiohttp import __version__ as aiohttpversion
|
from aiohttp import __version__ as aiohttpversion
|
||||||
|
|
||||||
@@ -19,6 +21,7 @@ SUPERVISOR_DATA = Path("/data")
 FILE_HASSIO_ADDONS = Path(SUPERVISOR_DATA, "addons.json")
 FILE_HASSIO_AUTH = Path(SUPERVISOR_DATA, "auth.json")
 FILE_HASSIO_BACKUPS = Path(SUPERVISOR_DATA, "backups.json")
+FILE_HASSIO_BOARD = Path(SUPERVISOR_DATA, "board.json")
 FILE_HASSIO_CONFIG = Path(SUPERVISOR_DATA, "config.json")
 FILE_HASSIO_DISCOVERY = Path(SUPERVISOR_DATA, "discovery.json")
 FILE_HASSIO_DOCKER = Path(SUPERVISOR_DATA, "docker.json")
@@ -66,6 +69,7 @@ META_SUPERVISOR = "supervisor"
 JSON_DATA = "data"
 JSON_MESSAGE = "message"
 JSON_RESULT = "result"
+JSON_JOB_ID = "job_id"
 
 RESULT_ERROR = "error"
 RESULT_OK = "ok"
@@ -88,6 +92,7 @@ REQUEST_FROM = "HASSIO_FROM"
 ATTR_ACCESS_TOKEN = "access_token"
 ATTR_ACCESSPOINTS = "accesspoints"
 ATTR_ACTIVE = "active"
+ATTR_ACTIVITY_LED = "activity_led"
 ATTR_ADDON = "addon"
 ATTR_ADDONS = "addons"
 ATTR_ADDONS_CUSTOM_LIST = "addons_custom_list"
@@ -113,6 +118,7 @@ ATTR_BACKUP_EXCLUDE = "backup_exclude"
 ATTR_BACKUP_POST = "backup_post"
 ATTR_BACKUP_PRE = "backup_pre"
 ATTR_BACKUPS = "backups"
+ATTR_BACKUPS_EXCLUDE_DATABASE = "backups_exclude_database"
 ATTR_BLK_READ = "blk_read"
 ATTR_BLK_WRITE = "blk_write"
 ATTR_BOARD = "board"
@@ -152,9 +158,11 @@ ATTR_DIAGNOSTICS = "diagnostics"
 ATTR_DISCOVERY = "discovery"
 ATTR_DISK = "disk"
 ATTR_DISK_FREE = "disk_free"
+ATTR_DISK_LED = "disk_led"
 ATTR_DISK_LIFE_TIME = "disk_life_time"
 ATTR_DISK_TOTAL = "disk_total"
 ATTR_DISK_USED = "disk_used"
+ATTR_DISPLAYNAME = "displayname"
 ATTR_DNS = "dns"
 ATTR_DOCKER = "docker"
 ATTR_DOCKER_API = "docker_api"
@@ -164,6 +172,7 @@ ATTR_ENABLE = "enable"
 ATTR_ENABLED = "enabled"
 ATTR_ENVIRONMENT = "environment"
 ATTR_EVENT = "event"
+ATTR_EXCLUDE_DATABASE = "exclude_database"
 ATTR_FEATURES = "features"
 ATTR_FILENAME = "filename"
 ATTR_FLAGS = "flags"
@@ -177,7 +186,9 @@ ATTR_HASSIO_API = "hassio_api"
 ATTR_HASSIO_ROLE = "hassio_role"
 ATTR_HASSOS = "hassos"
 ATTR_HEALTHY = "healthy"
+ATTR_HEARTBEAT_LED = "heartbeat_led"
 ATTR_HOMEASSISTANT = "homeassistant"
+ATTR_HOMEASSISTANT_EXCLUDE_DATABASE = "homeassistant_exclude_database"
 ATTR_HOMEASSISTANT_API = "homeassistant_api"
 ATTR_HOST = "host"
 ATTR_HOST_DBUS = "host_dbus"
@@ -252,6 +263,7 @@ ATTR_PLUGINS = "plugins"
 ATTR_PORT = "port"
 ATTR_PORTS = "ports"
 ATTR_PORTS_DESCRIPTION = "ports_description"
+ATTR_POWER_LED = "power_led"
 ATTR_PREFIX = "prefix"
 ATTR_PRIMARY = "primary"
 ATTR_PRIORITY = "priority"
@@ -298,6 +310,8 @@ ATTR_SUPERVISOR_VERSION = "supervisor_version"
 ATTR_SUPPORTED = "supported"
 ATTR_SUPPORTED_ARCH = "supported_arch"
 ATTR_SYSTEM = "system"
+ATTR_SYSTEM_MANAGED = "system_managed"
+ATTR_SYSTEM_MANAGED_CONFIG_ENTRY = "system_managed_config_entry"
 ATTR_TIMEOUT = "timeout"
 ATTR_TIMEZONE = "timezone"
 ATTR_TITLE = "title"
@@ -315,11 +329,13 @@ ATTR_UPDATE_KEY = "update_key"
 ATTR_URL = "url"
 ATTR_USB = "usb"
 ATTR_USER = "user"
+ATTR_USER_LED = "user_led"
 ATTR_USERNAME = "username"
 ATTR_UUID = "uuid"
 ATTR_VALID = "valid"
 ATTR_VALUE = "value"
 ATTR_VERSION = "version"
+ATTR_VERSION_TIMESTAMP = "version_timestamp"
 ATTR_VERSION_LATEST = "version_latest"
 ATTR_VIDEO = "video"
 ATTR_VLAN = "vlan"
@@ -334,14 +350,6 @@ PROVIDE_SERVICE = "provide"
 NEED_SERVICE = "need"
 WANT_SERVICE = "want"
 
-
-MAP_CONFIG = "config"
-MAP_SSL = "ssl"
-MAP_ADDONS = "addons"
-MAP_BACKUP = "backup"
-MAP_SHARE = "share"
-MAP_MEDIA = "media"
-
 ARCH_ARMHF = "armhf"
 ARCH_ARMV7 = "armv7"
 ARCH_AARCH64 = "aarch64"
@@ -374,14 +382,29 @@ ROLE_ADMIN = "admin"
 ROLE_ALL = [ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_BACKUP, ROLE_MANAGER, ROLE_ADMIN]
 
 
-class AddonBoot(str, Enum):
+class AddonBootConfig(StrEnum):
+    """Boot mode config for the add-on."""
+
+    AUTO = "auto"
+    MANUAL = "manual"
+    MANUAL_ONLY = "manual_only"
+
+
+class AddonBoot(StrEnum):
     """Boot mode for the add-on."""
 
     AUTO = "auto"
     MANUAL = "manual"
 
+    @classmethod
+    def _missing_(cls, value: str) -> Self | None:
+        """Convert 'forced' config values to their counterpart."""
+        if value == AddonBootConfig.MANUAL_ONLY:
+            return AddonBoot.MANUAL
+        return None
+
 
-class AddonStartup(str, Enum):
+class AddonStartup(StrEnum):
     """Startup types of Add-on."""
 
     INITIALIZE = "initialize"
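The _missing_ hook above is what maps the config-only "manual_only" value onto plain AddonBoot.MANUAL at runtime. A minimal standalone sketch of that behavior, assuming just the two enums from this hunk (StrEnum and Self need Python 3.11+):

from enum import StrEnum
from typing import Self

class AddonBootConfig(StrEnum):
    AUTO = "auto"
    MANUAL = "manual"
    MANUAL_ONLY = "manual_only"

class AddonBoot(StrEnum):
    AUTO = "auto"
    MANUAL = "manual"

    @classmethod
    def _missing_(cls, value: str) -> Self | None:
        # Called by Enum lookup when no member matches the value
        if value == AddonBootConfig.MANUAL_ONLY:
            return AddonBoot.MANUAL
        return None

# Without _missing_, AddonBoot("manual_only") would raise ValueError:
assert AddonBoot(AddonBootConfig.MANUAL_ONLY) is AddonBoot.MANUAL
# StrEnum members are real strings:
assert str(AddonBoot.AUTO) == "auto"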
@@ -391,7 +414,7 @@ class AddonStartup(str, Enum):
     ONCE = "once"
 
 
-class AddonStage(str, Enum):
+class AddonStage(StrEnum):
     """Stage types of add-on."""
 
     STABLE = "stable"
@@ -399,7 +422,7 @@ class AddonStage(str, Enum):
     DEPRECATED = "deprecated"
 
 
-class AddonState(str, Enum):
+class AddonState(StrEnum):
     """State of add-on."""
 
     STARTUP = "startup"
@@ -409,7 +432,7 @@ class AddonState(str, Enum):
     ERROR = "error"
 
 
-class UpdateChannel(str, Enum):
+class UpdateChannel(StrEnum):
     """Core supported update channels."""
 
     STABLE = "stable"
@@ -417,7 +440,7 @@ class UpdateChannel(str, Enum):
     DEV = "dev"
 
 
-class CoreState(str, Enum):
+class CoreState(StrEnum):
     """Represent current loading state."""
 
     INITIALIZE = "initialize"
@@ -430,7 +453,7 @@ class CoreState(str, Enum):
     CLOSE = "close"
 
 
-class LogLevel(str, Enum):
+class LogLevel(StrEnum):
     """Logging level of system."""
 
     DEBUG = "debug"
@@ -440,7 +463,7 @@ class LogLevel(str, Enum):
     CRITICAL = "critical"
 
 
-class HostFeature(str, Enum):
+class HostFeature(StrEnum):
     """Host feature."""
 
     HASSOS = "hassos"
@@ -452,16 +475,18 @@ class HostFeature(str, Enum):
     TIMEDATE = "timedate"
 
 
-class BusEvent(str, Enum):
+class BusEvent(StrEnum):
     """Bus event type."""
 
+    DOCKER_CONTAINER_STATE_CHANGE = "docker_container_state_change"
     HARDWARE_NEW_DEVICE = "hardware_new_device"
     HARDWARE_REMOVE_DEVICE = "hardware_remove_device"
-    DOCKER_CONTAINER_STATE_CHANGE = "docker_container_state_change"
+    SUPERVISOR_JOB_END = "supervisor_job_end"
+    SUPERVISOR_JOB_START = "supervisor_job_start"
     SUPERVISOR_STATE_CHANGE = "supervisor_state_change"
 
 
-class CpuArch(str, Enum):
+class CpuArch(StrEnum):
     """Supported CPU architectures."""
 
     ARMV7 = "armv7"
@@ -476,8 +501,25 @@ class IngressSessionDataUser:
     """Format of an IngressSessionDataUser object."""
 
     id: str
-    display_name: str
-    username: str
+    display_name: str | None = None
+    username: str | None = None
+
+    def to_dict(self) -> dict[str, str | None]:
+        """Get dictionary representation."""
+        return {
+            ATTR_ID: self.id,
+            ATTR_DISPLAYNAME: self.display_name,
+            ATTR_USERNAME: self.username,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, str | None]) -> Self:
+        """Return object from dictionary representation."""
+        return cls(
+            id=data[ATTR_ID],
+            display_name=data.get(ATTR_DISPLAYNAME),
+            username=data.get(ATTR_USERNAME),
+        )
 
 
 @dataclass
@@ -486,6 +528,15 @@ class IngressSessionData:
 
     user: IngressSessionDataUser
 
+    def to_dict(self) -> dict[str, dict[str, str | None]]:
+        """Get dictionary representation."""
+        return {ATTR_USER: self.user.to_dict()}
+
+    @classmethod
+    def from_dict(cls, data: dict[str, dict[str, str | None]]) -> Self:
+        """Return object from dictionary representation."""
+        return cls(user=IngressSessionDataUser.from_dict(data[ATTR_USER]))
+
 
 STARTING_STATES = [
     CoreState.INITIALIZE,
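The new to_dict/from_dict helpers give the ingress session dataclasses a symmetric JSON shape. A round-trip sketch, assuming the classes and ATTR_* constants from this file (key names follow the constants, e.g. ATTR_DISPLAYNAME == "displayname"):

user = IngressSessionDataUser(id="abc123")  # display_name/username are now optional
session = IngressSessionData(user=user)

payload = session.to_dict()
# -> {"user": {"id": "abc123", "displayname": None, "username": None}}

restored = IngressSessionData.from_dict(payload)
assert restored.user.id == "abc123"
assert restored.user.display_name is None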
@@ -1,12 +1,11 @@
 """Main file for Supervisor."""
+
 import asyncio
 from collections.abc import Awaitable
 from contextlib import suppress
 from datetime import timedelta
 import logging
 
-import async_timeout
-
 from .const import (
     ATTR_STARTUP,
     RUN_SUPERVISOR_STATE,
@@ -28,7 +27,7 @@ from .homeassistant.core import LANDINGPAGE
 from .resolution.const import ContextType, IssueType, SuggestionType, UnhealthyReason
 from .utils.dt import utcnow
 from .utils.sentry import capture_exception
-from .utils.whoami import retrieve_whoami
+from .utils.whoami import WhoamiData, retrieve_whoami
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -63,20 +62,23 @@ class Core(CoreSysAttributes):
         if self._state == new_state:
             return
         try:
-            RUN_SUPERVISOR_STATE.write_text(new_state.value, encoding="utf-8")
+            RUN_SUPERVISOR_STATE.write_text(new_state, encoding="utf-8")
         except OSError as err:
             _LOGGER.warning(
                 "Can't update the Supervisor state to %s: %s", new_state, err
             )
         finally:
             self._state = new_state
-            self.sys_bus.fire_event(BusEvent.SUPERVISOR_STATE_CHANGE, new_state)
 
-        # These will be received by HA after startup has completed which won't make sense
-        if new_state not in STARTING_STATES:
-            self.sys_homeassistant.websocket.supervisor_update_event(
-                "info", {"state": new_state}
-            )
+        # Don't attempt to notify anyone on CLOSE as we're about to stop the event loop
+        if new_state != CoreState.CLOSE:
+            self.sys_bus.fire_event(BusEvent.SUPERVISOR_STATE_CHANGE, new_state)
+
+            # These will be received by HA after startup has completed which won't make sense
+            if new_state not in STARTING_STATES:
+                self.sys_homeassistant.websocket.supervisor_update_event(
+                    "info", {"state": new_state}
+                )
 
     async def connect(self):
         """Connect Supervisor container."""
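Dropping .value in the write_text() call above works because CoreState is now a StrEnum: the member is itself the plain string "running", so str() and format() yield the raw value (on Python 3.11+ a (str, Enum) member formats as "CoreState.RUNNING" instead) and Path.write_text() accepts the member directly. A quick sketch:

from enum import StrEnum

class CoreState(StrEnum):
    RUNNING = "running"

assert isinstance(CoreState.RUNNING, str)
assert str(CoreState.RUNNING) == "running"
assert f"state={CoreState.RUNNING}" == "state=running"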
@@ -132,10 +134,10 @@ class Core(CoreSysAttributes):
                 self.sys_mounts.load(),
                 # Load Docker manager
                 self.sys_docker.load(),
-                # Load Plugins container
-                self.sys_plugins.load(),
                 # load last available data
                 self.sys_updater.load(),
+                # Load Plugins container
+                self.sys_plugins.load(),
                 # Load Home Assistant
                 self.sys_homeassistant.load(),
                 # Load CPU/Arch
@@ -176,7 +178,15 @@ class Core(CoreSysAttributes):
             and not self.sys_dev
             and self.supported
         ):
-            self.sys_dbus.agent.diagnostics = self.sys_config.diagnostics
+            try:
+                await self.sys_dbus.agent.set_diagnostics(self.sys_config.diagnostics)
+            except Exception as err:  # pylint: disable=broad-except
+                _LOGGER.warning(
+                    "Could not set diagnostics to %s due to %s",
+                    self.sys_config.diagnostics,
+                    err,
+                )
+                capture_exception(err)
 
         # Evaluate the system
         await self.sys_resolution.evaluate.evaluate_system()
@@ -247,7 +257,7 @@ class Core(CoreSysAttributes):
         except HomeAssistantError as err:
             capture_exception(err)
         else:
-            _LOGGER.info("Skiping start of Home Assistant")
+            _LOGGER.info("Skipping start of Home Assistant")
 
         # Core is not running
         if self.sys_homeassistant.core.error_state:
@@ -295,7 +305,7 @@ class Core(CoreSysAttributes):
 
         # Stage 1
         try:
-            async with async_timeout.timeout(10):
+            async with asyncio.timeout(10):
                 await asyncio.wait(
                     [
                         self.sys_create_task(coro)
@@ -306,12 +316,12 @@ class Core(CoreSysAttributes):
                     )
                 ]
             )
-        except asyncio.TimeoutError:
+        except TimeoutError:
             _LOGGER.warning("Stage 1: Force Shutdown!")
 
         # Stage 2
         try:
-            async with async_timeout.timeout(10):
+            async with asyncio.timeout(10):
                 await asyncio.wait(
                     [
                         self.sys_create_task(coro)
@@ -323,7 +333,7 @@ class Core(CoreSysAttributes):
                     )
                 ]
             )
-        except asyncio.TimeoutError:
+        except TimeoutError:
             _LOGGER.warning("Stage 2: Force Shutdown!")
 
         self.state = CoreState.CLOSE
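Stages 1 and 2 now use the stdlib asyncio.timeout() (Python 3.11+) instead of the third-party async_timeout package, and catch the builtin TimeoutError, which stdlib asyncio timeouts raise since 3.11. A minimal sketch of the pattern:

import asyncio

async def shutdown_with_deadline(tasks: list[asyncio.Task]) -> None:
    try:
        async with asyncio.timeout(10):  # replaces async_timeout.timeout(10)
            await asyncio.wait(tasks)
    except TimeoutError:  # asyncio.TimeoutError is an alias of this since 3.11
        print("Force Shutdown!")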
@@ -336,9 +346,6 @@ class Core(CoreSysAttributes):
         if self.state == CoreState.RUNNING:
             self.state = CoreState.SHUTDOWN
 
-        # Stop docker monitoring
-        await self.sys_docker.unload()
-
         # Shutdown Application Add-ons, using Home Assistant API
         await self.sys_addons.shutdown(AddonStartup.APPLICATION)
 
@@ -360,6 +367,13 @@ class Core(CoreSysAttributes):
             self.sys_config.last_boot = self.sys_hardware.helper.last_boot
             self.sys_config.save_data()
 
+    async def _retrieve_whoami(self, with_ssl: bool) -> WhoamiData | None:
+        try:
+            return await retrieve_whoami(self.sys_websession, with_ssl)
+        except WhoamiSSLError:
+            _LOGGER.info("Whoami service SSL error")
+            return None
+
     async def _adjust_system_datetime(self):
         """Adjust system time/date on startup."""
         # If no timezone is detect or set
@@ -372,21 +386,15 @@ class Core(CoreSysAttributes):
 
         # Get Timezone data
         try:
-            data = await retrieve_whoami(self.sys_websession)
-        except WhoamiSSLError:
-            pass
+            data = await self._retrieve_whoami(True)
+
+            # SSL Date Issue & possible time drift
+            if not data:
+                data = await self._retrieve_whoami(False)
         except WhoamiError as err:
             _LOGGER.warning("Can't adjust Time/Date settings: %s", err)
             return
 
-        # SSL Date Issue & possible time drift
-        if not data:
-            try:
-                data = await retrieve_whoami(self.sys_websession, with_ssl=False)
-            except WhoamiError as err:
-                _LOGGER.error("Can't adjust Time/Date settings: %s", err)
-                return
-
         self.sys_config.timezone = self.sys_config.timezone or data.timezone
 
         # Calculate if system time is out of sync
@@ -1,8 +1,10 @@
 """Handle core shared data."""
+
 from __future__ import annotations
 
 import asyncio
 from collections.abc import Callable, Coroutine
+from contextvars import Context, copy_context
 from datetime import datetime
 from functools import partial
 import logging
@@ -17,7 +19,7 @@ from .const import ENV_SUPERVISOR_DEV, SERVER_SOFTWARE
 from .utils.dt import UTC, get_time_zone
 
 if TYPE_CHECKING:
-    from .addons import AddonManager
+    from .addons.manager import AddonManager
     from .api import RestAPI
     from .arch import CpuArch
     from .auth import Auth
@@ -61,7 +63,7 @@ class CoreSys:
 
         # External objects
         self._loop: asyncio.BaseEventLoop = asyncio.get_running_loop()
-        self._websession: aiohttp.ClientSession = aiohttp.ClientSession()
+        self._websession = None
 
        # Global objects
         self._config: CoreConfig = CoreConfig()
@@ -94,10 +96,11 @@ class CoreSys:
         self._bus: Bus | None = None
         self._mounts: MountManager | None = None
 
-        # Set default header for aiohttp
-        self._websession._default_headers = MappingProxyType(
-            {aiohttp.hdrs.USER_AGENT: SERVER_SOFTWARE}
-        )
+        # Setup aiohttp session
+        self.create_websession()
+
+        # Task factory attributes
+        self._set_task_context: list[Callable[[Context], Context]] = []
 
     @property
     def dev(self) -> bool:
@@ -109,8 +112,11 @@ class CoreSys:
         """Return system timezone."""
         if self.config.timezone:
             return self.config.timezone
+        # pylint bug with python 3.12.4 (https://github.com/pylint-dev/pylint/issues/9811)
+        # pylint: disable=no-member
         if self.host.info.timezone:
             return self.host.info.timezone
+        # pylint: enable=no-member
         return "UTC"
 
     @property
@@ -520,6 +526,17 @@ class CoreSys:
         """Return now in local timezone."""
         return datetime.now(get_time_zone(self.timezone) or UTC)
 
+    def add_set_task_context_callback(
+        self, callback: Callable[[Context], Context]
+    ) -> None:
+        """Add callback used to modify context prior to creating a task.
+
+        Only used for tasks created via CoreSys.create_task. Callback can modify the provided
+        context using context.run (ex. `context.run(var.set, "new_value")`). Callback should
+        return the context to be provided to task.
+        """
+        self._set_task_context.append(callback)
+
     def run_in_executor(
         self, funct: Callable[..., T], *args: tuple[Any], **kwargs: dict[str, Any]
     ) -> Coroutine[Any, Any, T]:
@@ -529,9 +546,54 @@ class CoreSys:
 
         return self.loop.run_in_executor(None, funct, *args)
 
+    def create_websession(self) -> None:
+        """Create a new aiohttp session."""
+        if self._websession:
+            self.create_task(self._websession.close())
+
+        # Create session and set default header for aiohttp
+        self._websession: aiohttp.ClientSession = aiohttp.ClientSession(
+            headers=MappingProxyType({aiohttp.hdrs.USER_AGENT: SERVER_SOFTWARE})
+        )
+
+    def _create_context(self) -> Context:
+        """Create a new context for a task."""
+        context = copy_context()
+        for callback in self._set_task_context:
+            context = callback(context)
+        return context
+
     def create_task(self, coroutine: Coroutine) -> asyncio.Task:
         """Create an async task."""
-        return self.loop.create_task(coroutine)
+        return self.loop.create_task(coroutine, context=self._create_context())
+
+    def call_later(
+        self,
+        delay: float,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task after a delay."""
+        if kwargs:
+            funct = partial(funct, **kwargs)
+
+        return self.loop.call_later(delay, funct, *args, context=self._create_context())
+
+    def call_at(
+        self,
+        when: datetime,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task at the specified datetime."""
+        if kwargs:
+            funct = partial(funct, **kwargs)
+
+        return self.loop.call_at(
+            when.timestamp(), funct, *args, context=self._create_context()
+        )
+
 
 class CoreSysAttributes:
@@ -706,9 +768,29 @@ class CoreSysAttributes:
     def sys_run_in_executor(
         self, funct: Callable[..., T], *args: tuple[Any], **kwargs: dict[str, Any]
     ) -> Coroutine[Any, Any, T]:
-        """Add an job to the executor pool."""
+        """Add a job to the executor pool."""
         return self.coresys.run_in_executor(funct, *args, **kwargs)
 
     def sys_create_task(self, coroutine: Coroutine) -> asyncio.Task:
         """Create an async task."""
         return self.coresys.create_task(coroutine)
+
+    def sys_call_later(
+        self,
+        delay: float,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task after a delay."""
+        return self.coresys.call_later(delay, funct, *args, **kwargs)
+
+    def sys_call_at(
+        self,
+        when: datetime,
+        funct: Callable[..., Coroutine[Any, Any, T]],
+        *args: tuple[Any],
+        **kwargs: dict[str, Any],
+    ) -> asyncio.TimerHandle:
+        """Start a task at the specified datetime."""
+        return self.coresys.call_at(when, funct, *args, **kwargs)
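The new task factory copies the current contextvars.Context for every task or timer created through CoreSys, after letting registered callbacks mutate the copy. A standalone sketch of the mechanism (the request_id variable is hypothetical, purely for illustration; loop.create_task accepts context= since Python 3.11):

import asyncio
from contextvars import Context, ContextVar, copy_context

request_id: ContextVar[str | None] = ContextVar("request_id", default=None)

def set_request_id(context: Context) -> Context:
    # Shape of a callback passed to add_set_task_context_callback()
    context.run(request_id.set, "job-1234")
    return context

def create_context(callbacks) -> Context:
    # Mirrors what CoreSys._create_context() does
    context = copy_context()
    for callback in callbacks:
        context = callback(context)
    return context

async def child() -> None:
    print(request_id.get())  # -> "job-1234" inside the task's context

async def main() -> None:
    ctx = create_context([set_request_id])
    await asyncio.get_running_loop().create_task(child(), context=ctx)

asyncio.run(main())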
@@ -5,6 +5,7 @@
   "raspberrypi3-64": ["aarch64", "armv7", "armhf"],
   "raspberrypi4": ["armv7", "armhf"],
   "raspberrypi4-64": ["aarch64", "armv7", "armhf"],
+  "raspberrypi5-64": ["aarch64", "armv7", "armhf"],
   "yellow": ["aarch64", "armv7", "armhf"],
   "green": ["aarch64", "armv7", "armhf"],
   "tinker": ["armv7", "armhf"],
@@ -1,12 +1,14 @@
 """OS-Agent implementation for DBUS."""
+
 import asyncio
+from collections.abc import Awaitable
 import logging
 from typing import Any
 
 from awesomeversion import AwesomeVersion
 from dbus_fast.aio.message_bus import MessageBus
 
-from ...exceptions import DBusError, DBusInterfaceError
+from ...exceptions import DBusInterfaceError, DBusServiceUnkownError
 from ..const import (
     DBUS_ATTR_DIAGNOSTICS,
     DBUS_ATTR_VERSION,
@@ -80,11 +82,9 @@ class OSAgent(DBusInterfaceProxy):
         """Return if diagnostics is enabled on OS-Agent."""
         return self.properties[DBUS_ATTR_DIAGNOSTICS]
 
-    @diagnostics.setter
-    @dbus_property
-    def diagnostics(self, value: bool) -> None:
+    def set_diagnostics(self, value: bool) -> Awaitable[None]:
         """Enable or disable OS-Agent diagnostics."""
-        asyncio.create_task(self.dbus.set_diagnostics(value))
+        return self.dbus.set_diagnostics(value)
 
     @property
     def all(self) -> list[DBusInterface]:
@@ -96,13 +96,25 @@ class OSAgent(DBusInterfaceProxy):
         _LOGGER.info("Load dbus interface %s", self.name)
         try:
             await super().connect(bus)
-            await asyncio.gather(*[dbus.connect(bus) for dbus in self.all])
-        except DBusError:
-            _LOGGER.warning("Can't connect to OS-Agent")
-        except DBusInterfaceError:
-            _LOGGER.warning(
+        except (DBusServiceUnkownError, DBusInterfaceError):
+            _LOGGER.error(
                 "No OS-Agent support on the host. Some Host functions have been disabled."
             )
+            return
+
+        errors = await asyncio.gather(
+            *[dbus.connect(bus) for dbus in self.all], return_exceptions=True
+        )
+
+        for err in errors:
+            if err:
+                dbus = self.all[errors.index(err)]
+                _LOGGER.error(
+                    "Can't load OS Agent dbus interface %s %s: %s",
+                    dbus.bus_name,
+                    dbus.object_path,
+                    err,
+                )
 
     @dbus_connected
     async def update(self, changed: dict[str, Any] | None = None) -> None:
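Connecting the sub-interfaces now passes return_exceptions=True so that one failing interface no longer aborts the others; failures come back as exception objects in the result list instead of raising. A minimal sketch of that error-collection pattern:

import asyncio

async def connect_all(connectors) -> None:
    errors = await asyncio.gather(
        *[connect() for connect in connectors], return_exceptions=True
    )
    for index, err in enumerate(errors):
        if isinstance(err, Exception):
            print(f"connector {index} failed: {err}")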
@@ -1,4 +1,5 @@
 """AppArmor object for OS-Agent."""
+
 from pathlib import Path
 
 from awesomeversion import AwesomeVersion
@@ -1,9 +1,10 @@
 """Board management for OS Agent."""
+
 import logging
 
 from dbus_fast.aio.message_bus import MessageBus
 
-from ....exceptions import BoardInvalidError
+from ....exceptions import BoardInvalidError, DBusInterfaceError, DBusServiceUnkownError
 from ...const import (
     DBUS_ATTR_BOARD,
     DBUS_IFACE_HAOS_BOARDS,
@@ -11,7 +12,8 @@ from ...const import (
     DBUS_OBJECT_HAOS_BOARDS,
 )
 from ...interface import DBusInterfaceProxy, dbus_property
-from .const import BOARD_NAME_SUPERVISED, BOARD_NAME_YELLOW
+from .const import BOARD_NAME_GREEN, BOARD_NAME_SUPERVISED, BOARD_NAME_YELLOW
+from .green import Green
 from .interface import BoardProxy
 from .supervised import Supervised
 from .yellow import Yellow
@@ -39,6 +41,14 @@ class BoardManager(DBusInterfaceProxy):
         """Get board name."""
         return self.properties[DBUS_ATTR_BOARD]
 
+    @property
+    def green(self) -> Green:
+        """Get Green board."""
+        if self.board != BOARD_NAME_GREEN:
+            raise BoardInvalidError("Green board is not in use", _LOGGER.error)
+
+        return self._board_proxy
+
     @property
     def supervised(self) -> Supervised:
         """Get Supervised board."""
@@ -61,8 +71,14 @@ class BoardManager(DBusInterfaceProxy):
 
         if self.board == BOARD_NAME_YELLOW:
             self._board_proxy = Yellow()
+        elif self.board == BOARD_NAME_GREEN:
+            self._board_proxy = Green()
         elif self.board == BOARD_NAME_SUPERVISED:
             self._board_proxy = Supervised()
+        else:
+            return
 
-        if self._board_proxy:
+        try:
             await self._board_proxy.connect(bus)
+        except (DBusServiceUnkownError, DBusInterfaceError) as ex:
+            _LOGGER.warning("OS-Agent board support initialization failed: %s", ex)
@@ -1,4 +1,5 @@
 """Constants for boards."""
 
+BOARD_NAME_GREEN = "Green"
 BOARD_NAME_SUPERVISED = "Supervised"
 BOARD_NAME_YELLOW = "Yellow"
supervisor/dbus/agent/boards/green.py (new file, 65 lines)
@@ -0,0 +1,65 @@
+"""Green board management."""
+
+import asyncio
+from collections.abc import Awaitable
+
+from dbus_fast.aio.message_bus import MessageBus
+
+from ....const import ATTR_ACTIVITY_LED, ATTR_POWER_LED, ATTR_USER_LED
+from ...const import DBUS_ATTR_ACTIVITY_LED, DBUS_ATTR_POWER_LED, DBUS_ATTR_USER_LED
+from ...interface import dbus_property
+from .const import BOARD_NAME_GREEN
+from .interface import BoardProxy
+from .validate import SCHEMA_GREEN_BOARD
+
+
+class Green(BoardProxy):
+    """Green board manager object."""
+
+    def __init__(self) -> None:
+        """Initialize properties."""
+        super().__init__(BOARD_NAME_GREEN, SCHEMA_GREEN_BOARD)
+
+    @property
+    @dbus_property
+    def activity_led(self) -> bool:
+        """Get activity LED enabled."""
+        return self.properties[DBUS_ATTR_ACTIVITY_LED]
+
+    def set_activity_led(self, enabled: bool) -> Awaitable[None]:
+        """Enable/disable activity LED."""
+        self._data[ATTR_ACTIVITY_LED] = enabled
+        return self.dbus.Boards.Green.set_activity_led(enabled)
+
+    @property
+    @dbus_property
+    def power_led(self) -> bool:
+        """Get power LED enabled."""
+        return self.properties[DBUS_ATTR_POWER_LED]
+
+    def set_power_led(self, enabled: bool) -> Awaitable[None]:
+        """Enable/disable power LED."""
+        self._data[ATTR_POWER_LED] = enabled
+        return self.dbus.Boards.Green.set_power_led(enabled)
+
+    @property
+    @dbus_property
+    def user_led(self) -> bool:
+        """Get user LED enabled."""
+        return self.properties[DBUS_ATTR_USER_LED]
+
+    def set_user_led(self, enabled: bool) -> Awaitable[None]:
+        """Enable/disable user LED."""
+        self._data[ATTR_USER_LED] = enabled
+        return self.dbus.Boards.Green.set_user_led(enabled)
+
+    async def connect(self, bus: MessageBus) -> None:
+        """Connect to D-Bus."""
+        await super().connect(bus)
+
+        # Set LEDs based on settings on connect
+        await asyncio.gather(
+            self.set_activity_led(self._data[ATTR_ACTIVITY_LED]),
+            self.set_power_led(self._data[ATTR_POWER_LED]),
+            self.set_user_led(self._data[ATTR_USER_LED]),
+        )
@@ -1,17 +1,23 @@
 """Board dbus proxy interface."""
 
+from voluptuous import Schema
+
+from ....const import FILE_HASSIO_BOARD
+from ....utils.common import FileConfiguration
 from ...const import DBUS_IFACE_HAOS_BOARDS, DBUS_NAME_HAOS, DBUS_OBJECT_HAOS_BOARDS
 from ...interface import DBusInterfaceProxy
+from .validate import SCHEMA_BASE_BOARD
 
 
-class BoardProxy(DBusInterfaceProxy):
+class BoardProxy(FileConfiguration, DBusInterfaceProxy):
     """DBus interface proxy for os board."""
 
     bus_name: str = DBUS_NAME_HAOS
 
-    def __init__(self, name: str) -> None:
+    def __init__(self, name: str, file_schema: Schema | None = None) -> None:
         """Initialize properties."""
-        super().__init__()
+        super().__init__(FILE_HASSIO_BOARD, file_schema or SCHEMA_BASE_BOARD)
+        super(FileConfiguration, self).__init__()
 
         self._name: str = name
         self.object_path: str = f"{DBUS_OBJECT_HAOS_BOARDS}/{name}"
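BoardProxy now inherits from both FileConfiguration and DBusInterfaceProxy, and its __init__ addresses each base explicitly: plain super() resolves to FileConfiguration (next in the MRO), while super(FileConfiguration, self) skips past it to DBusInterfaceProxy, so both bases are initialized with different argument lists. A stripped-down sketch of that double-super pattern (stub classes for illustration only):

class FileConfiguration:
    def __init__(self, file, schema):
        print("FileConfiguration.__init__", file, schema)

class DBusInterfaceProxy:
    def __init__(self):
        print("DBusInterfaceProxy.__init__")

class BoardProxy(FileConfiguration, DBusInterfaceProxy):
    def __init__(self) -> None:
        # MRO: BoardProxy -> FileConfiguration -> DBusInterfaceProxy -> object
        super().__init__("board.json", None)       # runs FileConfiguration.__init__
        super(FileConfiguration, self).__init__()  # runs DBusInterfaceProxy.__init__

BoardProxy()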
@@ -1,5 +1,9 @@
 """Supervised board management."""
 
+from typing import Any
+
+from supervisor.dbus.utils import dbus_connected
+
 from .const import BOARD_NAME_SUPERVISED
 from .interface import BoardProxy
 
@@ -11,3 +15,11 @@ class Supervised(BoardProxy):
         """Initialize properties."""
         super().__init__(BOARD_NAME_SUPERVISED)
         self.sync_properties: bool = False
+
+    @dbus_connected
+    async def update(self, changed: dict[str, Any] | None = None) -> None:
+        """Do nothing as there are no properties.
+
+        Currently unused, avoid using the Properties interface to avoid a bug in
+        Go D-Bus, see: https://github.com/home-assistant/os-agent/issues/206
+        """
supervisor/dbus/agent/boards/validate.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+"""Validation for board config."""
+
+import voluptuous as vol
+
+from ....const import (
+    ATTR_ACTIVITY_LED,
+    ATTR_DISK_LED,
+    ATTR_HEARTBEAT_LED,
+    ATTR_POWER_LED,
+    ATTR_USER_LED,
+)
+
+# pylint: disable=no-value-for-parameter
+SCHEMA_BASE_BOARD = vol.Schema({}, extra=vol.REMOVE_EXTRA)
+
+SCHEMA_GREEN_BOARD = vol.Schema(
+    {
+        vol.Optional(ATTR_ACTIVITY_LED, default=True): vol.Boolean(),
+        vol.Optional(ATTR_POWER_LED, default=True): vol.Boolean(),
+        vol.Optional(ATTR_USER_LED, default=True): vol.Boolean(),
+    },
+    extra=vol.REMOVE_EXTRA,
+)
+
+SCHEMA_YELLOW_BOARD = vol.Schema(
+    {
+        vol.Optional(ATTR_DISK_LED, default=True): vol.Boolean(),
+        vol.Optional(ATTR_HEARTBEAT_LED, default=True): vol.Boolean(),
+        vol.Optional(ATTR_POWER_LED, default=True): vol.Boolean(),
+    },
+    extra=vol.REMOVE_EXTRA,
+)
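With voluptuous, vol.Optional(..., default=True) fills in missing keys and extra=vol.REMOVE_EXTRA silently drops unknown ones, so a board's persisted config always comes back complete and clean. A sketch of how SCHEMA_GREEN_BOARD above behaves:

assert SCHEMA_GREEN_BOARD({}) == {
    "activity_led": True,
    "power_led": True,
    "user_led": True,
}
assert SCHEMA_GREEN_BOARD({"power_led": False, "bogus": 1}) == {
    "activity_led": True,
    "power_led": False,
    "user_led": True,
}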
@@ -1,11 +1,16 @@
 """Yellow board management."""
+
 import asyncio
+from collections.abc import Awaitable
 
+from dbus_fast.aio.message_bus import MessageBus
+
+from ....const import ATTR_DISK_LED, ATTR_HEARTBEAT_LED, ATTR_POWER_LED
 from ...const import DBUS_ATTR_DISK_LED, DBUS_ATTR_HEARTBEAT_LED, DBUS_ATTR_POWER_LED
 from ...interface import dbus_property
 from .const import BOARD_NAME_YELLOW
 from .interface import BoardProxy
+from .validate import SCHEMA_YELLOW_BOARD
 
 
 class Yellow(BoardProxy):
@@ -13,7 +18,7 @@ class Yellow(BoardProxy):
 
     def __init__(self) -> None:
         """Initialize properties."""
-        super().__init__(BOARD_NAME_YELLOW)
+        super().__init__(BOARD_NAME_YELLOW, SCHEMA_YELLOW_BOARD)
 
     @property
     @dbus_property
@@ -21,10 +26,10 @@ class Yellow(BoardProxy):
         """Get heartbeat LED enabled."""
         return self.properties[DBUS_ATTR_HEARTBEAT_LED]
 
-    @heartbeat_led.setter
-    def heartbeat_led(self, enabled: bool) -> None:
+    def set_heartbeat_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable heartbeat LED."""
-        asyncio.create_task(self.dbus.Boards.Yellow.set_heartbeat_led(enabled))
+        self._data[ATTR_HEARTBEAT_LED] = enabled
+        return self.dbus.Boards.Yellow.set_heartbeat_led(enabled)
 
     @property
     @dbus_property
@@ -32,10 +37,10 @@ class Yellow(BoardProxy):
         """Get power LED enabled."""
         return self.properties[DBUS_ATTR_POWER_LED]
 
-    @power_led.setter
-    def power_led(self, enabled: bool) -> None:
+    def set_power_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable power LED."""
-        asyncio.create_task(self.dbus.Boards.Yellow.set_power_led(enabled))
+        self._data[ATTR_POWER_LED] = enabled
+        return self.dbus.Boards.Yellow.set_power_led(enabled)
 
     @property
     @dbus_property
@@ -43,7 +48,18 @@ class Yellow(BoardProxy):
         """Get disk LED enabled."""
         return self.properties[DBUS_ATTR_DISK_LED]
 
-    @disk_led.setter
-    def disk_led(self, enabled: bool) -> None:
+    def set_disk_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable disk LED."""
-        asyncio.create_task(self.dbus.Boards.Yellow.set_disk_led(enabled))
+        self._data[ATTR_DISK_LED] = enabled
+        return self.dbus.Boards.Yellow.set_disk_led(enabled)
+
+    async def connect(self, bus: MessageBus) -> None:
+        """Connect to D-Bus."""
+        await super().connect(bus)
+
+        # Set LEDs based on settings on connect
+        await asyncio.gather(
+            self.set_disk_led(self._data[ATTR_DISK_LED]),
+            self.set_heartbeat_led(self._data[ATTR_HEARTBEAT_LED]),
+            self.set_power_led(self._data[ATTR_POWER_LED]),
+        )
@@ -1,4 +1,5 @@
 """DataDisk object for OS-Agent."""
+
 from pathlib import Path
 
 from ..const import (
@@ -12,6 +12,6 @@ class System(DBusInterface):
     object_path: str = DBUS_OBJECT_HAOS_SYSTEM
 
     @dbus_connected
-    async def schedule_wipe_device(self) -> None:
+    async def schedule_wipe_device(self) -> bool:
         """Schedule a factory reset on next system boot."""
-        await self.dbus.System.call_schedule_wipe_device()
+        return await self.dbus.System.call_schedule_wipe_device()
@@ -1,5 +1,6 @@
 """Constants for DBUS."""
-from enum import Enum, IntEnum
+
+from enum import IntEnum, StrEnum
 from socket import AF_INET, AF_INET6
 
 DBUS_NAME_HAOS = "io.hass.os"
@@ -36,12 +37,14 @@ DBUS_IFACE_RAUC_INSTALLER = "de.pengutronix.rauc.Installer"
 DBUS_IFACE_RESOLVED_MANAGER = "org.freedesktop.resolve1.Manager"
 DBUS_IFACE_SETTINGS_CONNECTION = "org.freedesktop.NetworkManager.Settings.Connection"
 DBUS_IFACE_SYSTEMD_MANAGER = "org.freedesktop.systemd1.Manager"
+DBUS_IFACE_SYSTEMD_UNIT = "org.freedesktop.systemd1.Unit"
 DBUS_IFACE_TIMEDATE = "org.freedesktop.timedate1"
 DBUS_IFACE_UDISKS2_MANAGER = "org.freedesktop.UDisks2.Manager"
 
 DBUS_SIGNAL_NM_CONNECTION_ACTIVE_CHANGED = (
     "org.freedesktop.NetworkManager.Connection.Active.StateChanged"
 )
+DBUS_SIGNAL_PROPERTIES_CHANGED = "org.freedesktop.DBus.Properties.PropertiesChanged"
 DBUS_SIGNAL_RAUC_INSTALLER_COMPLETED = "de.pengutronix.rauc.Installer.Completed"
 
 DBUS_OBJECT_BASE = "/"
@@ -59,11 +62,14 @@ DBUS_OBJECT_RESOLVED = "/org/freedesktop/resolve1"
 DBUS_OBJECT_SETTINGS = "/org/freedesktop/NetworkManager/Settings"
 DBUS_OBJECT_SYSTEMD = "/org/freedesktop/systemd1"
 DBUS_OBJECT_TIMEDATE = "/org/freedesktop/timedate1"
-DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2/Manager"
+DBUS_OBJECT_UDISKS2 = "/org/freedesktop/UDisks2"
+DBUS_OBJECT_UDISKS2_MANAGER = "/org/freedesktop/UDisks2/Manager"
 
 DBUS_ATTR_ACTIVE_ACCESSPOINT = "ActiveAccessPoint"
 DBUS_ATTR_ACTIVE_CONNECTION = "ActiveConnection"
 DBUS_ATTR_ACTIVE_CONNECTIONS = "ActiveConnections"
+DBUS_ATTR_ACTIVE_STATE = "ActiveState"
+DBUS_ATTR_ACTIVITY_LED = "ActivityLED"
 DBUS_ATTR_ADDRESS_DATA = "AddressData"
 DBUS_ATTR_BITRATE = "Bitrate"
 DBUS_ATTR_BOARD = "Board"
@@ -169,19 +175,21 @@ DBUS_ATTR_TIMEUSEC = "TimeUSec"
 DBUS_ATTR_TIMEZONE = "Timezone"
 DBUS_ATTR_TRANSACTION_STATISTICS = "TransactionStatistics"
 DBUS_ATTR_TYPE = "Type"
+DBUS_ATTR_USER_LED = "UserLED"
 DBUS_ATTR_USERSPACE_TIMESTAMP_MONOTONIC = "UserspaceTimestampMonotonic"
 DBUS_ATTR_UUID_UPPERCASE = "UUID"
 DBUS_ATTR_UUID = "Uuid"
 DBUS_ATTR_VARIANT = "Variant"
 DBUS_ATTR_VENDOR = "Vendor"
 DBUS_ATTR_VERSION = "Version"
+DBUS_ATTR_VIRTUALIZATION = "Virtualization"
 DBUS_ATTR_WHAT = "What"
 DBUS_ATTR_WWN = "WWN"
 
 DBUS_ERR_SYSTEMD_NO_SUCH_UNIT = "org.freedesktop.systemd1.NoSuchUnit"
 
 
-class RaucState(str, Enum):
+class RaucState(StrEnum):
     """Rauc slot states."""
 
     GOOD = "good"
@@ -189,7 +197,7 @@ class RaucState(str, Enum):
     ACTIVE = "active"
 
 
-class InterfaceMethod(str, Enum):
+class InterfaceMethod(StrEnum):
     """Interface method simple."""
 
     AUTO = "auto"
@@ -198,14 +206,14 @@ class InterfaceMethod(str, Enum):
     LINK_LOCAL = "link-local"
 
 
-class ConnectionType(str, Enum):
+class ConnectionType(StrEnum):
     """Connection type."""
 
     ETHERNET = "802-3-ethernet"
     WIRELESS = "802-11-wireless"
 
 
-class ConnectionStateType(int, Enum):
+class ConnectionStateType(IntEnum):
     """Connection states.
 
     https://developer.gnome.org/NetworkManager/stable/nm-dbus-types.html#NMActiveConnectionState
@@ -218,7 +226,7 @@ class ConnectionStateType(int, Enum):
     DEACTIVATED = 4
 
 
-class ConnectionStateFlags(int, Enum):
+class ConnectionStateFlags(IntEnum):
     """Connection state flags.
 
     https://developer-old.gnome.org/NetworkManager/stable/nm-dbus-types.html#NMActivationStateFlags
@@ -235,7 +243,7 @@ class ConnectionStateFlags(int, Enum):
     EXTERNAL = 0x80
 
 
-class ConnectivityState(int, Enum):
+class ConnectivityState(IntEnum):
     """Network connectvity.
 
     https://developer.gnome.org/NetworkManager/unstable/nm-dbus-types.html#NMConnectivityState
@@ -248,7 +256,7 @@ class ConnectivityState(int, Enum):
     CONNECTIVITY_FULL = 4
 
 
-class DeviceType(int, Enum):
+class DeviceType(IntEnum):
     """Device types.
 
     https://developer.gnome.org/NetworkManager/stable/nm-dbus-types.html#NMDeviceType
@@ -263,7 +271,7 @@ class DeviceType(int, Enum):
     VETH = 20
 
 
-class WirelessMethodType(int, Enum):
+class WirelessMethodType(IntEnum):
     """Device Type."""
 
     UNKNOWN = 0
@@ -280,7 +288,7 @@ class DNSAddressFamily(IntEnum):
     INET6 = AF_INET6
 
 
-class MulticastProtocolEnabled(str, Enum):
+class MulticastProtocolEnabled(StrEnum):
     """Multicast protocol enabled or resolve."""
 
     YES = "yes"
@@ -288,7 +296,7 @@ class MulticastProtocolEnabled(str, Enum):
     RESOLVE = "resolve"
 
 
-class DNSOverTLSEnabled(str, Enum):
+class DNSOverTLSEnabled(StrEnum):
     """DNS over TLS enabled."""
 
     YES = "yes"
@@ -296,7 +304,7 @@ class DNSOverTLSEnabled(str, Enum):
     OPPORTUNISTIC = "opportunistic"
 
 
-class DNSSECValidation(str, Enum):
+class DNSSECValidation(StrEnum):
     """DNSSEC validation enforced."""
 
     YES = "yes"
@@ -304,7 +312,7 @@ class DNSSECValidation(str, Enum):
     ALLOW_DOWNGRADE = "allow-downgrade"
 
 
-class DNSStubListenerEnabled(str, Enum):
+class DNSStubListenerEnabled(StrEnum):
     """DNS stub listener enabled."""
 
     YES = "yes"
@@ -313,7 +321,7 @@ class DNSStubListenerEnabled(str, Enum):
     UDP_ONLY = "udp"
 
 
-class ResolvConfMode(str, Enum):
+class ResolvConfMode(StrEnum):
     """Resolv.conf management mode."""
 
     FOREIGN = "foreign"
@@ -323,7 +331,7 @@ class ResolvConfMode(str, Enum):
     UPLINK = "uplink"
 
 
-class StopUnitMode(str, Enum):
+class StopUnitMode(StrEnum):
     """Mode for stopping the unit."""
 
     REPLACE = "replace"
@@ -332,7 +340,7 @@ class StopUnitMode(str, Enum):
     IGNORE_REQUIREMENTS = "ignore-requirements"
 
 
-class StartUnitMode(str, Enum):
+class StartUnitMode(StrEnum):
     """Mode for starting the unit."""
 
     REPLACE = "replace"
@@ -342,7 +350,7 @@ class StartUnitMode(str, Enum):
     ISOLATE = "isolate"
 
 
-class UnitActiveState(str, Enum):
+class UnitActiveState(StrEnum):
     """Active state of a systemd unit."""
 
     ACTIVE = "active"
@@ -1,9 +1,10 @@
 """D-Bus interface for hostname."""
+
 import logging
 
 from dbus_fast.aio.message_bus import MessageBus
 
-from ..exceptions import DBusError, DBusInterfaceError
+from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from .const import (
     DBUS_ATTR_CHASSIS,
     DBUS_ATTR_DEPLOYMENT,
@@ -39,7 +40,7 @@ class Hostname(DBusInterfaceProxy):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to systemd-hostname")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No hostname support on the host. Hostname functions have been disabled."
             )
@@ -1,4 +1,5 @@
 """Interface class for D-Bus wrappers."""
+
 from abc import ABC
 from collections.abc import Callable
 from functools import wraps
@@ -1,9 +1,10 @@
 """Interface to Logind over D-Bus."""
+
 import logging
 
 from dbus_fast.aio.message_bus import MessageBus
 
-from ..exceptions import DBusError, DBusInterfaceError
+from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from .const import DBUS_NAME_LOGIND, DBUS_OBJECT_LOGIND
 from .interface import DBusInterface
 from .utils import dbus_connected
@@ -28,8 +29,8 @@ class Logind(DBusInterface):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to systemd-logind")
-        except DBusInterfaceError:
-            _LOGGER.info("No systemd-logind support on the host.")
+        except (DBusServiceUnkownError, DBusInterfaceError):
+            _LOGGER.warning("No systemd-logind support on the host.")
 
     @dbus_connected
     async def reboot(self) -> None:
|||||||
@@ -1,4 +1,5 @@
 """D-Bus interface objects."""
+
 import asyncio
 import logging
 
@@ -17,7 +18,7 @@ from .rauc import Rauc
 from .resolved import Resolved
 from .systemd import Systemd
 from .timedate import TimeDate
-from .udisks2 import UDisks2
+from .udisks2 import UDisks2Manager
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -37,7 +38,7 @@ class DBusManager(CoreSysAttributes):
         self._agent: OSAgent = OSAgent()
         self._timedate: TimeDate = TimeDate()
         self._resolved: Resolved = Resolved()
-        self._udisks2: UDisks2 = UDisks2()
+        self._udisks2: UDisks2Manager = UDisks2Manager()
         self._bus: MessageBus | None = None
 
     @property
@@ -81,7 +82,7 @@ class DBusManager(CoreSysAttributes):
         return self._resolved
 
     @property
-    def udisks2(self) -> UDisks2:
+    def udisks2(self) -> UDisks2Manager:
         """Return the udisks2 interface."""
         return self._udisks2
 
@@ -128,9 +129,11 @@ class DBusManager(CoreSysAttributes):
 
         for err in errors:
             if err:
+                dbus = self.all[errors.index(err)]
                 _LOGGER.warning(
-                    "Can't load dbus interface %s: %s",
-                    self.all[errors.index(err)].name,
+                    "Can't load dbus interface %s %s: %s",
+                    dbus.name,
+                    dbus.object_path,
                     err,
                 )
 
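The enriched log message relies on `errors` lining up positionally with `self.all`. A sketch of that pairing, assuming the list comes from `asyncio.gather(..., return_exceptions=True)`; `zip` is an equivalent way to recover the failing interface without the `errors.index(err)` lookup (all names below are made up for illustration):

```python
import asyncio
from dataclasses import dataclass


@dataclass
class FakeInterface:
    """Hypothetical stand-in for a D-Bus interface wrapper."""

    name: str
    object_path: str
    fail: bool = False

    async def connect(self) -> None:
        if self.fail:
            raise RuntimeError(f"no owner for {self.name}")


async def load(all_interfaces: list[FakeInterface]) -> None:
    # return_exceptions=True keeps results positionally aligned with
    # all_interfaces, which is what lets the loop recover name/object_path.
    errors = await asyncio.gather(
        *(dbus.connect() for dbus in all_interfaces), return_exceptions=True
    )
    for dbus, err in zip(all_interfaces, errors):
        if err:  # successful connect() yields None, failures yield the exception
            print(f"Can't load dbus interface {dbus.name} {dbus.object_path}: {err}")


asyncio.run(
    load(
        [
            FakeInterface("hostname", "/org/freedesktop/hostname1"),
            FakeInterface("logind", "/org/freedesktop/login1", fail=True),
        ]
    )
)
```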
@@ -1,4 +1,5 @@
 """Network Manager implementation for DBUS."""
+
 import logging
 from typing import Any
 
@@ -9,6 +10,8 @@ from ...exceptions import (
     DBusError,
     DBusFatalError,
     DBusInterfaceError,
+    DBusNoReplyError,
+    DBusServiceUnkownError,
     HostNotSupportedError,
     NetworkInterfaceNotFound,
 )
@@ -143,7 +146,7 @@ class NetworkManager(DBusInterfaceProxy):
             await self.settings.connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to Network Manager")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No Network Manager support on the host. Local network functions have been disabled."
             )
@@ -210,8 +213,22 @@ class NetworkManager(DBusInterfaceProxy):
                 # try to query it. Ignore those cases.
                 _LOGGER.debug("Can't process %s: %s", device, err)
                 continue
+            except (
+                DBusNoReplyError,
+                DBusServiceUnkownError,
+            ) as err:
+                # This typically means that NetworkManager disappeared. Give up immeaditly.
+                _LOGGER.error(
+                    "NetworkManager not responding while processing %s: %s. Giving up.",
+                    device,
+                    err,
+                )
+                capture_exception(err)
+                return
             except Exception as err:  # pylint: disable=broad-except
-                _LOGGER.exception("Error while processing %s: %s", device, err)
+                _LOGGER.exception(
+                    "Unkown error while processing %s: %s", device, err
+                )
                 capture_exception(err)
                 continue
 
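The new except clause has to sit before the broad `except Exception`, because it aborts the whole device scan (`return`) instead of skipping one device (`continue`). A compact, runnable model of that control flow with stand-in exceptions:

```python
class DBusNoReplyError(Exception):
    """Stand-in: the bus peer stopped answering."""


def process_devices(devices: list[str]) -> list[str]:
    """Mirror the control flow: skip bad devices, abort if the peer is gone."""
    processed = []
    for device in devices:
        try:
            if device == "bad":
                raise ValueError("unsupported device")
            if device == "gone":
                raise DBusNoReplyError("NetworkManager disappeared")
            processed.append(device)
        except ValueError as err:
            print(f"Can't process {device}: {err}")
            continue  # per-device problem: keep scanning the rest
        except DBusNoReplyError as err:
            print(f"Not responding while processing {device}: {err}. Giving up.")
            return processed  # the peer is gone: no point trying the rest
    return processed


# "wlan0" is never reached because the scan aborts at "gone".
assert process_devices(["eth0", "bad", "gone", "wlan0"]) == ["eth0"]
```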
@@ -1,4 +1,5 @@
 """NetworkConnection objects for Network Manager."""
+
 from dataclasses import dataclass
 from ipaddress import IPv4Address, IPv6Address
 
@@ -58,11 +59,22 @@ class VlanProperties:
     parent: str | None
 
 
+@dataclass(slots=True)
+class IpAddress:
+    """IP address object for Network Manager."""
+
+    address: str
+    prefix: int
+
+
 @dataclass(slots=True)
 class IpProperties:
     """IP properties object for Network Manager."""
 
     method: str | None
+    address_data: list[IpAddress] | None
+    gateway: str | None
+    dns: list[bytes | int] | None
 
 
 @dataclass(slots=True)
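The new `IpAddress` dataclass lets `IpProperties` carry parsed `address-data` alongside the gateway and DNS servers. A sketch of filling it from the kind of mapping the settings unpack to (sample values invented, dataclass layout as in the hunk):

```python
from dataclasses import dataclass


@dataclass(slots=True)
class IpAddress:
    """IP address object for Network Manager."""

    address: str
    prefix: int


@dataclass(slots=True)
class IpProperties:
    """IP properties object for Network Manager."""

    method: str | None
    address_data: list[IpAddress] | None
    gateway: str | None
    dns: list[bytes | int] | None


# Shape of an unpacked "ipv4" settings section (sample values, not a real host).
section = {
    "method": "manual",
    "address-data": [{"address": "192.168.1.5", "prefix": 24}],
    "gateway": "192.168.1.1",
}

# Same walrus pattern the diff uses when parsing CONF_ATTR_IPV4_ADDRESS_DATA.
address_data = None
if ips := section.get("address-data"):
    address_data = [IpAddress(ip["address"], ip["prefix"]) for ip in ips]

props = IpProperties(
    section.get("method"), address_data, section.get("gateway"), section.get("dns")
)
assert props.address_data == [IpAddress("192.168.1.5", 24)]
```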
@@ -121,7 +121,7 @@ class NetworkConnection(DBusInterfaceProxy):
         self._state_flags = {
             flag
             for flag in ConnectionStateFlags
-            if flag.value & self.properties[DBUS_ATTR_STATE_FLAGS]
+            if flag & self.properties[DBUS_ATTR_STATE_FLAGS]
         } or {ConnectionStateFlags.NONE}
 
         # IPv4
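Dropping `.value` works because the enum members support bitwise AND against the raw property directly, and the result is truthy only when the bit is set. A generic illustration with a hypothetical `IntFlag` standing in for `ConnectionStateFlags`:

```python
from enum import IntFlag


class StateFlags(IntFlag):
    """Hypothetical stand-in for ConnectionStateFlags."""

    NONE = 0
    LAYER2_READY = 0x2
    IP4_READY = 0x4
    IP6_READY = 0x8


raw = 0x2 | 0x8  # what the DBUS_ATTR_STATE_FLAGS property might carry

# `flag & raw` is itself a flag value; it is truthy only when the bit is
# set, so the explicit `.value` in the set comprehension was redundant.
state_flags = {flag for flag in StateFlags if flag & raw} or {StateFlags.NONE}
assert state_flags == {StateFlags.LAYER2_READY, StateFlags.IP6_READY}
```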
@@ -1,4 +1,5 @@
 """Network Manager DNS Manager object."""
+
 from ipaddress import ip_address
 import logging
 from typing import Any
@@ -12,7 +13,7 @@ from ...const import (
     ATTR_PRIORITY,
     ATTR_VPN,
 )
-from ...exceptions import DBusError, DBusInterfaceError
+from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from ..const import (
     DBUS_ATTR_CONFIGURATION,
     DBUS_ATTR_MODE,
@@ -67,7 +68,7 @@ class NetworkManagerDNS(DBusInterfaceProxy):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to DnsManager")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No DnsManager support on the host. Local DNS functions have been disabled."
             )
@@ -1,17 +1,18 @@
 """Connection object for Network Manager."""
+
 import logging
 from typing import Any
 
 from dbus_fast import Variant
 from dbus_fast.aio.message_bus import MessageBus
 
-from ....const import ATTR_METHOD, ATTR_MODE, ATTR_PSK, ATTR_SSID
 from ...const import DBUS_NAME_NM
 from ...interface import DBusInterface
 from ...utils import dbus_connected
 from ..configuration import (
     ConnectionProperties,
     EthernetProperties,
+    IpAddress,
     IpProperties,
     MatchProperties,
     VlanProperties,
@@ -20,30 +21,52 @@ from ..configuration import (
 )
 
 CONF_ATTR_CONNECTION = "connection"
+CONF_ATTR_MATCH = "match"
 CONF_ATTR_802_ETHERNET = "802-3-ethernet"
 CONF_ATTR_802_WIRELESS = "802-11-wireless"
 CONF_ATTR_802_WIRELESS_SECURITY = "802-11-wireless-security"
 CONF_ATTR_VLAN = "vlan"
 CONF_ATTR_IPV4 = "ipv4"
 CONF_ATTR_IPV6 = "ipv6"
-CONF_ATTR_MATCH = "match"
-CONF_ATTR_PATH = "path"
 
-ATTR_ID = "id"
-ATTR_UUID = "uuid"
-ATTR_TYPE = "type"
-ATTR_PARENT = "parent"
-ATTR_ASSIGNED_MAC = "assigned-mac-address"
-ATTR_POWERSAVE = "powersave"
-ATTR_AUTH_ALG = "auth-alg"
-ATTR_KEY_MGMT = "key-mgmt"
-ATTR_INTERFACE_NAME = "interface-name"
-ATTR_PATH = "path"
+CONF_ATTR_CONNECTION_ID = "id"
+CONF_ATTR_CONNECTION_UUID = "uuid"
+CONF_ATTR_CONNECTION_TYPE = "type"
+CONF_ATTR_CONNECTION_LLMNR = "llmnr"
+CONF_ATTR_CONNECTION_MDNS = "mdns"
+CONF_ATTR_CONNECTION_AUTOCONNECT = "autoconnect"
+CONF_ATTR_CONNECTION_INTERFACE_NAME = "interface-name"
+
+CONF_ATTR_MATCH_PATH = "path"
+
+CONF_ATTR_VLAN_ID = "id"
+CONF_ATTR_VLAN_PARENT = "parent"
+
+CONF_ATTR_802_ETHERNET_ASSIGNED_MAC = "assigned-mac-address"
+
+CONF_ATTR_802_WIRELESS_MODE = "mode"
+CONF_ATTR_802_WIRELESS_ASSIGNED_MAC = "assigned-mac-address"
+CONF_ATTR_802_WIRELESS_SSID = "ssid"
+CONF_ATTR_802_WIRELESS_POWERSAVE = "powersave"
+CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG = "auth-alg"
+CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT = "key-mgmt"
+CONF_ATTR_802_WIRELESS_SECURITY_PSK = "psk"
+
+CONF_ATTR_IPV4_METHOD = "method"
+CONF_ATTR_IPV4_ADDRESS_DATA = "address-data"
+CONF_ATTR_IPV4_GATEWAY = "gateway"
+CONF_ATTR_IPV4_DNS = "dns"
+
+CONF_ATTR_IPV6_METHOD = "method"
+CONF_ATTR_IPV6_ADDRESS_DATA = "address-data"
+CONF_ATTR_IPV6_GATEWAY = "gateway"
+CONF_ATTR_IPV6_DNS = "dns"
 
 IPV4_6_IGNORE_FIELDS = [
     "addresses",
     "address-data",
     "dns",
+    "dns-data",
     "gateway",
     "method",
 ]
@@ -73,7 +96,7 @@ def _merge_settings_attribute(
 class NetworkSetting(DBusInterface):
     """Network connection setting object for Network Manager.
 
-    https://developer.gnome.org/NetworkManager/stable/gdbus-org.freedesktop.NetworkManager.Settings.Connection.html
+    https://networkmanager.dev/docs/api/1.48.0/gdbus-org.freedesktop.NetworkManager.Settings.Connection.html
     """
 
     bus_name: str = DBUS_NAME_NM
@@ -147,7 +170,7 @@ class NetworkSetting(DBusInterface):
             new_settings,
             settings,
             CONF_ATTR_CONNECTION,
-            ignore_current_value=[ATTR_INTERFACE_NAME],
+            ignore_current_value=[CONF_ATTR_CONNECTION_INTERFACE_NAME],
         )
         _merge_settings_attribute(new_settings, settings, CONF_ATTR_802_ETHERNET)
        _merge_settings_attribute(new_settings, settings, CONF_ATTR_802_WIRELESS)
@@ -192,47 +215,69 @@ class NetworkSetting(DBusInterface):
         # See: https://developer-old.gnome.org/NetworkManager/stable/ch01.html
         if CONF_ATTR_CONNECTION in data:
             self._connection = ConnectionProperties(
-                data[CONF_ATTR_CONNECTION].get(ATTR_ID),
-                data[CONF_ATTR_CONNECTION].get(ATTR_UUID),
-                data[CONF_ATTR_CONNECTION].get(ATTR_TYPE),
-                data[CONF_ATTR_CONNECTION].get(ATTR_INTERFACE_NAME),
+                data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_ID),
+                data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_UUID),
+                data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_TYPE),
+                data[CONF_ATTR_CONNECTION].get(CONF_ATTR_CONNECTION_INTERFACE_NAME),
             )
 
         if CONF_ATTR_802_ETHERNET in data:
             self._ethernet = EthernetProperties(
-                data[CONF_ATTR_802_ETHERNET].get(ATTR_ASSIGNED_MAC),
+                data[CONF_ATTR_802_ETHERNET].get(CONF_ATTR_802_ETHERNET_ASSIGNED_MAC),
             )
 
         if CONF_ATTR_802_WIRELESS in data:
             self._wireless = WirelessProperties(
-                bytes(data[CONF_ATTR_802_WIRELESS].get(ATTR_SSID, [])).decode(),
-                data[CONF_ATTR_802_WIRELESS].get(ATTR_ASSIGNED_MAC),
-                data[CONF_ATTR_802_WIRELESS].get(ATTR_MODE),
-                data[CONF_ATTR_802_WIRELESS].get(ATTR_POWERSAVE),
+                bytes(
+                    data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_SSID, [])
+                ).decode(),
+                data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_ASSIGNED_MAC),
+                data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_MODE),
+                data[CONF_ATTR_802_WIRELESS].get(CONF_ATTR_802_WIRELESS_POWERSAVE),
             )
 
         if CONF_ATTR_802_WIRELESS_SECURITY in data:
             self._wireless_security = WirelessSecurityProperties(
-                data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_AUTH_ALG),
-                data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_KEY_MGMT),
-                data[CONF_ATTR_802_WIRELESS_SECURITY].get(ATTR_PSK),
+                data[CONF_ATTR_802_WIRELESS_SECURITY].get(
+                    CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG
+                ),
+                data[CONF_ATTR_802_WIRELESS_SECURITY].get(
+                    CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT
+                ),
+                data[CONF_ATTR_802_WIRELESS_SECURITY].get(
+                    CONF_ATTR_802_WIRELESS_SECURITY_PSK
+                ),
             )
 
         if CONF_ATTR_VLAN in data:
             self._vlan = VlanProperties(
-                data[CONF_ATTR_VLAN].get(ATTR_ID),
-                data[CONF_ATTR_VLAN].get(ATTR_PARENT),
+                data[CONF_ATTR_VLAN].get(CONF_ATTR_VLAN_ID),
+                data[CONF_ATTR_VLAN].get(CONF_ATTR_VLAN_PARENT),
             )
 
         if CONF_ATTR_IPV4 in data:
+            address_data = None
+            if ips := data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_ADDRESS_DATA):
+                address_data = [IpAddress(ip["address"], ip["prefix"]) for ip in ips]
             self._ipv4 = IpProperties(
-                data[CONF_ATTR_IPV4].get(ATTR_METHOD),
+                data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_METHOD),
+                address_data,
+                data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_GATEWAY),
+                data[CONF_ATTR_IPV4].get(CONF_ATTR_IPV4_DNS),
             )
 
         if CONF_ATTR_IPV6 in data:
+            address_data = None
+            if ips := data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_ADDRESS_DATA):
+                address_data = [IpAddress(ip["address"], ip["prefix"]) for ip in ips]
             self._ipv6 = IpProperties(
-                data[CONF_ATTR_IPV6].get(ATTR_METHOD),
+                data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_METHOD),
+                address_data,
+                data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_GATEWAY),
+                data[CONF_ATTR_IPV6].get(CONF_ATTR_IPV6_DNS),
            )
 
         if CONF_ATTR_MATCH in data:
-            self._match = MatchProperties(data[CONF_ATTR_MATCH].get(ATTR_PATH))
+            self._match = MatchProperties(
+                data[CONF_ATTR_MATCH].get(CONF_ATTR_MATCH_PATH)
+            )
|
|||||||
"""Payload generators for DBUS communication."""
|
"""Payload generators for DBUS communication."""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import socket
|
import socket
|
||||||
from typing import TYPE_CHECKING, Any
|
from typing import TYPE_CHECKING
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
|
|
||||||
from dbus_fast import Variant
|
from dbus_fast import Variant
|
||||||
|
|
||||||
from . import (
|
|
||||||
ATTR_ASSIGNED_MAC,
|
|
||||||
CONF_ATTR_802_ETHERNET,
|
|
||||||
CONF_ATTR_802_WIRELESS,
|
|
||||||
CONF_ATTR_802_WIRELESS_SECURITY,
|
|
||||||
CONF_ATTR_CONNECTION,
|
|
||||||
CONF_ATTR_IPV4,
|
|
||||||
CONF_ATTR_IPV6,
|
|
||||||
CONF_ATTR_MATCH,
|
|
||||||
CONF_ATTR_PATH,
|
|
||||||
CONF_ATTR_VLAN,
|
|
||||||
)
|
|
||||||
from ....host.const import InterfaceMethod, InterfaceType
|
from ....host.const import InterfaceMethod, InterfaceType
|
||||||
|
from .. import NetworkManager
|
||||||
|
from . import (
|
||||||
|
CONF_ATTR_802_ETHERNET,
|
||||||
|
CONF_ATTR_802_ETHERNET_ASSIGNED_MAC,
|
||||||
|
CONF_ATTR_802_WIRELESS,
|
||||||
|
CONF_ATTR_802_WIRELESS_ASSIGNED_MAC,
|
||||||
|
CONF_ATTR_802_WIRELESS_MODE,
|
||||||
|
CONF_ATTR_802_WIRELESS_POWERSAVE,
|
||||||
|
CONF_ATTR_802_WIRELESS_SECURITY,
|
||||||
|
CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG,
|
||||||
|
CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT,
|
||||||
|
CONF_ATTR_802_WIRELESS_SECURITY_PSK,
|
||||||
|
CONF_ATTR_802_WIRELESS_SSID,
|
||||||
|
CONF_ATTR_CONNECTION,
|
||||||
|
CONF_ATTR_CONNECTION_AUTOCONNECT,
|
||||||
|
CONF_ATTR_CONNECTION_ID,
|
||||||
|
CONF_ATTR_CONNECTION_LLMNR,
|
||||||
|
CONF_ATTR_CONNECTION_MDNS,
|
||||||
|
CONF_ATTR_CONNECTION_TYPE,
|
||||||
|
CONF_ATTR_CONNECTION_UUID,
|
||||||
|
CONF_ATTR_IPV4,
|
||||||
|
CONF_ATTR_IPV4_ADDRESS_DATA,
|
||||||
|
CONF_ATTR_IPV4_DNS,
|
||||||
|
CONF_ATTR_IPV4_GATEWAY,
|
||||||
|
CONF_ATTR_IPV4_METHOD,
|
||||||
|
CONF_ATTR_IPV6,
|
||||||
|
CONF_ATTR_IPV6_ADDRESS_DATA,
|
||||||
|
CONF_ATTR_IPV6_DNS,
|
||||||
|
CONF_ATTR_IPV6_GATEWAY,
|
||||||
|
CONF_ATTR_IPV6_METHOD,
|
||||||
|
CONF_ATTR_MATCH,
|
||||||
|
CONF_ATTR_MATCH_PATH,
|
||||||
|
CONF_ATTR_VLAN,
|
||||||
|
CONF_ATTR_VLAN_ID,
|
||||||
|
CONF_ATTR_VLAN_PARENT,
|
||||||
|
)
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from ....host.configuration import Interface
|
from ....host.configuration import Interface
|
||||||
|
|
||||||
|
|
||||||
|
def _get_ipv4_connection_settings(ipv4setting) -> dict:
|
||||||
|
ipv4 = {}
|
||||||
|
if not ipv4setting or ipv4setting.method == InterfaceMethod.AUTO:
|
||||||
|
ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "auto")
|
||||||
|
elif ipv4setting.method == InterfaceMethod.DISABLED:
|
||||||
|
ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "disabled")
|
||||||
|
elif ipv4setting.method == InterfaceMethod.STATIC:
|
||||||
|
ipv4[CONF_ATTR_IPV4_METHOD] = Variant("s", "manual")
|
||||||
|
|
||||||
|
address_data = []
|
||||||
|
for address in ipv4setting.address:
|
||||||
|
address_data.append(
|
||||||
|
{
|
||||||
|
"address": Variant("s", str(address.ip)),
|
||||||
|
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
ipv4[CONF_ATTR_IPV4_ADDRESS_DATA] = Variant("aa{sv}", address_data)
|
||||||
|
if ipv4setting.gateway:
|
||||||
|
ipv4[CONF_ATTR_IPV4_GATEWAY] = Variant("s", str(ipv4setting.gateway))
|
||||||
|
else:
|
||||||
|
raise RuntimeError("Invalid IPv4 InterfaceMethod")
|
||||||
|
|
||||||
|
if (
|
||||||
|
ipv4setting
|
||||||
|
and ipv4setting.nameservers
|
||||||
|
and ipv4setting.method
|
||||||
|
in (
|
||||||
|
InterfaceMethod.AUTO,
|
||||||
|
InterfaceMethod.STATIC,
|
||||||
|
)
|
||||||
|
):
|
||||||
|
nameservers = ipv4setting.nameservers if ipv4setting else []
|
||||||
|
ipv4[CONF_ATTR_IPV4_DNS] = Variant(
|
||||||
|
"au",
|
||||||
|
[socket.htonl(int(ip_address)) for ip_address in nameservers],
|
||||||
|
)
|
||||||
|
|
||||||
|
return ipv4
|
||||||
|
|
||||||
|
|
||||||
|
def _get_ipv6_connection_settings(ipv6setting) -> dict:
|
||||||
|
ipv6 = {}
|
||||||
|
if not ipv6setting or ipv6setting.method == InterfaceMethod.AUTO:
|
||||||
|
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "auto")
|
||||||
|
elif ipv6setting.method == InterfaceMethod.DISABLED:
|
||||||
|
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "link-local")
|
||||||
|
elif ipv6setting.method == InterfaceMethod.STATIC:
|
||||||
|
ipv6[CONF_ATTR_IPV6_METHOD] = Variant("s", "manual")
|
||||||
|
|
||||||
|
address_data = []
|
||||||
|
for address in ipv6setting.address:
|
||||||
|
address_data.append(
|
||||||
|
{
|
||||||
|
"address": Variant("s", str(address.ip)),
|
||||||
|
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
ipv6[CONF_ATTR_IPV6_ADDRESS_DATA] = Variant("aa{sv}", address_data)
|
||||||
|
if ipv6setting.gateway:
|
||||||
|
ipv6[CONF_ATTR_IPV6_GATEWAY] = Variant("s", str(ipv6setting.gateway))
|
||||||
|
else:
|
||||||
|
raise RuntimeError("Invalid IPv6 InterfaceMethod")
|
||||||
|
|
||||||
|
if (
|
||||||
|
ipv6setting
|
||||||
|
and ipv6setting.nameservers
|
||||||
|
and ipv6setting.method
|
||||||
|
in (
|
||||||
|
InterfaceMethod.AUTO,
|
||||||
|
InterfaceMethod.STATIC,
|
||||||
|
)
|
||||||
|
):
|
||||||
|
nameservers = ipv6setting.nameservers if ipv6setting else []
|
||||||
|
ipv6[CONF_ATTR_IPV6_DNS] = Variant(
|
||||||
|
"aay",
|
||||||
|
[ip_address.packed for ip_address in nameservers],
|
||||||
|
)
|
||||||
|
return ipv6
|
||||||
|
|
||||||
|
|
||||||
def get_connection_from_interface(
|
def get_connection_from_interface(
|
||||||
interface: Interface, name: str | None = None, uuid: str | None = None
|
interface: Interface,
|
||||||
) -> Any:
|
network_manager: NetworkManager,
|
||||||
|
name: str | None = None,
|
||||||
|
uuid: str | None = None,
|
||||||
|
) -> dict[str, dict[str, Variant]]:
|
||||||
"""Generate message argument for network interface update."""
|
"""Generate message argument for network interface update."""
|
||||||
|
|
||||||
# Generate/Update ID/name
|
# Generate/Update ID/name
|
||||||
if not name or not name.startswith("Supervisor"):
|
if not name or not name.startswith("Supervisor"):
|
||||||
name = f"Supervisor {interface.name}"
|
name = f"Supervisor {interface.name}"
|
||||||
if interface.type == InterfaceType.VLAN:
|
if interface.type == InterfaceType.VLAN:
|
||||||
name = f"{name}.{interface.vlan.id}"
|
name = f"{name}.{interface.vlan.id}"
|
||||||
|
|
||||||
if interface.type == InterfaceType.ETHERNET:
|
if interface.type == InterfaceType.ETHERNET:
|
||||||
iftype = "802-3-ethernet"
|
iftype = "802-3-ethernet"
|
||||||
elif interface.type == InterfaceType.WIRELESS:
|
elif interface.type == InterfaceType.WIRELESS:
|
||||||
iftype = "802-11-wireless"
|
iftype = "802-11-wireless"
|
||||||
else:
|
else:
|
||||||
iftype = interface.type.value
|
iftype = interface.type
|
||||||
|
|
||||||
# Generate UUID
|
# Generate UUID
|
||||||
if not uuid:
|
if not uuid:
|
||||||
@@ -49,103 +160,77 @@ def get_connection_from_interface(
|
|||||||
|
|
||||||
conn: dict[str, dict[str, Variant]] = {
|
conn: dict[str, dict[str, Variant]] = {
|
||||||
CONF_ATTR_CONNECTION: {
|
CONF_ATTR_CONNECTION: {
|
||||||
"id": Variant("s", name),
|
CONF_ATTR_CONNECTION_ID: Variant("s", name),
|
||||||
"type": Variant("s", iftype),
|
CONF_ATTR_CONNECTION_UUID: Variant("s", uuid),
|
||||||
"uuid": Variant("s", uuid),
|
CONF_ATTR_CONNECTION_TYPE: Variant("s", iftype),
|
||||||
"llmnr": Variant("i", 2),
|
CONF_ATTR_CONNECTION_LLMNR: Variant("i", 2),
|
||||||
"mdns": Variant("i", 2),
|
CONF_ATTR_CONNECTION_MDNS: Variant("i", 2),
|
||||||
"autoconnect": Variant("b", True),
|
CONF_ATTR_CONNECTION_AUTOCONNECT: Variant("b", True),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
if interface.type != InterfaceType.VLAN:
|
if interface.type != InterfaceType.VLAN:
|
||||||
if interface.path:
|
if interface.path:
|
||||||
conn[CONF_ATTR_MATCH] = {CONF_ATTR_PATH: Variant("as", [interface.path])}
|
conn[CONF_ATTR_MATCH] = {
|
||||||
|
CONF_ATTR_MATCH_PATH: Variant("as", [interface.path])
|
||||||
|
}
|
||||||
else:
|
else:
|
||||||
conn[CONF_ATTR_CONNECTION]["interface-name"] = Variant("s", interface.name)
|
conn[CONF_ATTR_CONNECTION]["interface-name"] = Variant("s", interface.name)
|
||||||
|
|
||||||
ipv4 = {}
|
conn[CONF_ATTR_IPV4] = _get_ipv4_connection_settings(interface.ipv4setting)
|
||||||
if not interface.ipv4 or interface.ipv4.method == InterfaceMethod.AUTO:
|
|
||||||
ipv4["method"] = Variant("s", "auto")
|
|
||||||
elif interface.ipv4.method == InterfaceMethod.DISABLED:
|
|
||||||
ipv4["method"] = Variant("s", "disabled")
|
|
||||||
else:
|
|
||||||
ipv4["method"] = Variant("s", "manual")
|
|
||||||
ipv4["dns"] = Variant(
|
|
||||||
"au",
|
|
||||||
[
|
|
||||||
socket.htonl(int(ip_address))
|
|
||||||
for ip_address in interface.ipv4.nameservers
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
adressdata = []
|
conn[CONF_ATTR_IPV6] = _get_ipv6_connection_settings(interface.ipv6setting)
|
||||||
for address in interface.ipv4.address:
|
|
||||||
adressdata.append(
|
|
||||||
{
|
|
||||||
"address": Variant("s", str(address.ip)),
|
|
||||||
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
ipv4["address-data"] = Variant("aa{sv}", adressdata)
|
|
||||||
ipv4["gateway"] = Variant("s", str(interface.ipv4.gateway))
|
|
||||||
|
|
||||||
conn[CONF_ATTR_IPV4] = ipv4
|
|
||||||
|
|
||||||
ipv6 = {}
|
|
||||||
if not interface.ipv6 or interface.ipv6.method == InterfaceMethod.AUTO:
|
|
||||||
ipv6["method"] = Variant("s", "auto")
|
|
||||||
elif interface.ipv6.method == InterfaceMethod.DISABLED:
|
|
||||||
ipv6["method"] = Variant("s", "link-local")
|
|
||||||
else:
|
|
||||||
ipv6["method"] = Variant("s", "manual")
|
|
||||||
ipv6["dns"] = Variant(
|
|
||||||
"aay", [ip_address.packed for ip_address in interface.ipv6.nameservers]
|
|
||||||
)
|
|
||||||
|
|
||||||
adressdata = []
|
|
||||||
for address in interface.ipv6.address:
|
|
||||||
adressdata.append(
|
|
||||||
{
|
|
||||||
"address": Variant("s", str(address.ip)),
|
|
||||||
"prefix": Variant("u", int(address.with_prefixlen.split("/")[-1])),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
ipv6["address-data"] = Variant("aa{sv}", adressdata)
|
|
||||||
ipv6["gateway"] = Variant("s", str(interface.ipv6.gateway))
|
|
||||||
|
|
||||||
conn[CONF_ATTR_IPV6] = ipv6
|
|
||||||
|
|
||||||
if interface.type == InterfaceType.ETHERNET:
|
if interface.type == InterfaceType.ETHERNET:
|
||||||
conn[CONF_ATTR_802_ETHERNET] = {ATTR_ASSIGNED_MAC: Variant("s", "preserve")}
|
conn[CONF_ATTR_802_ETHERNET] = {
|
||||||
|
CONF_ATTR_802_ETHERNET_ASSIGNED_MAC: Variant("s", "preserve")
|
||||||
|
}
|
||||||
elif interface.type == "vlan":
|
elif interface.type == "vlan":
|
||||||
|
parent = interface.vlan.interface
|
||||||
|
if parent in network_manager and (
|
||||||
|
parent_connection := network_manager.get(parent).connection
|
||||||
|
):
|
||||||
|
parent = parent_connection.uuid
|
||||||
|
|
||||||
conn[CONF_ATTR_VLAN] = {
|
conn[CONF_ATTR_VLAN] = {
|
||||||
"id": Variant("u", interface.vlan.id),
|
CONF_ATTR_VLAN_ID: Variant("u", interface.vlan.id),
|
||||||
"parent": Variant("s", interface.vlan.interface),
|
CONF_ATTR_VLAN_PARENT: Variant("s", parent),
|
||||||
}
|
}
|
||||||
elif interface.type == InterfaceType.WIRELESS:
|
elif interface.type == InterfaceType.WIRELESS:
|
||||||
wireless = {
|
wireless = {
|
||||||
ATTR_ASSIGNED_MAC: Variant("s", "preserve"),
|
CONF_ATTR_802_WIRELESS_ASSIGNED_MAC: Variant("s", "preserve"),
|
||||||
"ssid": Variant("ay", interface.wifi.ssid.encode("UTF-8")),
|
CONF_ATTR_802_WIRELESS_MODE: Variant("s", "infrastructure"),
|
||||||
"mode": Variant("s", "infrastructure"),
|
CONF_ATTR_802_WIRELESS_POWERSAVE: Variant("i", 1),
|
||||||
"powersave": Variant("i", 1),
|
|
||||||
}
|
}
|
||||||
|
if interface.wifi and interface.wifi.ssid:
|
||||||
|
wireless[CONF_ATTR_802_WIRELESS_SSID] = Variant(
|
||||||
|
"ay", interface.wifi.ssid.encode("UTF-8")
|
||||||
|
)
|
||||||
|
|
||||||
conn[CONF_ATTR_802_WIRELESS] = wireless
|
conn[CONF_ATTR_802_WIRELESS] = wireless
|
||||||
|
|
||||||
if interface.wifi.auth != "open":
|
if interface.wifi and interface.wifi.auth != "open":
|
||||||
wireless["security"] = Variant("s", CONF_ATTR_802_WIRELESS_SECURITY)
|
wireless["security"] = Variant("s", CONF_ATTR_802_WIRELESS_SECURITY)
|
||||||
wireless_security = {}
|
wireless_security = {}
|
||||||
if interface.wifi.auth == "wep":
|
if interface.wifi.auth == "wep":
|
||||||
wireless_security["auth-alg"] = Variant("s", "none")
|
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG] = Variant(
|
||||||
wireless_security["key-mgmt"] = Variant("s", "open")
|
"s", "open"
|
||||||
|
)
|
||||||
|
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT] = Variant(
|
||||||
|
"s", "none"
|
||||||
|
)
|
||||||
elif interface.wifi.auth == "wpa-psk":
|
elif interface.wifi.auth == "wpa-psk":
|
||||||
wireless_security["auth-alg"] = Variant("s", "open")
|
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_AUTH_ALG] = Variant(
|
||||||
wireless_security["key-mgmt"] = Variant("s", "wpa-psk")
|
"s", "open"
|
||||||
|
)
|
||||||
|
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_KEY_MGMT] = Variant(
|
||||||
|
"s", "wpa-psk"
|
||||||
|
)
|
||||||
|
|
||||||
if interface.wifi.psk:
|
if interface.wifi.psk:
|
||||||
wireless_security["psk"] = Variant("s", interface.wifi.psk)
|
wireless_security[CONF_ATTR_802_WIRELESS_SECURITY_PSK] = Variant(
|
||||||
|
"s", interface.wifi.psk
|
||||||
|
)
|
||||||
conn[CONF_ATTR_802_WIRELESS_SECURITY] = wireless_security
|
conn[CONF_ATTR_802_WIRELESS_SECURITY] = wireless_security
|
||||||
|
|
||||||
return conn
|
return conn
|
||||||
|
|||||||
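The extracted `_get_ipv4_connection_settings` helper builds the `ipv4` section out of `dbus_fast.Variant` values. A sketch of what its static branch produces, using a minimal stand-in for the Supervisor's setting object (field names assumed from the diff; sample addresses invented):

```python
import socket
from dataclasses import dataclass, field
from ipaddress import IPv4Address, IPv4Interface

from dbus_fast import Variant  # same library the payload generators use


@dataclass
class Ipv4Setting:
    """Hypothetical stand-in for the Supervisor's ipv4setting object."""

    method: str = "static"
    address: list[IPv4Interface] = field(default_factory=list)
    gateway: IPv4Address | None = None
    nameservers: list[IPv4Address] = field(default_factory=list)


setting = Ipv4Setting(
    address=[IPv4Interface("192.168.1.5/24")],
    gateway=IPv4Address("192.168.1.1"),
    nameservers=[IPv4Address("192.168.1.1")],
)

# Mirror of the static branch: aa{sv} address-data plus network-byte-order DNS.
ipv4 = {
    "method": Variant("s", "manual"),
    "address-data": Variant(
        "aa{sv}",
        [
            {
                "address": Variant("s", str(a.ip)),
                "prefix": Variant("u", int(a.with_prefixlen.split("/")[-1])),
            }
            for a in setting.address
        ],
    ),
    "gateway": Variant("s", str(setting.gateway)),
    "dns": Variant("au", [socket.htonl(int(ip)) for ip in setting.nameservers]),
}

assert ipv4["address-data"].value[0]["prefix"].value == 24
```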
@@ -1,10 +1,11 @@
 """Network Manager implementation for DBUS."""
+
 import logging
 from typing import Any
 
 from dbus_fast.aio.message_bus import MessageBus
 
-from ...exceptions import DBusError, DBusInterfaceError
+from ...exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
 from ..const import DBUS_NAME_NM, DBUS_OBJECT_SETTINGS
 from ..interface import DBusInterface
 from ..network.setting import NetworkSetting
@@ -28,7 +29,7 @@ class NetworkManagerSettings(DBusInterface):
             await super().connect(bus)
         except DBusError:
             _LOGGER.warning("Can't connect to Network Manager Settings")
-        except DBusInterfaceError:
+        except (DBusServiceUnkownError, DBusInterfaceError):
             _LOGGER.warning(
                 "No Network Manager Settings support on the host. Local network functions have been disabled."
             )
@@ -1,4 +1,5 @@
 """Wireless object for Network Manager."""
+
 import asyncio
 import logging
 from typing import Any
Some files were not shown because too many files have changed in this diff.